/*************************************xxt.c************************************
Module Name: xxt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL 60439               |Chicago, IL 60637               |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
**************************************xxt.c***********************************/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT  1
#define BOTH   0

typedef struct xxt_solver_info {
  PetscInt    n, m, n_global, m_global;
  PetscInt    nnz, max_nnz, msg_buf_sz;
  PetscInt    *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt    *col_sz, *col_indices;
  PetscScalar **col_vals, *x, *solve_uu, *solve_w;
  PetscInt    nsolves;
  PetscScalar tot_solve_time;
} xxt_info;

typedef struct matvec_info {
  PetscInt       n, m, n_global, m_global;
  PetscInt       *local2global;
  PCTFS_gs_ADT   PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*);
  void           *grid_data;
} mv_info;

struct xxt_CDT {
  PetscInt id;
  PetscInt ns;
  PetscInt level;
  xxt_info *info;
  mv_info  *mvi;
};

static PetscInt n_xxt        =0;
static PetscInt n_xxt_handles=0;

/* prototypes */
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xxt_ADT xxt_handle);
static PetscErrorCode det_separators(xxt_ADT xxt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscInt xxt_generate(xxt_ADT xxt_handle);
static PetscInt do_xxt_factor(xxt_ADT xxt_handle);
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info*,PetscScalar*,PetscScalar*), void *grid_data);

/**************************************xxt.c***********************************/
xxt_ADT XXT_new(void)
{
  xxt_ADT xxt_handle;

  /* rolling count on n_xxt ... pot. problem here */
  n_xxt_handles++;
  xxt_handle       = (xxt_ADT)malloc(sizeof(struct xxt_CDT));
  xxt_handle->id   = ++n_xxt;
  xxt_handle->info = NULL;
  xxt_handle->mvi  = NULL;

  return(xxt_handle);
}

/**************************************xxt.c***********************************/
PetscInt XXT_factor(xxt_ADT xxt_handle,     /* prev. allocated xxt handle */
                    PetscInt *local2global, /* global column mapping      */
                    PetscInt n,             /* local num rows             */
                    PetscInt m,             /* local num cols             */
                    PetscErrorCode (*matvec)(void*,PetscScalar*,PetscScalar*), /* b_loc=A_local.x_loc */
                    void *grid_data)        /* grid data for matvec       */
{
  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* only 2^k for now and all nodes participating */
  if ((1<<(xxt_handle->level=PCTFS_i_log2_num_nodes))!=PCTFS_num_nodes) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"only 2^k for now and MPI_COMM_WORLD!!! %D != %D\n",1<<PCTFS_i_log2_num_nodes,PCTFS_num_nodes);

  /* space for X info */
  xxt_handle->info = (xxt_info*)malloc(sizeof(xxt_info));

  /* set up matvec handles */
  xxt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xxt_handle->ns = 0;

  /* determine separators and generate firing order - NB xxt info set here */
  det_separators(xxt_handle);

  return(do_xxt_factor(xxt_handle));
}
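
/**************************************xxt.c***********************************/
/* A minimal usage sketch of the lifecycle above - create, factor once, solve
   (possibly many times), free. Everything here other than the XXT_* calls is
   hypothetical: my_matvec and grid stand in for the application's local
   coarse-matrix apply and its grid data. Kept under #if 0 so it is never
   compiled. */
#if 0
static PetscErrorCode my_matvec(void *grid_data, PetscScalar *in, PetscScalar *out)
{
  /* out = A_local . in, using the caller's grid data */
  return 0;
}

static PetscInt xxt_example(PetscInt *local2global, PetscInt n, PetscInt m,
                            void *grid, PetscScalar *x, PetscScalar *b)
{
  xxt_ADT handle = XXT_new();                               /* new handle      */
  XXT_factor(handle, local2global, n, m, my_matvec, grid);  /* build X         */
  XXT_solve(handle, x, b);                                  /* x = X.X^T.b     */
  XXT_stats(handle);                                        /* optional stats  */
  XXT_free(handle);                                         /* release storage */
  return 0;
}
#endif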

/**************************************xxt.c***********************************/
PetscInt XXT_solve(xxt_ADT xxt_handle, PetscScalar *x, PetscScalar *b)
{
  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* need to copy b into x? */
  if (b) { PCTFS_rvec_copy(x,b,xxt_handle->mvi->n); }
  do_xxt_solve(xxt_handle,x);

  return(0);
}

/**************************************xxt.c***********************************/
PetscInt XXT_free(xxt_ADT xxt_handle)
{
  PCTFS_comm_init();
  check_handle(xxt_handle);
  n_xxt_handles--;

  free(xxt_handle->info->nsep);
  free(xxt_handle->info->lnsep);
  free(xxt_handle->info->fo);
  free(xxt_handle->info->stages);
  free(xxt_handle->info->solve_uu);
  free(xxt_handle->info->solve_w);
  free(xxt_handle->info->x);
  free(xxt_handle->info->col_vals);
  free(xxt_handle->info->col_sz);
  free(xxt_handle->info->col_indices);
  free(xxt_handle->info);
  free(xxt_handle->mvi->local2global);
  PCTFS_gs_free(xxt_handle->mvi->PCTFS_gs_handle);
  free(xxt_handle->mvi);
  free(xxt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return(0);
}

/**************************************xxt.c***********************************/
PetscInt XXT_stats(xxt_ADT xxt_handle)
{
  /* each value is seeded three times so one element-wise reduction returns
     its min, max, and sum (the sum yields the averages printed below) */
  PetscInt       op[]  = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD};
  PetscInt       fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD};
  PetscInt       vals[9], work[9];
  PetscScalar    fvals[3], fwork[3];
  PetscErrorCode ierr;

  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* if factorization not done there are no stats */
  if (!xxt_handle->info||!xxt_handle->mvi) {
    if (!PCTFS_my_id) { ierr = PetscPrintf(PETSC_COMM_WORLD,"XXT_stats() :: no stats available!\n");CHKERRQ(ierr); }
    return 1;
  }

  vals[0]=vals[1]=vals[2]=xxt_handle->info->nnz;
  vals[3]=vals[4]=vals[5]=xxt_handle->mvi->n;
  vals[6]=vals[7]=vals[8]=xxt_handle->info->msg_buf_sz;
  PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);

  fvals[0]=fvals[1]=fvals[2]=xxt_handle->info->tot_solve_time/xxt_handle->info->nsolves++;
  PCTFS_grop(fvals,fwork,sizeof(fop)/sizeof(fop[0])-1,fop);

  if (!PCTFS_my_id) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min xxt_nnz=%D\n",PCTFS_my_id,vals[0]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max xxt_nnz=%D\n",PCTFS_my_id,vals[1]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xxt_nnz=%g\n",PCTFS_my_id,1.0*vals[2]/PCTFS_num_nodes);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: tot xxt_nnz=%D\n",PCTFS_my_id,vals[2]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt C(2d)  =%g\n",PCTFS_my_id,vals[2]/(pow(1.0*vals[5],1.5)));CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt C(3d)  =%g\n",PCTFS_my_id,vals[2]/(pow(1.0*vals[5],1.6667)));CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min xxt_n  =%D\n",PCTFS_my_id,vals[3]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max xxt_n  =%D\n",PCTFS_my_id,vals[4]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xxt_n  =%g\n",PCTFS_my_id,1.0*vals[5]/PCTFS_num_nodes);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: tot xxt_n  =%D\n",PCTFS_my_id,vals[5]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min xxt_buf=%D\n",PCTFS_my_id,vals[6]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max xxt_buf=%D\n",PCTFS_my_id,vals[7]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xxt_buf=%g\n",PCTFS_my_id,1.0*vals[8]/PCTFS_num_nodes);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min xxt_slv=%g\n",PCTFS_my_id,fvals[0]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max xxt_slv=%g\n",PCTFS_my_id,fvals[1]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xxt_slv=%g\n",PCTFS_my_id,fvals[2]/PCTFS_num_nodes);CHKERRQ(ierr);
  }

  return(0);
}

/*************************************xxt.c************************************

Description: get A_local, local portion of global coarse matrix which
is a row dist. nxm matrix w/ n<m.
   o my_ml holds address of ML struct associated w/A_local and coarse grid
   o local2global holds global number of column i (i=0,...,m-1)
   o local2global holds global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed
     using PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
**************************************xxt.c***********************************/
static PetscInt do_xxt_factor(xxt_ADT xxt_handle)
{
  return xxt_generate(xxt_handle);
}
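
/*************************************xxt.c************************************
A note on what xxt_generate() below actually builds (added commentary, in
exact arithmetic and assuming A symmetric and of full rank): a sparse factor
X with

        A^{-1} = X.X^T,   i.e.   x_i^T.A.x_j = delta_ij,

so that a solve is two sparse passes, uc = X.(X^T.b), as in do_xxt_solve().
Columns are generated one unit vector e_col at a time, in the separator
"firing order" fo[] computed by det_separators(), by A-orthogonalizing
against all previous columns and then A-normalizing. Ordering the columns by
separators, bottom of the tree up, is what keeps X sparse.
**************************************xxt.c***********************************/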

/**************************************xxt.c***********************************/
static PetscInt xxt_generate(xxt_ADT xxt_handle)
{
  PetscInt       i,j,k,idex;
  PetscInt       dim, col;
  PetscScalar    *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt       *segs;
  PetscInt       op[] = {GL_ADD,0};
  PetscInt       off, len;
  PetscScalar    *x_ptr;
  PetscInt       *iptr, flag;
  PetscInt       start=0, end, work;
  PetscInt       op2[] = {GL_MIN,0};
  PCTFS_gs_ADT   PCTFS_gs_handle;
  PetscInt       *nsep, *lnsep, *fo;
  PetscInt       a_n=xxt_handle->mvi->n;
  PetscInt       a_m=xxt_handle->mvi->m;
  PetscInt       *a_local2global=xxt_handle->mvi->local2global;
  PetscInt       level;
  PetscInt       xxt_nnz=0, xxt_max_nnz=0;
  PetscInt       n, m;
  PetscInt       *col_sz, *col_indices, *stages;
  PetscScalar    **col_vals, *x;
  PetscInt       n_global;
  PetscInt       xxt_zero_nnz=0;
  PetscInt       xxt_zero_nnz_0=0;
  PetscBLASInt   i1 = 1,dlen;
  PetscScalar    dm1 = -1.0;
  PetscErrorCode ierr;

  n               = xxt_handle->mvi->n;
  nsep            = xxt_handle->info->nsep;
  lnsep           = xxt_handle->info->lnsep;
  fo              = xxt_handle->info->fo;
  end             = lnsep[0];
  level           = xxt_handle->level;
  PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i=0, j=0; i<=level; i++) j += nsep[i];

  m = j-xxt_handle->ns;
  if (m!=j) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"xxt_generate() :: null space exists %D %D %D\n",m,j,xxt_handle->ns);CHKERRQ(ierr);
  }

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  /* col_indices holds an (off,len) pair per column; the trailing -1 is the
     sentinel that terminates the column walks here and in do_xxt_solve() */
  col_sz      = (PetscInt*)malloc(m*sizeof(PetscInt));
  col_indices = (PetscInt*)malloc((2*m+1)*sizeof(PetscInt));
  col_vals    = (PetscScalar**)malloc(m*sizeof(PetscScalar*));
  for (i=j=0; i<m; i++, j+=2) {
    col_indices[j]=col_indices[j+1]=col_sz[i]=-1;
    col_vals[i]   =NULL;
  }
  col_indices[j]=-1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt*)malloc((level+1)*sizeof(PetscInt));
  segs   = (PetscInt*)malloc((level+1)*sizeof(PetscInt));
  PCTFS_ivec_zero(stages,level+1);
  PCTFS_ivec_copy(segs,nsep,level+1);
  for (i=0; i<level; i++) segs[i+1] += segs[i];
  stages[0] = segs[0];

  /* temporary vectors */
  u  = (PetscScalar*)malloc(n*sizeof(PetscScalar));
  z  = (PetscScalar*)malloc(n*sizeof(PetscScalar));
  v  = (PetscScalar*)malloc(a_m*sizeof(PetscScalar));
  uu = (PetscScalar*)malloc(m*sizeof(PetscScalar));
  w  = (PetscScalar*)malloc(m*sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i=1, j=0; i<=level; i++) j += nsep[i];

  /* storage for sparse x values */
  n_global    = xxt_handle->info->n_global;
  xxt_max_nnz = (PetscInt)(2.5*pow(1.0*n_global,1.6667) + j*n/2)/PCTFS_num_nodes;
  x           = (PetscScalar*)malloc(xxt_max_nnz*sizeof(PetscScalar));
  xxt_nnz     = 0;
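
  /* Per-column recap (added commentary matching the step labels below):
        v   = e_col                fire one separator dof
        u   = A.v                  local matvec (+ gs when a_n!=a_m)
        uu  = X^T.u                local dots, then staged comm (ssgl_radd)
        v  -= X.uu                 A-orthogonalize against prior columns
        v  /= sqrt(v^T.A.v)        A-normalize (alpha)
     then the nonzero run v[off..off+len) is appended to x[] as column i and
     recorded as the (off,len) pair col_indices[2*i], col_indices[2*i+1]. */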

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim=i=j=0; i<m; i++) {
    /* time to move to the next level? */
    while (i==segs[dim]) {
      if (dim==level) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"dim about to exceed level\n");
      stages[dim++]=i;
      end         +=lnsep[dim];
    }
    stages[dim]=i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    (start<end) ? (col=fo[start]) : (col=INT_MAX);
    PCTFS_giop_hc(&col,&work,1,op2,dim);

    /* shouldn't need this */
    if (col==INT_MAX) {
      ierr = PetscInfo(0,"hey ... col==INT_MAX??\n");CHKERRQ(ierr);
      continue;
    }

    /* do I own it? I should */
    PCTFS_rvec_zero(v,a_m);
    if (col==fo[start]) {
      start++;
      idex=PCTFS_ivec_linear_search(col, a_local2global, a_n);
      if (idex!=-1) {
        v[idex] = 1.0; j++;
      } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"NOT FOUND!\n");
    } else {
      idex=PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idex!=-1) v[idex] = 1.0;
    }

    /* perform u = A.v_l */
    PCTFS_rvec_zero(u,n);
    do_matvec(xxt_handle->mvi,v,u);

    /* uu = X^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XXT_solve call ? */
    PCTFS_rvec_zero(uu,m);
    x_ptr=x;
    iptr =col_indices;
    for (k=0; k<i; k++) {
      off   = *iptr++;
      len   = *iptr++;
      dlen  = PetscBLASIntCast(len);
      uu[k] = BLASdot_(&dlen,u+off,&i1,x_ptr,&i1);
      x_ptr+=len;
    }

    /* uu = X^T.u_l (comm portion) */
    PCTFS_ssgl_radd(uu, w, dim, stages);

    /* z = X.uu */
    PCTFS_rvec_zero(z,n);
    x_ptr=x;
    iptr =col_indices;
    for (k=0; k<i; k++) {
      off  = *iptr++;
      len  = *iptr++;
      dlen = PetscBLASIntCast(len);
      BLASaxpy_(&dlen,&uu[k],x_ptr,&i1,z+off,&i1);
      x_ptr+=len;
    }

    /* compute v_l = v_l - z */
    PCTFS_rvec_zero(v+a_n,a_m-a_n);
    dlen = PetscBLASIntCast(n);
    BLASaxpy_(&dlen,&dm1,z,&i1,v,&i1);

    /* compute u_l = A.v_l */
    if (a_n!=a_m) PCTFS_gs_gop_hc(PCTFS_gs_handle,v,"+\0",dim);
    PCTFS_rvec_zero(u,n);
    do_matvec(xxt_handle->mvi,v,u);

    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - local portion */
    dlen  = PetscBLASIntCast(n);
    alpha = BLASdot_(&dlen,u,&i1,v,&i1);
    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - comm portion */
    PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim);

    alpha = (PetscScalar)PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha                             */
    /* LATER use this to detect and determine null space */
    if (fabs(alpha)<1.0e-14) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"bad alpha! %g\n",alpha);

    /* compute v_l = v_l/sqrt(alpha) */
    PCTFS_rvec_scale(v,1.0/alpha,n);

    /* add newly generated column, v_l, to X */
    flag = 1;
    off=len=0;
    for (k=0; k<n; k++) {
      if (v[k]!=0.0) {
        len=k;
        if (flag) { off=k; flag=0; }
      }
    }

    len -= (off-1);

    if (len>0) {
      if ((xxt_nnz+len)>xxt_max_nnz) {
        ierr = PetscInfo(0,"increasing space for X by 2x!\n");CHKERRQ(ierr);
        xxt_max_nnz *= 2;
        x_ptr = (PetscScalar*)malloc(xxt_max_nnz*sizeof(PetscScalar));
        PCTFS_rvec_copy(x_ptr,x,xxt_nnz);
        free(x);
        x      = x_ptr;
        x_ptr += xxt_nnz;
      }
      /* note: in the no-realloc case x_ptr was left pointing at x+xxt_nnz
         by the z = X.uu walk above */
      xxt_nnz += len;
      PCTFS_rvec_copy(x_ptr,v+off,len);

      /* keep track of number of zeros */
      if (dim) {
        for (k=0; k<len; k++) {
          if (x_ptr[k]==0.0) xxt_zero_nnz++;
        }
      } else {
        for (k=0; k<len; k++) {
          if (x_ptr[k]==0.0) xxt_zero_nnz_0++;
        }
      }
      col_indices[2*i] = off;
      col_sz[i] = col_indices[2*i+1] = len;
      col_vals[i] = x_ptr;
    } else {
      col_indices[2*i] = 0;
      col_sz[i] = col_indices[2*i+1] = 0;
      col_vals[i] = x_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim!=level) {
    stages[dim++]=i;
    ierr = PetscInfo2(0,"disconnected!!! dim(%D)!=level(%D)\n",dim,level);CHKERRQ(ierr);
  }
  stages[dim]=i;

  xxt_handle->info->n              = xxt_handle->mvi->n;
  xxt_handle->info->m              = m;
  xxt_handle->info->nnz            = xxt_nnz;
  xxt_handle->info->max_nnz        = xxt_max_nnz;
  xxt_handle->info->msg_buf_sz     = stages[level]-stages[0];
  xxt_handle->info->solve_uu       = (PetscScalar*)malloc(m*sizeof(PetscScalar));
  xxt_handle->info->solve_w        = (PetscScalar*)malloc(m*sizeof(PetscScalar));
  xxt_handle->info->x              = x;
  xxt_handle->info->col_vals       = col_vals;
  xxt_handle->info->col_sz         = col_sz;
  xxt_handle->info->col_indices    = col_indices;
  xxt_handle->info->stages         = stages;
  xxt_handle->info->nsolves        = 0;
  xxt_handle->info->tot_solve_time = 0.0;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return(0);
}
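
/* do_xxt_solve() below applies uc <- X.(X^T.uc) in place (added commentary):
   the first loop forms uu[k] = x_k^T.uc from each column's (off,len) run,
   the staged PCTFS_ssgl_radd() completes the X^T product across the
   hypercube, and the second loop accumulates uc = sum_k uu[k].x_k. The -1
   sentinel in col_indices[] terminates both walks. */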
/**************************************xxt.c***********************************/
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *uc)
{
  PetscInt     off, len, *iptr;
  PetscInt     level       =xxt_handle->level;
  PetscInt     n           =xxt_handle->info->n;
  PetscInt     m           =xxt_handle->info->m;
  PetscInt     *stages     =xxt_handle->info->stages;
  PetscInt     *col_indices=xxt_handle->info->col_indices;
  PetscScalar  *x_ptr, *uu_ptr;
  PetscScalar  *solve_uu   =xxt_handle->info->solve_uu;
  PetscScalar  *solve_w    =xxt_handle->info->solve_w;
  PetscScalar  *x          =xxt_handle->info->x;
  PetscBLASInt i1 = 1,dlen;

  PetscFunctionBegin;
  uu_ptr=solve_uu;
  PCTFS_rvec_zero(uu_ptr,m);

  /* x  = X.Y^T.b */
  /* uu = Y^T.b   */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len) {
    off=*iptr++; len=*iptr++;
    dlen = PetscBLASIntCast(len);
    *uu_ptr++ = BLASdot_(&dlen,uc+off,&i1,x_ptr,&i1);
  }

  /* communication of beta */
  uu_ptr=solve_uu;
  if (level) PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages);

  PCTFS_rvec_zero(uc,n);

  /* x = X.uu */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len) {
    off=*iptr++; len=*iptr++;
    dlen = PetscBLASIntCast(len);
    BLASaxpy_(&dlen,uu_ptr++,x_ptr,&i1,uc+off,&i1);
  }
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
static PetscErrorCode check_handle(xxt_ADT xxt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX};

  PetscFunctionBegin;
  if (!xxt_handle) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: NULL %D\n",xxt_handle);

  vals[0]=vals[1]=xxt_handle->id;
  PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);
  if ((vals[0]!=vals[1])||(xxt_handle->id<=0)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: id mismatch min/max %D/%D %D\n",vals[0],vals[1],xxt_handle->id);
  PetscFunctionReturn(0);
}
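
/* det_separators() below (added commentary): working top down over the
   log2(P) hypercube levels, each pass splits the current sub-hypercube in
   half and fires 1s from each half through the gs handle; owned dofs that
   receive signal from the other half (and are not yet marked) form the
   interface, i.e. that level's separator set. In the non-shared case only
   the half with more unmarked dofs contributes its side of the interface
   ("bias LHS"). Owned separator dofs are pushed onto fo[] from the back, so
   fo[] ends up in bottom-up firing order for xxt_generate(); nsep[]/lnsep[]
   record global/local separator counts per level, and whatever remains is
   marked at level 0 (the on-processor case). */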
/**************************************xxt.c***********************************/
static PetscErrorCode det_separators(xxt_ADT xxt_handle)
{
  PetscInt     i, ct, id;
  PetscInt     mask, edge, *iptr;
  PetscInt     *dir, *used;
  PetscInt     sum[4], w[4];
  PetscScalar  rsum[4], rw[4];
  PetscInt     op[] = {GL_ADD,0};
  PetscScalar  *lhs, *rhs;
  PetscInt     *nsep, *lnsep, *fo, nfo=0;
  PCTFS_gs_ADT PCTFS_gs_handle=xxt_handle->mvi->PCTFS_gs_handle;
  PetscInt     *local2global  =xxt_handle->mvi->local2global;
  PetscInt     n              =xxt_handle->mvi->n;
  PetscInt     m              =xxt_handle->mvi->m;
  PetscInt     level          =xxt_handle->level;
  PetscInt     shared         =0;

  PetscFunctionBegin;
  dir  = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  nsep = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  lnsep= (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  fo   = (PetscInt*)malloc(sizeof(PetscInt)*(n+1));
  used = (PetscInt*)malloc(sizeof(PetscInt)*n);

  PCTFS_ivec_zero(dir,  level+1);
  PCTFS_ivec_zero(nsep, level+1);
  PCTFS_ivec_zero(lnsep,level+1);
  PCTFS_ivec_set (fo,   -1, n+1);
  PCTFS_ivec_zero(used,n);

  lhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
  rhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);

  /* determine the # of unique dof */
  PCTFS_rvec_zero(lhs,m);
  PCTFS_rvec_set(lhs,1.0,n);
  PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",level);
  PCTFS_rvec_zero(rsum,2);
  for (ct=i=0; i<n; i++) {
    if (lhs[i]!=0.0) { rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i]; }
  }
  PCTFS_grop_hc(rsum,rw,2,op,level);
  rsum[0]+=0.1;
  rsum[1]+=0.1;

  /* if the multiplicity-weighted and unweighted counts differ, some dofs
     are replicated (shared) across processors */
  if (fabs(rsum[0]-rsum[1])>EPS) shared=1;

  xxt_handle->info->n_global=xxt_handle->info->m_global=(PetscInt)rsum[0];
  xxt_handle->mvi->n_global =xxt_handle->mvi->m_global =(PetscInt)rsum[0];

  /* determine separator sets top down */
  if (shared) {
    /* shared dof version */
    for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level; edge>0; edge--,mask>>=1) {
      /* set rsh of hc, fire, and collect lhs responses */
      (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge);

      for (i=0; i<n; i++) {
        if (id< mask) {
          if (lhs[i]!=0.0) lhs[i]=1.0;
        }
        if (id>=mask) {
          if (rhs[i]!=0.0) rhs[i]=1.0;
        }
      }

      if (id< mask) PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge-1);
      else          PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge-1);

      /* count number of dofs I own that have signal and not in sep set */
      PCTFS_rvec_zero(rsum,4);
      for (PCTFS_ivec_zero(sum,4),ct=i=0; i<n; i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if (id< mask) {
            if (lhs[i]!=0.0) { sum[0]++; rsum[0]+=1.0/lhs[i]; }
          }
          /* number of dofs to be marked on rhs hc */
          if (id>=mask) {
            if (rhs[i]!=0.0) { sum[1]++; rsum[1]+=1.0/rhs[i]; }
          }
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id<mask) ? (sum[2]=ct)  : (sum[3]=ct);
      (id<mask) ? (rsum[2]=ct) : (rsum[3]=ct);
      PCTFS_giop_hc(sum,w,4,op,edge);
      PCTFS_grop_hc(rsum,rw,4,op,edge);
      rsum[0]+=0.1; rsum[1]+=0.1; rsum[2]+=0.1; rsum[3]+=0.1;

      if (id<mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct=i=0; i<n; i++) {
          if ((!used[i])&&(lhs[i]!=0.0)) {
            ct++; nfo++;

            if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct>1) PCTFS_ivec_sort(iptr,ct);

        lnsep[edge]=ct;
        nsep[edge] =(PetscInt)rsum[0];
        dir [edge] =LEFT;
      }

      if (id>=mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct=i=0; i<n; i++) {
          if ((!used[i])&&(rhs[i]!=0.0)) {
            ct++; nfo++;

            if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct>1) PCTFS_ivec_sort(iptr,ct);

        lnsep[edge]=ct;
        nsep[edge] =(PetscInt)rsum[1];
        dir [edge] =RIGHT;
      }

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this? */

      /* fold rhs hc into lower */
      if (id>=mask) id-=mask;
    }
  } else {
    /* non-shared dof version */
    for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level; edge>0; edge--,mask>>=1) {
      /* set rsh of hc, fire, and collect lhs responses */
      (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge);

      /* count number of dofs I own that have signal and not in sep set */
      for (PCTFS_ivec_zero(sum,4),ct=i=0; i<n; i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if ((id< mask)&&(lhs[i]!=0.0)) sum[0]++;
          /* number of dofs to be marked on rhs hc */
          if ((id>=mask)&&(rhs[i]!=0.0)) sum[1]++;
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
      PCTFS_giop_hc(sum,w,4,op,edge);

      /* lhs hc wins */
      if (sum[2]>=sum[3]) {
        if (id<mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct=i=0; i<n; i++) {
            if ((!used[i])&&(lhs[i]!=0.0)) {
              ct++; nfo++;
              *--iptr = local2global[i];
              used[i] = edge;
            }
          }
          if (ct>1) PCTFS_ivec_sort(iptr,ct);
          lnsep[edge]=ct;
        }
        nsep[edge]=sum[0];
        dir [edge]=LEFT;
      } else { /* rhs hc wins */
        if (id>=mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct=i=0; i<n; i++) {
            if ((!used[i])&&(rhs[i]!=0.0)) {
              ct++; nfo++;
              *--iptr = local2global[i];
              used[i] = edge;
            }
          }
          if (ct>1) PCTFS_ivec_sort(iptr,ct);
          lnsep[edge]=ct;
        }
        nsep[edge]=sum[1];
        dir [edge]=RIGHT;
      }
      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this? */

      /* fold rhs hc into lower */
      if (id>=mask) id-=mask;
    }
  }

  /* level 0 is on processor case - so mark the remainder */
  for (ct=i=0; i<n; i++) {
    if (!used[i]) {
      ct++; nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct>1) PCTFS_ivec_sort(iptr,ct);
  lnsep[edge]=ct;
  nsep [edge]=ct;
  dir  [edge]=LEFT;

  xxt_handle->info->nsep =nsep;
  xxt_handle->info->lnsep=lnsep;
  xxt_handle->info->fo   =fo;
  xxt_handle->info->nfo  =nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
static mv_info *set_mvi(PetscInt *local2global,PetscInt n,PetscInt m,PetscErrorCode (*matvec)(mv_info*,PetscScalar*,PetscScalar*),void *grid_data)
{
  mv_info *mvi;

  mvi           = (mv_info*)malloc(sizeof(mv_info));
  mvi->n        = n;
  mvi->m        = m;
  mvi->n_global = -1;
  mvi->m_global = -1;
  /* private copy of local2global, terminated with a sentinel */
  mvi->local2global = (PetscInt*)malloc((m+1)*sizeof(PetscInt));
  PCTFS_ivec_copy(mvi->local2global,local2global,m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec    = matvec;
  mvi->grid_data = grid_data;

  /* set xxt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return(mvi);
}

/**************************************xxt.c***********************************/
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  /* the registered callback receives the caller's grid data, not the mv_info */
  A->matvec((mv_info*)A->grid_data,v,u);
  PetscFunctionReturn(0);
}
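
/**************************************xxt.c***********************************/
/* Ownership notes (added summary of the code above, not original API docs):
   set_mvi() keeps a private copy of local2global, so the caller retains its
   array; everything allocated during XXT_factor() - the xxt_info arrays, the
   mv_info, its local2global copy, and the gs handle - is released by
   XXT_free(). */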