197929ea7SJunchao Zhang static char help[]= " Test VecScatter with x, y on different communicators\n\n"; 297929ea7SJunchao Zhang 397929ea7SJunchao Zhang #include <petscvec.h> 497929ea7SJunchao Zhang 597929ea7SJunchao Zhang int main(int argc,char **argv) 697929ea7SJunchao Zhang { 797929ea7SJunchao Zhang PetscErrorCode ierr; 897929ea7SJunchao Zhang PetscInt i,n=5,rstart; 997929ea7SJunchao Zhang PetscScalar *val; 1097929ea7SJunchao Zhang const PetscScalar *dat; 1197929ea7SJunchao Zhang Vec x,y1,y2; 1297929ea7SJunchao Zhang MPI_Comm newcomm; 1397929ea7SJunchao Zhang PetscMPIInt nproc,rank; 1497929ea7SJunchao Zhang IS ix; 1597929ea7SJunchao Zhang VecScatter vscat1,vscat2; 1697929ea7SJunchao Zhang 1797929ea7SJunchao Zhang PetscFunctionBegin; 1897929ea7SJunchao Zhang ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr; 19ffc4695bSBarry Smith ierr = MPI_Comm_size(PETSC_COMM_WORLD,&nproc);CHKERRMPI(ierr); 20ffc4695bSBarry Smith ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr); 2197929ea7SJunchao Zhang 22*2c71b3e2SJacob Faibussowitsch PetscCheckFalse(nproc != 2,PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"This test must run with exactly two MPI ranks"); 2397929ea7SJunchao Zhang 2497929ea7SJunchao Zhang /* Create MPI vectors x and y, which are on the same comm (i.e., MPI_IDENT) */ 2597929ea7SJunchao Zhang ierr = VecCreateMPI(PETSC_COMM_WORLD,n,PETSC_DECIDE,&x);CHKERRQ(ierr); 2697929ea7SJunchao Zhang ierr = VecDuplicate(x,&y1);CHKERRQ(ierr); 2797929ea7SJunchao Zhang ierr = VecGetOwnershipRange(x,&rstart,NULL);CHKERRQ(ierr); 2897929ea7SJunchao Zhang 2997929ea7SJunchao Zhang /* Set x's value locally. 
x would be {0., 1., 2., ..., 9.} */ 3097929ea7SJunchao Zhang ierr = VecGetArray(x,&val);CHKERRQ(ierr); 3197929ea7SJunchao Zhang for (i=0; i<n; i++) val[i] = rstart + i; 3297929ea7SJunchao Zhang ierr = VecRestoreArray(x,&val);CHKERRQ(ierr); 3397929ea7SJunchao Zhang 3497929ea7SJunchao Zhang /* Create index set ix = {0, 1, 2, ..., 9} */ 3597929ea7SJunchao Zhang ierr = ISCreateStride(PETSC_COMM_WORLD,n,rstart,1,&ix);CHKERRQ(ierr); 3697929ea7SJunchao Zhang 3797929ea7SJunchao Zhang /* Create newcomm that reverses processes in x's comm, and then create y2 on it*/ 38ffc4695bSBarry Smith ierr = MPI_Comm_split(PETSC_COMM_WORLD,0/*color*/,-rank/*key*/,&newcomm);CHKERRMPI(ierr); 3997929ea7SJunchao Zhang ierr = VecCreateMPI(newcomm,n,PETSC_DECIDE,&y2);CHKERRQ(ierr); 4097929ea7SJunchao Zhang 4197929ea7SJunchao Zhang /* It looks vscat1/2 are the same, but actually not. y2 is on a different communicator than x */ 4297929ea7SJunchao Zhang ierr = VecScatterCreate(x,ix,y1,ix,&vscat1);CHKERRQ(ierr); 4397929ea7SJunchao Zhang ierr = VecScatterCreate(x,ix,y2,ix,&vscat2);CHKERRQ(ierr); 4497929ea7SJunchao Zhang 4597929ea7SJunchao Zhang ierr = VecScatterBegin(vscat1,x,y1,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 4697929ea7SJunchao Zhang ierr = VecScatterBegin(vscat2,x,y2,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 4797929ea7SJunchao Zhang ierr = VecScatterEnd (vscat1,x,y1,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 4897929ea7SJunchao Zhang ierr = VecScatterEnd (vscat2,x,y2,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 4997929ea7SJunchao Zhang 5097929ea7SJunchao Zhang /* View on rank 0 of x's comm, which is PETSC_COMM_WORLD */ 51dd400576SPatrick Sanan if (rank == 0) { 5297929ea7SJunchao Zhang /* Print the part of x on rank 0, which is 0 1 2 3 4 */ 5397929ea7SJunchao Zhang ierr = PetscPrintf(PETSC_COMM_SELF,"\nOn rank 0 of PETSC_COMM_WORLD, x = ");CHKERRQ(ierr); 5497929ea7SJunchao Zhang ierr = VecGetArrayRead(x,&dat);CHKERRQ(ierr); 5505d37114SJacob Faibussowitsch for (i=0; i<n; i++) 
{ierr = PetscPrintf(PETSC_COMM_SELF," %.0g",(double)PetscRealPart(dat[i]));CHKERRQ(ierr);} 5697929ea7SJunchao Zhang ierr = VecRestoreArrayRead(x,&dat);CHKERRQ(ierr); 5797929ea7SJunchao Zhang 5897929ea7SJunchao Zhang /* Print the part of y1 on rank 0, which is 0 1 2 3 4 */ 5997929ea7SJunchao Zhang ierr = PetscPrintf(PETSC_COMM_SELF,"\nOn rank 0 of PETSC_COMM_WORLD, y1 = ");CHKERRQ(ierr); 6097929ea7SJunchao Zhang ierr = VecGetArrayRead(y1,&dat);CHKERRQ(ierr); 6105d37114SJacob Faibussowitsch for (i=0; i<n; i++) {ierr = PetscPrintf(PETSC_COMM_SELF," %.0g",(double)PetscRealPart(dat[i]));CHKERRQ(ierr);} 6297929ea7SJunchao Zhang ierr = VecRestoreArrayRead(y1,&dat);CHKERRQ(ierr); 6397929ea7SJunchao Zhang 6497929ea7SJunchao Zhang /* Print the part of y2 on rank 0, which is 5 6 7 8 9 since y2 swapped the processes of PETSC_COMM_WORLD */ 6597929ea7SJunchao Zhang ierr = PetscPrintf(PETSC_COMM_SELF,"\nOn rank 0 of PETSC_COMM_WORLD, y2 = ");CHKERRQ(ierr); 6697929ea7SJunchao Zhang ierr = VecGetArrayRead(y2,&dat);CHKERRQ(ierr); 6705d37114SJacob Faibussowitsch for (i=0; i<n; i++) {ierr = PetscPrintf(PETSC_COMM_SELF," %.0g",(double)PetscRealPart(dat[i]));CHKERRQ(ierr);} 6897929ea7SJunchao Zhang ierr = VecRestoreArrayRead(y2,&dat);CHKERRQ(ierr); 6997929ea7SJunchao Zhang ierr = PetscPrintf(PETSC_COMM_SELF,"\n");CHKERRQ(ierr); 7097929ea7SJunchao Zhang } 7197929ea7SJunchao Zhang 7297929ea7SJunchao Zhang ierr = ISDestroy(&ix);CHKERRQ(ierr); 7397929ea7SJunchao Zhang ierr = VecDestroy(&x);CHKERRQ(ierr); 7497929ea7SJunchao Zhang ierr = VecDestroy(&y1);CHKERRQ(ierr); 7597929ea7SJunchao Zhang ierr = VecDestroy(&y2);CHKERRQ(ierr); 7697929ea7SJunchao Zhang ierr = VecScatterDestroy(&vscat1);CHKERRQ(ierr); 7797929ea7SJunchao Zhang ierr = VecScatterDestroy(&vscat2);CHKERRQ(ierr); 78ffc4695bSBarry Smith ierr = MPI_Comm_free(&newcomm);CHKERRMPI(ierr); 7997929ea7SJunchao Zhang ierr = PetscFinalize(); 8097929ea7SJunchao Zhang return ierr; 8197929ea7SJunchao Zhang } 8297929ea7SJunchao Zhang 
/*TEST

   test:
      nsize: 2
      requires: double

TEST*/