#include <petscsf.h>
#include <petsc/private/matimpl.h>

#undef __FUNCT__
#define __FUNCT__ "MatIncreaseOverlapSplit_Single"

/*
 * Increase the overlap of a sub-matrix over its sub-communicator.
 * The sub-matrix can be a graph or a numerical matrix.
 * */
PetscErrorCode MatIncreaseOverlapSplit_Single(Mat mat,IS *is,PetscInt ov)
{
  PetscInt        i,nindx,*indices_sc,*indices_ov,localsize,*localsizes_sc,localsize_tmp;
  PetscInt        *indices_ov_rd,nroots,nleaves,*localoffsets,*indices_recv,*sources_sc,*sources_sc_rd;
  const PetscInt  *indices;
  PetscMPIInt     srank,ssize,issamecomm,k;
  IS              is_sc,allis_sc,partitioning;
  MPI_Comm        gcomm,scomm;
  PetscSF         sf;
  PetscSFNode     *remote;
  Mat             *smat;
  MatPartitioning part;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* get the sub-communicator before calling the individual MatIncreaseOverlap,
   * since the sub-communicator may be changed
   * */
  ierr = PetscObjectGetComm((PetscObject)*is,&scomm);CHKERRQ(ierr);
  /* get the global communicator; mat is a global (parallel) matrix */
  ierr = PetscObjectGetComm((PetscObject)mat,&gcomm);CHKERRQ(ierr);
#if 1
  ierr = PetscPrintf(gcomm,"before mat->ops->increaseoverlap\n");CHKERRQ(ierr);
#endif
  /* increase overlap on each individual subdomain */
  ierr = (*mat->ops->increaseoverlap)(mat,1,is,ov);CHKERRQ(ierr);
#if 1
  ierr = PetscPrintf(gcomm,"after mat->ops->increaseoverlap\n");CHKERRQ(ierr);
  ierr = ISView(*is,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
#endif
  /* compare communicators */
  ierr = MPI_Comm_compare(gcomm,scomm,&issamecomm);CHKERRQ(ierr);
  /* if the sub-communicator is the same as the global communicator,
   * the user does not want to use a sub-communicator
   * */
  if (issamecomm == MPI_IDENT) PetscFunctionReturn(0);
  /* if the sub-communicator is PETSC_COMM_SELF,
   * the user does not care about the sub-communicator either
   * */
  ierr = MPI_Comm_compare(scomm,PETSC_COMM_SELF,&issamecomm);CHKERRQ(ierr);
  if (issamecomm == MPI_IDENT) PetscFunctionReturn(0);
  /* local rank and size of the sub-communicator */
  ierr = MPI_Comm_rank(scomm,&srank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(scomm,&ssize);CHKERRQ(ierr);
  /* create a new IS based on the sub-communicator,
   * since the old IS is often based on PETSC_COMM_SELF
   * */
  ierr = ISGetLocalSize(*is,&nindx);CHKERRQ(ierr);
  ierr = PetscCalloc1(nindx,&indices_sc);CHKERRQ(ierr);
  ierr = ISGetIndices(*is,&indices);CHKERRQ(ierr);
  ierr = PetscMemcpy(indices_sc,indices,sizeof(PetscInt)*nindx);CHKERRQ(ierr);
  ierr = ISRestoreIndices(*is,&indices);CHKERRQ(ierr);
  /* we do not need the old IS any more */
  ierr = ISDestroy(is);CHKERRQ(ierr);
  /* create an index set based on the sub-communicator */
  ierr = ISCreateGeneral(scomm,nindx,indices_sc,PETSC_OWN_POINTER,&is_sc);CHKERRQ(ierr);
  /* gather all indices within the sub-communicator */
  ierr = ISAllGather(is_sc,&allis_sc);CHKERRQ(ierr);
  ierr = ISDestroy(&is_sc);CHKERRQ(ierr);
  /* gather local sizes */
  ierr = PetscMalloc1(ssize,&localsizes_sc);CHKERRQ(ierr);
  /* get the individual local sizes of all index sets */
  ierr = MPI_Gather(&nindx,1,MPIU_INT,localsizes_sc,1,MPIU_INT,0,scomm);CHKERRQ(ierr);
  /* only the root performs the following computations */
  if (!srank) {
    /* get the local size of the big index set */
    ierr = ISGetLocalSize(allis_sc,&localsize);CHKERRQ(ierr);
    ierr = PetscCalloc2(localsize,&indices_ov,localsize,&sources_sc);CHKERRQ(ierr);
    ierr = PetscCalloc2(localsize,&indices_ov_rd,localsize,&sources_sc_rd);CHKERRQ(ierr);
    ierr = ISGetIndices(allis_sc,&indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices_ov,indices,sizeof(PetscInt)*localsize);CHKERRQ(ierr);
    ierr = ISRestoreIndices(allis_sc,&indices);CHKERRQ(ierr);
    /* we do not need it any more */
    ierr = ISDestroy(&allis_sc);CHKERRQ(ierr);
    /* assign the source rank for each index */
    localsize_tmp = 0;
    for (k=0; k<ssize; k++) {
      for (i=0; i<localsizes_sc[k]; i++) {
        sources_sc[localsize_tmp++] = k;
      }
    }
    /* sort the indices, keeping track of where they come from */
    ierr = PetscSortIntWithArray(localsize,indices_ov,sources_sc);CHKERRQ(ierr);
    /* count local sizes for the reduced (duplicate-free) indices */
    ierr = PetscMemzero(localsizes_sc,sizeof(PetscInt)*ssize);CHKERRQ(ierr);
    /* initialize the first entry */
    if (localsize) {
      indices_ov_rd[0] = indices_ov[0];
      sources_sc_rd[0] = sources_sc[0];
      localsizes_sc[sources_sc[0]]++;
    }
    localsize_tmp = 1;
    /* remove duplicated indices */
    for (i=1; i<localsize; i++) {
      if (indices_ov[i] != indices_ov[i-1]) {
        indices_ov_rd[localsize_tmp]   = indices_ov[i];
        sources_sc_rd[localsize_tmp++] = sources_sc[i];
        localsizes_sc[sources_sc[i]]++;
      }
    }
    ierr = PetscFree2(indices_ov,sources_sc);CHKERRQ(ierr);
    ierr = PetscCalloc1(ssize+1,&localoffsets);CHKERRQ(ierr);
    for (k=0; k<ssize; k++) {
      localoffsets[k+1] = localoffsets[k] + localsizes_sc[k];
    }
    /* construct a star forest to send the reduced indices back to their source ranks */
    nleaves = localoffsets[ssize];
    ierr    = PetscMemzero(localoffsets,(ssize+1)*sizeof(PetscInt));CHKERRQ(ierr);
    nroots  = localsizes_sc[srank];
    ierr    = PetscCalloc1(nleaves,&remote);CHKERRQ(ierr);
    for (i=0; i<nleaves; i++) {
      remote[i].rank  = sources_sc_rd[i];
      remote[i].index = localoffsets[sources_sc_rd[i]]++;
    }
    ierr = PetscFree(localoffsets);CHKERRQ(ierr);
  } else {
    ierr = ISDestroy(&allis_sc);CHKERRQ(ierr);
    /* non-root ranks own no leaves; null the pointers so they are safe to hand off below */
    nleaves       = 0;
    indices_ov_rd = NULL;
    sources_sc_rd = NULL;
    remote        = NULL;
  }
  /* scatter the reduced sizes to everybody */
  ierr = MPI_Scatter(localsizes_sc,1,MPIU_INT,&nroots,1,MPIU_INT,0,scomm);CHKERRQ(ierr);
  /* free memory */
  ierr = PetscFree(localsizes_sc);CHKERRQ(ierr);
  ierr = PetscCalloc1(nroots,&indices_recv);CHKERRQ(ierr);
  /* send the data back to everybody: the leaves are the gathered indices on the root,
   * the roots are the receive slots on each rank */
  ierr = PetscSFCreate(scomm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFReduceBegin(sf,MPIU_INT,indices_ov_rd,indices_recv,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,indices_ov_rd,indices_recv,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* free memory */
  ierr = PetscFree2(indices_ov_rd,sources_sc_rd);CHKERRQ(ierr);
  /* create an index set from the received indices */
  ierr = ISCreateGeneral(scomm,nroots,indices_recv,PETSC_OWN_POINTER,&is_sc);CHKERRQ(ierr);
  /* create an index set for the columns */
  ierr = ISAllGather(is_sc,&allis_sc);CHKERRQ(ierr);
  /* construct a parallel submatrix */
  ierr = PetscCalloc1(1,&smat);CHKERRQ(ierr);
  ierr = MatGetSubMatricesMPI(mat,1,&is_sc,&allis_sc,MAT_INITIAL_MATRIX,&smat);CHKERRQ(ierr);
  /* we do not need them any more */
  ierr = ISDestroy(&is_sc);CHKERRQ(ierr);
  ierr = ISDestroy(&allis_sc);CHKERRQ(ierr);
  /* create a partitioner to repartition the sub-matrix */
  ierr = MatPartitioningCreate(scomm,&part);CHKERRQ(ierr);
  ierr = MatPartitioningSetAdjacency(part,smat[0]);CHKERRQ(ierr);
#if defined(PETSC_HAVE_PARMETIS)
  /* if a ParMETIS installation exists, try to use ParMETIS
   * because its repartitioning routine may work better
   * */
  ierr = MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);CHKERRQ(ierr);
  /* use the repartition function instead of the partition function */
  ierr = MatPartitioningParmetisSetRepartition(part);CHKERRQ(ierr);
#else
  /* at least provide a default partitioner to rebalance the computation */
  ierr = MatPartitioningSetType(part,MATPARTITIONINGAVERAGE);CHKERRQ(ierr);
#endif
  /* the user can pick any partitioner through the options database */
  ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
  /* apply the partitioning */
  ierr = MatPartitioningApply(part,&partitioning);CHKERRQ(ierr);
  ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
  ierr = MatDestroy(&(smat[0]));CHKERRQ(ierr);
  ierr = PetscFree(smat);CHKERRQ(ierr);
  /* get local rows including overlap */
  ierr = ISBuildTwoSided(partitioning,is);CHKERRQ(ierr);
  ierr = ISDestroy(&partitioning);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
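
/*
 * Usage sketch (illustration only, not part of the library source): the names
 * A, rowis, and scomm below are hypothetical.  Assuming A is a parallel matrix
 * living on the global communicator and rowis is an IS created on a
 * sub-communicator scomm that holds the rows of one subdomain, an extra layer
 * of overlap could be requested roughly as follows:
 *
 *   Mat A;        // parallel matrix on the global communicator
 *   IS  rowis;    // subdomain rows, created on the sub-communicator scomm
 *   ierr = MatIncreaseOverlapSplit_Single(A,&rowis,1);CHKERRQ(ierr);
 *
 * On return the original rowis has been destroyed and replaced by a new IS on
 * scomm that also contains the overlapping rows, repartitioned over the ranks
 * of scomm.
 */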