/*
   Code for some of the parallel vector primitives.
*/
#include <../src/vec/vec/impls/mpi/pvecimpl.h>
#include <petscblaslapack.h>

#undef __FUNCT__
#define __FUNCT__ "VecMDot_MPI"
PetscErrorCode VecMDot_MPI(Vec xin,PetscInt nv,const Vec y[],PetscScalar *z)
{
  PetscScalar    awork[128],*work = awork;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Use the stack buffer for up to 128 vectors; allocate for more */
  if (nv > 128) {
    ierr = PetscMalloc(nv*sizeof(PetscScalar),&work);CHKERRQ(ierr);
  }
  /* Compute the local dot products, then sum them across all processes */
  ierr = VecMDot_Seq(xin,nv,y,work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(work,z,nv,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  if (nv > 128) {
    ierr = PetscFree(work);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecMTDot_MPI"
PetscErrorCode VecMTDot_MPI(Vec xin,PetscInt nv,const Vec y[],PetscScalar *z)
{
  PetscScalar    awork[128],*work = awork;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (nv > 128) {
    ierr = PetscMalloc(nv*sizeof(PetscScalar),&work);CHKERRQ(ierr);
  }
  /* Compute the local transpose dot products, then sum them across all processes */
  ierr = VecMTDot_Seq(xin,nv,y,work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(work,z,nv,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  if (nv > 128) {
    ierr = PetscFree(work);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#include <../src/vec/vec/impls/seq/ftn-kernels/fnorm.h>
#undef __FUNCT__
#define __FUNCT__ "VecNorm_MPI"
PetscErrorCode VecNorm_MPI(Vec xin,NormType type,PetscReal *z)
{
  PetscReal         sum,work = 0.0;
  const PetscScalar *xx;
  PetscErrorCode    ierr;
  PetscInt          n = xin->map->n;
  PetscBLASInt      one = 1,bn = PetscBLASIntCast(n);

  PetscFunctionBegin;
  if (type == NORM_2 || type == NORM_FROBENIUS) {
    /* Sum the local squares, reduce globally, then take the square root */
    ierr = VecGetArrayRead(xin,&xx);CHKERRQ(ierr);
    work = PetscRealPart(BLASdot_(&bn,xx,&one,xx,&one));
    ierr = VecRestoreArrayRead(xin,&xx);CHKERRQ(ierr);
    ierr = MPI_Allreduce(&work,&sum,1,MPIU_REAL,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
    *z   = PetscSqrtReal(sum);
    ierr = PetscLogFlops(2.0*xin->map->n);CHKERRQ(ierr);
  } else if (type == NORM_1) {
    /* Find the local sum */
    ierr = VecNorm_Seq(xin,NORM_1,&work);CHKERRQ(ierr);
    /* Find the global sum */
    ierr = MPI_Allreduce(&work,z,1,MPIU_REAL,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  } else if (type == NORM_INFINITY) {
    /* Find the local max */
    ierr = VecNorm_Seq(xin,NORM_INFINITY,&work);CHKERRQ(ierr);
    /* Find the global max */
    ierr = MPI_Allreduce(&work,z,1,MPIU_REAL,MPIU_MAX,((PetscObject)xin)->comm);CHKERRQ(ierr);
  } else if (type == NORM_1_AND_2) {
    PetscReal temp[2];
    ierr = VecNorm_Seq(xin,NORM_1,temp);CHKERRQ(ierr);
    ierr = VecNorm_Seq(xin,NORM_2,temp+1);CHKERRQ(ierr);
    temp[1] = temp[1]*temp[1];
    /* Combine both norms into a single reduction to save one synchronization */
    ierr = MPI_Allreduce(temp,z,2,MPIU_REAL,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
    z[1] = PetscSqrtReal(z[1]);
  }
  PetscFunctionReturn(0);
}
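/*
   A minimal usage sketch (not part of the original file): callers reach
   VecNorm_MPI() through the generic VecNorm() interface.  The guard macro
   PETSC_PVEC2_USAGE_SKETCH and the function name below are hypothetical,
   added only for illustration; the vector size is arbitrary.
*/
#if defined(PETSC_PVEC2_USAGE_SKETCH)
static PetscErrorCode PVec2NormUsageSketch(MPI_Comm comm)
{
  Vec            x;
  PetscReal      nrm;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateMPI(comm,PETSC_DECIDE,100,&x);CHKERRQ(ierr);
  ierr = VecSet(x,1.0);CHKERRQ(ierr);
  ierr = VecNorm(x,NORM_2,&nrm);CHKERRQ(ierr); /* dispatches to VecNorm_MPI for MPI vectors */
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif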
/*
   These two functions are the MPI reduction operations used for max and min with index.
   Calls to MPI_Op_create() made during PETSc initialization convert the functions
   Vec[Max,Min]_Local() into the MPI operators Vec[Max,Min]_Local_Op.
*/
MPI_Op VecMax_Local_Op = 0;
MPI_Op VecMin_Local_Op = 0;

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecMax_Local"
void MPIAPI VecMax_Local(void *in,void *out,PetscMPIInt *cnt,MPI_Datatype *datatype)
{
  /* Each entry is a pair of PetscReals: [0] is the value, [1] is its index */
  PetscReal *xin = (PetscReal*)in,*xout = (PetscReal*)out;

  PetscFunctionBegin;
  if (*datatype != MPIU_REAL) {
    (*PetscErrorPrintf)("Can only handle MPIU_REAL data types");
    MPI_Abort(MPI_COMM_WORLD,1);
  }
  if (xin[0] > xout[0]) {
    xout[0] = xin[0];
    xout[1] = xin[1];
  } else if (xin[0] == xout[0]) {
    xout[1] = PetscMin(xin[1],xout[1]); /* on a tie, keep the smaller index */
  }
  PetscFunctionReturnVoid(); /* cannot return a value */
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecMin_Local"
void MPIAPI VecMin_Local(void *in,void *out,PetscMPIInt *cnt,MPI_Datatype *datatype)
{
  PetscReal *xin = (PetscReal*)in,*xout = (PetscReal*)out;

  PetscFunctionBegin;
  if (*datatype != MPIU_REAL) {
    (*PetscErrorPrintf)("Can only handle MPIU_REAL data types");
    MPI_Abort(MPI_COMM_WORLD,1);
  }
  if (xin[0] < xout[0]) {
    xout[0] = xin[0];
    xout[1] = xin[1];
  } else if (xin[0] == xout[0]) {
    xout[1] = PetscMin(xin[1],xout[1]); /* on a tie, keep the smaller index */
  }
  PetscFunctionReturnVoid();
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecMax_MPI"
PetscErrorCode VecMax_MPI(Vec xin,PetscInt *idx,PetscReal *z)
{
  PetscErrorCode ierr;
  PetscReal      work;

  PetscFunctionBegin;
  /* Find the local max */
  ierr = VecMax_Seq(xin,idx,&work);CHKERRQ(ierr);

  /* Find the global max */
  if (!idx) {
    ierr = MPI_Allreduce(&work,z,1,MPIU_REAL,MPIU_MAX,((PetscObject)xin)->comm);CHKERRQ(ierr);
  } else {
    PetscReal work2[2],z2[2];
    PetscInt  rstart;

    rstart   = xin->map->rstart;
    work2[0] = work;
    work2[1] = *idx + rstart; /* convert the local index to a global index */
    ierr     = MPI_Allreduce(work2,z2,2,MPIU_REAL,VecMax_Local_Op,((PetscObject)xin)->comm);CHKERRQ(ierr);
    *z       = z2[0];
    *idx     = (PetscInt)z2[1];
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecMin_MPI"
PetscErrorCode VecMin_MPI(Vec xin,PetscInt *idx,PetscReal *z)
{
  PetscErrorCode ierr;
  PetscReal      work;

  PetscFunctionBegin;
  /* Find the local min */
  ierr = VecMin_Seq(xin,idx,&work);CHKERRQ(ierr);

  /* Find the global min */
  if (!idx) {
    ierr = MPI_Allreduce(&work,z,1,MPIU_REAL,MPIU_MIN,((PetscObject)xin)->comm);CHKERRQ(ierr);
  } else {
    PetscReal work2[2],z2[2];
    PetscInt  rstart;

    ierr     = VecGetOwnershipRange(xin,&rstart,PETSC_NULL);CHKERRQ(ierr);
    work2[0] = work;
    work2[1] = *idx + rstart; /* convert the local index to a global index */
    ierr     = MPI_Allreduce(work2,z2,2,MPIU_REAL,VecMin_Local_Op,((PetscObject)xin)->comm);CHKERRQ(ierr);
    *z       = z2[0];
    *idx     = (PetscInt)z2[1];
  }
  PetscFunctionReturn(0);
}
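/*
   A minimal sketch (not the library's actual initialization code) of the
   MPI_Op_create() registration referred to in the comment above.  The guard
   macro and function name are hypothetical; in PETSc the real calls live in
   the package-initialization routine.  commute = 1 because both reductions
   are commutative (ties are broken by taking the minimum index).
*/
#if defined(PETSC_PVEC2_USAGE_SKETCH)
static PetscErrorCode PVec2RegisterOpsSketch(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Op_create(VecMax_Local,1,&VecMax_Local_Op);CHKERRQ(ierr);
  ierr = MPI_Op_create(VecMin_Local,1,&VecMin_Local_Op);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif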