/*
   This file contains routines for Parallel vector operations.
 */
#include <petscsys.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

static PetscErrorCode VecSetOption_MPI(Vec V,VecOption op,PetscBool flag)
{
  Vec_MPI        *v = (Vec_MPI*)V->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag;
    break;
  case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag;
    break;
  case VEC_SUBSET_OFF_PROC_ENTRIES:
    v->assembly_subset = flag; /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
    if (!v->assembly_subset) { /* User indicates "do not reuse the communication pattern" */
      ierr = VecAssemblyReset_MPI(V);CHKERRQ(ierr); /* Reset existing pattern to free memory */
      v->first_assembly_done = PETSC_FALSE; /* Mark that the first assembly has not been done */
    }
    break;
  }

  PetscFunctionReturn(0);
}

static PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI        *v = (Vec_MPI*)vin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = 0;
  if (v->localrep) {
    ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
{
  Vec               X   = (Vec)ctx;
  Vec_MPI           *x  = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
  PetscInt          bs  = X->map->bs;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
     messages can be empty, but we have to send them this time if we sent them before because the
     receiver is expecting them.
   */
  if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
    ierr = MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
  }
  if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
    ierr = MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
{
  Vec               X   = (Vec)ctx;
  Vec_MPI           *x  = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
  PetscErrorCode    ierr;
  PetscInt          bs  = X->map->bs;
  VecAssemblyFrame  *frame;

  PetscFunctionBegin;
  ierr = PetscSegBufferGet(x->segrecvframe,1,&frame);CHKERRQ(ierr);

  if (hdr->count) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
    frame->pendings = 2;
  } else {
    frame->ints     = NULL;
    frame->scalars  = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
    frame->pendingb = 2;
  } else {
    frame->intb     = NULL;
    frame->scalarb  = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(0);
}
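
/* A typical call sequence that exercises the assembly path below (a minimal sketch,
   not taken from this file; X, nsteps, m, idx, and vals are hypothetical and error
   checking is abbreviated):

     ierr = VecSetOption(X,VEC_SUBSET_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
     for (t=0; t<nsteps; t++) {
       ierr = VecSetValues(X,m,idx,vals,ADD_VALUES);CHKERRQ(ierr);
       ierr = VecAssemblyBegin(X);CHKERRQ(ierr);
       ierr = VecAssemblyEnd(X);CHKERRQ(ierr);
     }

   With VEC_SUBSET_OFF_PROC_ENTRIES set, the first assembly builds the send/receive
   pattern and later assemblies reuse it (see first_assembly_done), provided each later
   assembly communicates with a subset of the ranks used in the first one. */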

static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI        *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  PetscInt       i,j,jb,bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(0);

  ierr = PetscObjectGetComm((PetscObject)X,&comm);CHKERRQ(ierr);
  ierr = VecGetBlockSize(X,&bs);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    InsertMode addv;
    ierr = MPIU_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);CHKERRQ(ierr);
    if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
  }
#endif
  X->bstash.insertmode = X->stash.insertmode;  /* Block stash implicitly tracks InsertMode of scalar stash */

  ierr = VecStashSortCompress_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashSortCompress_Private(&X->bstash);CHKERRQ(ierr);

  if (!x->sendranks) {
    PetscMPIInt nowners,bnowners,*owners,*bowners;
    PetscInt    ntmp;
    ierr = VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners);CHKERRQ(ierr);
    ierr = VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners);CHKERRQ(ierr);
    ierr = PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks);CHKERRQ(ierr);
    x->nsendranks = ntmp;
    ierr = PetscFree(owners);CHKERRQ(ierr);
    ierr = PetscFree(bowners);CHKERRQ(ierr);
    ierr = PetscMalloc1(x->nsendranks,&x->sendhdr);CHKERRQ(ierr);
    ierr = PetscCalloc1(x->nsendranks,&x->sendptrs);CHKERRQ(ierr);
  }
  for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
    PetscMPIInt rank = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead.
     */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for ( ; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
      for ( ; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) {ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint);CHKERRQ(ierr);}
  if (!x->segrecvscalar) {ierr = PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar);CHKERRQ(ierr);}
  if (!x->segrecvframe) {ierr = PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe);CHKERRQ(ierr);}
  if (x->first_assembly_done) { /* this is not the first assembly */
    PetscMPIInt tag[4];
    for (i=0; i<4; i++) {ierr = PetscCommGetNewTag(comm,&tag[i]);CHKERRQ(ierr);}
    for (i=0; i<x->nsendranks; i++) {
      ierr = VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X);CHKERRQ(ierr);
    }
    for (i=0; i<x->nrecvranks; i++) {
      ierr = VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X);CHKERRQ(ierr);
    }
    x->use_status = PETSC_TRUE;
  } else { /* First time assembly */
    ierr = PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X);CHKERRQ(ierr);
    x->use_status = PETSC_FALSE;
  }

  /* The first_assembly_done flag is only meaningful when x->assembly_subset is set.
     When assembly_subset is set, mark the first assembly as done so that subsequent
     assemblies reuse the communication pattern built here.
   */
  x->first_assembly_done = x->assembly_subset;

  {
    PetscInt nstash,reallocs;
    ierr = VecStashGetInfo_Private(&X->stash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
    ierr = VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI          *x = (Vec_MPI*)X->data;
  PetscInt         bs = X->map->bs;
  PetscMPIInt      npending,*some_indices,r;
  MPI_Status       *some_statuses;
  PetscScalar      *xarray;
  PetscErrorCode   ierr;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode  = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(0);
  }

  if (!x->segrecvframe) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
  ierr = VecGetArray(X,&xarray);CHKERRQ(ierr);
  ierr = PetscSegBufferExtractInPlace(x->segrecvframe,&frame);CHKERRQ(ierr);
  ierr = PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses);CHKERRQ(ierr);
  for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending>0) {
    PetscMPIInt ndone=0,ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.
     * We skip it on the first assembly, or when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could
     * exchange exact sizes in the initial rendezvous.  When the rendezvous is elided, however, we use MPI_Status
     * to get actual message lengths, so that subsequent assembly can set a proper subset of the values. */
    ierr = MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE);CHKERRQ(ierr);
    for (ii=0; ii<ndone; ii++) {
      PetscInt    i     = some_indices[ii]/4,j,k;
      InsertMode  imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt    *recvint;
      PetscScalar *recvscalar;
      PetscBool   intmsg   = (PetscBool)(some_indices[ii]%2 == 0);
      PetscBool   blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
        } else count = x->recvhdr[i].count;
        for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          if (*recvint < X->map->rstart || X->map->rend <= *recvint) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %D out of local range [%D,%D)",*recvint,X->map->rstart,X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      } else { /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
          PetscInt loc = (*recvint)*bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      }
    }
  }
  ierr = VecRestoreArray(X,&xarray);CHKERRQ(ierr);
  ierr = MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
  ierr = PetscFree2(some_indices,some_statuses);CHKERRQ(ierr);
  if (x->assembly_subset) {
    void *dummy;                /* reset segbuffers */
    ierr = PetscSegBufferExtractInPlace(x->segrecvint,&dummy);CHKERRQ(ierr);
    ierr = PetscSegBufferExtractInPlace(x->segrecvscalar,&dummy);CHKERRQ(ierr);
  } else {
    ierr = VecAssemblyReset_MPI(X);CHKERRQ(ierr);
  }

  X->stash.insertmode  = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  ierr = VecStashScatterEnd_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashScatterEnd_Private(&X->bstash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI        *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(x->sendreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->recvreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->sendranks);CHKERRQ(ierr);
  ierr = PetscFree(x->recvranks);CHKERRQ(ierr);
  ierr = PetscFree(x->sendhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->recvhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->sendptrs);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvint);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvscalar);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvframe);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode VecSetFromOptions_MPI(PetscOptionItems *PetscOptionsObject,Vec X)
{
#if !defined(PETSC_HAVE_MPIUNI)
  PetscErrorCode ierr;
  PetscBool      flg = PETSC_FALSE,set;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"VecMPI Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-vec_assembly_legacy","Use MPI 1 version of assembly","",flg,&flg,&set);CHKERRQ(ierr);
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI : VecAssemblyEnd_MPI_BTS;
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
#else
  X->ops->assemblybegin = VecAssemblyBegin_MPI;
  X->ops->assemblyend   = VecAssemblyEnd_MPI;
#endif
  PetscFunctionReturn(0);
}

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI, /* 20 */
                                VecAssemblyBegin_MPI_BTS,
                                VecAssemblyEnd_MPI_BTS,
                                0,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                0,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                VecSetFromOptions_MPI, /* set from options */
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideGather_Default,
                                VecStrideScatter_Default,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideSubSetGather_Default,
                                VecStrideSubSetScatter_Default,
                                0,
                                0
};

/*
  VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
  VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
  VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()

  If alloc is true and array is NULL then this routine allocates the space, otherwise
  no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);

  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr = PetscMalloc1(n,&s->array);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = PetscMemzero(s->array,n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
   */
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

   Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/

PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

   Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.
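
   Example usage (a minimal sketch; the sizes and the user-provided storage are
   hypothetical, and error checking is omitted):
.vb
   PetscScalar storage[10];
   Vec         x;
   VecCreateMPIWithArray(PETSC_COMM_WORLD,1,10,PETSC_DECIDE,storage,&x);
   VecDestroy(&x);
.ve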

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
-  array  - the space to store the vector values (must be at least of length n+nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv     - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).
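
   Example usage (a minimal sketch; the local size and the ghost indices are
   hypothetical placeholders, and error checking is omitted):
.vb
   Vec      x;
   PetscInt ghosts[2] = {0,1};
   VecCreate(PETSC_COMM_WORLD,&x);
   VecSetSizes(x,10,PETSC_DECIDE);
   VecSetType(x,VECMPI);
   VecMPISetGhost(x,2,ghosts);
.ve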

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if this is already a fully existent VECMPI then destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
         Create scatter context for scattering (updating) ghost values
    */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array  - the space to store the vector values (must be at least of length n+nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost.

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb     = n/bs;
  ierr   = PetscMalloc1(nb+nghost,&indices);CHKERRQ(ierr);
  ierr   = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  rstart = rstart/bs;

  for (i=0; i<nb; i++)     indices[i]    = rstart + i;
  for (i=0; i<nghost; i++) indices[nb+i] = ghosts[i];

  ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
   The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - the block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
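
/* Example (not part of the PETSc sources): a minimal sketch of block ghosting with
   hypothetical sizes; bghosts holds global block indices owned by other ranks.

     Vec      x;
     PetscInt bghosts[2] = {3,7};
     ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,2,8,PETSC_DECIDE,2,bghosts,&x);CHKERRQ(ierr);

   Each ghost block contributes bs entries to the local form obtained with
   VecGhostGetLocalForm(), so the local form above has 8 + 2*2 = 12 entries. */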