xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision 3a61192c58448b12cde16ea84d8948c8e04fc9e4)
1 
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include <petscsys.h>
6 #include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/
7 
8 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
9 {
10   PetscScalar    sum,work;
11 
12   PetscFunctionBegin;
13   PetscCall(VecDot_Seq(xin,yin,&work));
14   PetscCall(MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin)));
15   *z   = sum;
16   PetscFunctionReturn(0);
17 }
18 
19 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
20 {
21   PetscScalar    sum,work;
22 
23   PetscFunctionBegin;
24   PetscCall(VecTDot_Seq(xin,yin,&work));
25   PetscCall(MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin)));
26   *z   = sum;
27   PetscFunctionReturn(0);
28 }
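/*
   Both routines above follow the same pattern: compute the local contribution with the
   sequential kernel, then combine it across ranks with a single MPIU_Allreduce(). A minimal
   sketch of that local-compute-then-reduce pattern at the user level (the helper name
   MyGlobalSum is hypothetical, not part of PETSc):

     static PetscErrorCode MyGlobalSum(Vec x,PetscScalar *gsum)
     {
       const PetscScalar *a;
       PetscScalar       lsum = 0.0;
       PetscInt          i,n;

       PetscFunctionBegin;
       PetscCall(VecGetLocalSize(x,&n));
       PetscCall(VecGetArrayRead(x,&a));
       for (i=0; i<n; i++) lsum += a[i];              // local part, no communication
       PetscCall(VecRestoreArrayRead(x,&a));
       PetscCall(MPIU_Allreduce(&lsum,gsum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)x)));
       PetscFunctionReturn(0);
     }
*/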
29 
30 extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
31 
32 static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
33 {
34   Vec_MPI        *v = (Vec_MPI*)vin->data;
35 
36   PetscFunctionBegin;
37   PetscCheck(!v->unplacedarray,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
38   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
39   v->array         = (PetscScalar*)a;
40   if (v->localrep) {
41     PetscCall(VecPlaceArray(v->localrep,a));
42   }
43   PetscFunctionReturn(0);
44 }
45 
46 PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
47 {
48   Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
49   PetscScalar    *array;
50 
51   PetscFunctionBegin;
52   PetscCall(VecCreate(PetscObjectComm((PetscObject)win),v));
53   PetscCall(PetscLayoutReference(win->map,&(*v)->map));
54 
55   PetscCall(VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,NULL));
56   vw   = (Vec_MPI*)(*v)->data;
57   PetscCall(PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps)));
58 
59   /* save local representation of the parallel vector (and scatter) if it exists */
60   if (w->localrep) {
61     PetscCall(VecGetArray(*v,&array));
62     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep));
63     PetscCall(PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps)));
64     PetscCall(VecRestoreArray(*v,&array));
65     PetscCall(PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep));
66 
67     vw->localupdate = w->localupdate;
68     if (vw->localupdate) {
69       PetscCall(PetscObjectReference((PetscObject)vw->localupdate));
70     }
71   }
72 
73   /* New vector should inherit stashing property of parent */
74   (*v)->stash.donotstash   = win->stash.donotstash;
75   (*v)->stash.ignorenegidx = win->stash.ignorenegidx;
76 
77   PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist));
78   PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist));
79 
80   (*v)->map->bs   = PetscAbs(win->map->bs);
81   (*v)->bstash.bs = win->bstash.bs;
82   PetscFunctionReturn(0);
83 }
84 
85 static PetscErrorCode VecSetOption_MPI(Vec V,VecOption op,PetscBool flag)
86 {
87   Vec_MPI        *v = (Vec_MPI*)V->data;
88 
89   PetscFunctionBegin;
90   switch (op) {
91   case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag;
92     break;
93   case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag;
94     break;
95   case VEC_SUBSET_OFF_PROC_ENTRIES:
96     v->assembly_subset = flag; /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
97     if (!v->assembly_subset) { /* User indicates "do not reuse the communication pattern" */
98       PetscCall(VecAssemblyReset_MPI(V)); /* Reset existing pattern to free memory */
99       v->first_assembly_done = PETSC_FALSE; /* Mark the first assembly as not done */
100     }
101     break;
102   }
103 
104   PetscFunctionReturn(0);
105 }
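/*
   Usage sketch for VEC_SUBSET_OFF_PROC_ENTRIES handled above (illustrative only; nsteps, nidx,
   idx and vals are assumed to be provided by the caller). Setting the option promises that later
   assemblies communicate with a subset of the ranks of the first one, so the communication
   pattern built during the first assembly can be reused:

     PetscCall(VecSetOption(X,VEC_SUBSET_OFF_PROC_ENTRIES,PETSC_TRUE));
     for (PetscInt it=0; it<nsteps; it++) {
       PetscCall(VecSetValues(X,nidx,idx,vals,ADD_VALUES));   // may include off-process entries
       PetscCall(VecAssemblyBegin(X));
       PetscCall(VecAssemblyEnd(X));
     }
*/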
106 
107 static PetscErrorCode VecResetArray_MPI(Vec vin)
108 {
109   Vec_MPI        *v = (Vec_MPI*)vin->data;
110 
111   PetscFunctionBegin;
112   v->array         = v->unplacedarray;
113   v->unplacedarray = NULL;
114   if (v->localrep) {
115     PetscCall(VecResetArray(v->localrep));
116   }
117   PetscFunctionReturn(0);
118 }
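/*
   Usage sketch for the place/reset pair above (illustrative only; mine is a caller-owned
   buffer of at least the local length). VecPlaceArray() makes the vector use the supplied
   storage and VecResetArray() restores the array saved in unplacedarray:

     PetscScalar *mine;
     PetscInt    nlocal;

     PetscCall(VecGetLocalSize(X,&nlocal));
     PetscCall(PetscMalloc1(nlocal,&mine));
     PetscCall(VecPlaceArray(X,mine));   // X now reads and writes mine[]
     PetscCall(VecSet(X,1.0));
     PetscCall(VecResetArray(X));        // X uses its original storage again
     PetscCall(PetscFree(mine));
*/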
119 
120 static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
121 {
122   Vec X = (Vec)ctx;
123   Vec_MPI *x = (Vec_MPI*)X->data;
124   VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
125   PetscInt bs = X->map->bs;
126 
127   PetscFunctionBegin;
128   /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
129      messages can be empty, but we have to send them this time if we sent them before because the
130      receiver is expecting them.
131    */
132   if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
133     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]));
134     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]));
135   }
136   if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
137     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]));
138     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]));
139   }
140   PetscFunctionReturn(0);
141 }
142 
143 static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
144 {
145   Vec X = (Vec)ctx;
146   Vec_MPI *x = (Vec_MPI*)X->data;
147   VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
148   PetscInt bs = X->map->bs;
149   VecAssemblyFrame *frame;
150 
151   PetscFunctionBegin;
152   PetscCall(PetscSegBufferGet(x->segrecvframe,1,&frame));
153 
154   if (hdr->count) {
155     PetscCall(PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints));
156     PetscCallMPI(MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]));
157     PetscCall(PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars));
158     PetscCallMPI(MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]));
159     frame->pendings = 2;
160   } else {
161     frame->ints = NULL;
162     frame->scalars = NULL;
163     frame->pendings = 0;
164   }
165 
166   if (hdr->bcount) {
167     PetscCall(PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb));
168     PetscCallMPI(MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]));
169     PetscCall(PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb));
170     PetscCallMPI(MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]));
171     frame->pendingb = 2;
172   } else {
173     frame->intb = NULL;
174     frame->scalarb = NULL;
175     frame->pendingb = 0;
176   }
177   PetscFunctionReturn(0);
178 }
179 
180 static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
181 {
182   Vec_MPI        *x = (Vec_MPI*)X->data;
183   MPI_Comm       comm;
184   PetscInt       i,j,jb,bs;
185 
186   PetscFunctionBegin;
187   if (X->stash.donotstash) PetscFunctionReturn(0);
188 
189   PetscCall(PetscObjectGetComm((PetscObject)X,&comm));
190   PetscCall(VecGetBlockSize(X,&bs));
191   if (PetscDefined(USE_DEBUG)) {
192     InsertMode addv;
193     PetscCall(MPIU_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm));
194     PetscCheck(addv != (ADD_VALUES|INSERT_VALUES),comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
195   }
196   X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */
197 
198   PetscCall(VecStashSortCompress_Private(&X->stash));
199   PetscCall(VecStashSortCompress_Private(&X->bstash));
200 
201   if (!x->sendranks) {
202     PetscMPIInt nowners,bnowners,*owners,*bowners;
203     PetscInt ntmp;
204     PetscCall(VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners));
205     PetscCall(VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners));
206     PetscCall(PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks));
207     x->nsendranks = ntmp;
208     PetscCall(PetscFree(owners));
209     PetscCall(PetscFree(bowners));
210     PetscCall(PetscMalloc1(x->nsendranks,&x->sendhdr));
211     PetscCall(PetscCalloc1(x->nsendranks,&x->sendptrs));
212   }
213   for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
214     PetscMPIInt rank = x->sendranks[i];
215     x->sendhdr[i].insertmode = X->stash.insertmode;
216     /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
217      * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
218      * there is nothing new to send, so that size-zero messages get sent instead. */
219     x->sendhdr[i].count = 0;
220     if (X->stash.n) {
221       x->sendptrs[i].ints    = &X->stash.idx[j];
222       x->sendptrs[i].scalars = &X->stash.array[j];
223       for (; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
224     }
225     x->sendhdr[i].bcount = 0;
226     if (X->bstash.n) {
227       x->sendptrs[i].intb    = &X->bstash.idx[jb];
228       x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
229       for (; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
230     }
231   }
232 
233   if (!x->segrecvint) PetscCall(PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint));
234   if (!x->segrecvscalar) PetscCall(PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar));
235   if (!x->segrecvframe) PetscCall(PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe));
236   if (x->first_assembly_done) { /* this is not the first assembly */
237     PetscMPIInt tag[4];
238     for (i=0; i<4; i++) PetscCall(PetscCommGetNewTag(comm,&tag[i]));
239     for (i=0; i<x->nsendranks; i++) {
240       PetscCall(VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X));
241     }
242     for (i=0; i<x->nrecvranks; i++) {
243       PetscCall(VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X));
244     }
245     x->use_status = PETSC_TRUE;
246   } else { /* First time assembly */
247     PetscCall(PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X));
248     x->use_status = PETSC_FALSE;
249   }
250 
251   /* The first_assembly_done flag is only meaningful when x->assembly_subset is set;
252      in that case, record here that the first assembly has now been done.
253    */
254   x->first_assembly_done = x->assembly_subset;
255 
256   {
257     PetscInt nstash,reallocs;
258     PetscCall(VecStashGetInfo_Private(&X->stash,&nstash,&reallocs));
259     PetscCall(PetscInfo(X,"Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n",nstash,reallocs));
260     PetscCall(VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs));
261     PetscCall(PetscInfo(X,"Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n",nstash,reallocs));
262   }
263   PetscFunctionReturn(0);
264 }
265 
266 static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
267 {
268   Vec_MPI *x = (Vec_MPI*)X->data;
269   PetscInt bs = X->map->bs;
270   PetscMPIInt npending,*some_indices,r;
271   MPI_Status  *some_statuses;
272   PetscScalar *xarray;
273   VecAssemblyFrame *frame;
274 
275   PetscFunctionBegin;
276   if (X->stash.donotstash) {
277     X->stash.insertmode = NOT_SET_VALUES;
278     X->bstash.insertmode = NOT_SET_VALUES;
279     PetscFunctionReturn(0);
280   }
281 
282   PetscCheck(x->segrecvframe,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
283   PetscCall(VecGetArray(X,&xarray));
284   PetscCall(PetscSegBufferExtractInPlace(x->segrecvframe,&frame));
285   PetscCall(PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses));
286   for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
287   while (npending>0) {
288     PetscMPIInt ndone=0,ii;
289     /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
290      * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
291      * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
292      * subsequent assembly can set a proper subset of the values. */
293     PetscCallMPI(MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE));
294     for (ii=0; ii<ndone; ii++) {
295       PetscInt i = some_indices[ii]/4,j,k;
296       InsertMode imode = (InsertMode)x->recvhdr[i].insertmode;
297       PetscInt *recvint;
298       PetscScalar *recvscalar;
299       PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
300       PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
301       npending--;
302       if (!blockmsg) { /* Scalar stash */
303         PetscMPIInt count;
304         if (--frame[i].pendings > 0) continue;
305         if (x->use_status) {
306           PetscCallMPI(MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count));
307         } else count = x->recvhdr[i].count;
308         for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
309           PetscInt loc = *recvint - X->map->rstart;
310           PetscCheck(*recvint >= X->map->rstart && X->map->rend > *recvint,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %" PetscInt_FMT " out of local range [%" PetscInt_FMT ",%" PetscInt_FMT ")",*recvint,X->map->rstart,X->map->rend);
311           switch (imode) {
312           case ADD_VALUES:
313             xarray[loc] += *recvscalar++;
314             break;
315           case INSERT_VALUES:
316             xarray[loc] = *recvscalar++;
317             break;
318           default: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
319           }
320         }
321       } else {                  /* Block stash */
322         PetscMPIInt count;
323         if (--frame[i].pendingb > 0) continue;
324         if (x->use_status) {
325           PetscCallMPI(MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count));
326           if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
327         } else count = x->recvhdr[i].bcount;
328         for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
329           PetscInt loc = (*recvint)*bs - X->map->rstart;
330           switch (imode) {
331           case ADD_VALUES:
332             for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
333             break;
334           case INSERT_VALUES:
335             for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
336             break;
337           default: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
338           }
339         }
340       }
341     }
342   }
343   PetscCall(VecRestoreArray(X,&xarray));
344   PetscCallMPI(MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE));
345   PetscCall(PetscFree2(some_indices,some_statuses));
346   if (x->assembly_subset) {
347     PetscCall(PetscSegBufferExtractInPlace(x->segrecvint,NULL));
348     PetscCall(PetscSegBufferExtractInPlace(x->segrecvscalar,NULL));
349   } else {
350     PetscCall(VecAssemblyReset_MPI(X));
351   }
352 
353   X->stash.insertmode = NOT_SET_VALUES;
354   X->bstash.insertmode = NOT_SET_VALUES;
355   PetscCall(VecStashScatterEnd_Private(&X->stash));
356   PetscCall(VecStashScatterEnd_Private(&X->bstash));
357   PetscFunctionReturn(0);
358 }
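/*
   The begin/end pair above implements the communication behind the usual off-process
   insertion idiom. A minimal caller-side sketch (illustrative only; here every rank adds
   to global entry 0, which is owned by a single rank):

     PetscInt    zero = 0;
     PetscScalar one  = 1.0;

     PetscCall(VecSetValues(X,1,&zero,&one,ADD_VALUES));   // stashed if entry 0 is off-process
     PetscCall(VecAssemblyBegin(X));                       // stashes are communicated
     PetscCall(VecAssemblyEnd(X));                         // received entries are applied
*/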
359 
360 PetscErrorCode VecAssemblyReset_MPI(Vec X)
361 {
362   Vec_MPI *x = (Vec_MPI*)X->data;
363 
364   PetscFunctionBegin;
365   PetscCall(PetscFree(x->sendreqs));
366   PetscCall(PetscFree(x->recvreqs));
367   PetscCall(PetscFree(x->sendranks));
368   PetscCall(PetscFree(x->recvranks));
369   PetscCall(PetscFree(x->sendhdr));
370   PetscCall(PetscFree(x->recvhdr));
371   PetscCall(PetscFree(x->sendptrs));
372   PetscCall(PetscSegBufferDestroy(&x->segrecvint));
373   PetscCall(PetscSegBufferDestroy(&x->segrecvscalar));
374   PetscCall(PetscSegBufferDestroy(&x->segrecvframe));
375   PetscFunctionReturn(0);
376 }
377 
378 static PetscErrorCode VecSetFromOptions_MPI(PetscOptionItems *PetscOptionsObject,Vec X)
379 {
380 #if !defined(PETSC_HAVE_MPIUNI)
381   PetscBool      flg = PETSC_FALSE,set;
382 
383   PetscFunctionBegin;
384   PetscOptionsHeadBegin(PetscOptionsObject,"VecMPI Options");
385   PetscCall(PetscOptionsBool("-vec_assembly_legacy","Use MPI 1 version of assembly","",flg,&flg,&set));
386   if (set) {
387     X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
388     X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI   : VecAssemblyEnd_MPI_BTS;
389   }
390   PetscOptionsHeadEnd();
391 #else
392   PetscFunctionBegin;
393   X->ops->assemblybegin = VecAssemblyBegin_MPI;
394   X->ops->assemblyend   = VecAssemblyEnd_MPI;
395 #endif
396   PetscFunctionReturn(0);
397 }
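/*
   Usage sketch for the -vec_assembly_legacy option handled above (illustrative only; the
   option is normally given on the command line, the programmatic form is shown here):

     PetscCall(PetscOptionsSetValue(NULL,"-vec_assembly_legacy","true"));
     PetscCall(VecSetFromOptions(X));   // switches X to VecAssemblyBegin/End_MPI
*/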
398 
399 static struct _VecOps DvOps = {
400   PetscDesignatedInitializer(duplicate,VecDuplicate_MPI), /* 1 */
401   PetscDesignatedInitializer(duplicatevecs,VecDuplicateVecs_Default),
402   PetscDesignatedInitializer(destroyvecs,VecDestroyVecs_Default),
403   PetscDesignatedInitializer(dot,VecDot_MPI),
404   PetscDesignatedInitializer(mdot,VecMDot_MPI),
405   PetscDesignatedInitializer(norm,VecNorm_MPI),
406   PetscDesignatedInitializer(tdot,VecTDot_MPI),
407   PetscDesignatedInitializer(mtdot,VecMTDot_MPI),
408   PetscDesignatedInitializer(scale,VecScale_Seq),
409   PetscDesignatedInitializer(copy,VecCopy_Seq), /* 10 */
410   PetscDesignatedInitializer(set,VecSet_Seq),
411   PetscDesignatedInitializer(swap,VecSwap_Seq),
412   PetscDesignatedInitializer(axpy,VecAXPY_Seq),
413   PetscDesignatedInitializer(axpby,VecAXPBY_Seq),
414   PetscDesignatedInitializer(maxpy,VecMAXPY_Seq),
415   PetscDesignatedInitializer(aypx,VecAYPX_Seq),
416   PetscDesignatedInitializer(waxpy,VecWAXPY_Seq),
417   PetscDesignatedInitializer(axpbypcz,VecAXPBYPCZ_Seq),
418   PetscDesignatedInitializer(pointwisemult,VecPointwiseMult_Seq),
419   PetscDesignatedInitializer(pointwisedivide,VecPointwiseDivide_Seq),
420   PetscDesignatedInitializer(setvalues,VecSetValues_MPI), /* 20 */
421   PetscDesignatedInitializer(assemblybegin,VecAssemblyBegin_MPI_BTS),
422   PetscDesignatedInitializer(assemblyend,VecAssemblyEnd_MPI_BTS),
423   PetscDesignatedInitializer(getarray,NULL),
424   PetscDesignatedInitializer(getsize,VecGetSize_MPI),
425   PetscDesignatedInitializer(getlocalsize,VecGetSize_Seq),
426   PetscDesignatedInitializer(restorearray,NULL),
427   PetscDesignatedInitializer(max,VecMax_MPI),
428   PetscDesignatedInitializer(min,VecMin_MPI),
429   PetscDesignatedInitializer(setrandom,VecSetRandom_Seq),
430   PetscDesignatedInitializer(setoption,VecSetOption_MPI),
431   PetscDesignatedInitializer(setvaluesblocked,VecSetValuesBlocked_MPI),
432   PetscDesignatedInitializer(destroy,VecDestroy_MPI),
433   PetscDesignatedInitializer(view,VecView_MPI),
434   PetscDesignatedInitializer(placearray,VecPlaceArray_MPI),
435   PetscDesignatedInitializer(replacearray,VecReplaceArray_Seq),
436   PetscDesignatedInitializer(dot_local,VecDot_Seq),
437   PetscDesignatedInitializer(tdot_local,VecTDot_Seq),
438   PetscDesignatedInitializer(norm_local,VecNorm_Seq),
439   PetscDesignatedInitializer(mdot_local,VecMDot_Seq),
440   PetscDesignatedInitializer(mtdot_local,VecMTDot_Seq),
441   PetscDesignatedInitializer(load,VecLoad_Default),
442   PetscDesignatedInitializer(reciprocal,VecReciprocal_Default),
443   PetscDesignatedInitializer(conjugate,VecConjugate_Seq),
444   PetscDesignatedInitializer(setlocaltoglobalmapping,NULL),
445   PetscDesignatedInitializer(setvalueslocal,NULL),
446   PetscDesignatedInitializer(resetarray,VecResetArray_MPI),
447   PetscDesignatedInitializer(setfromoptions,VecSetFromOptions_MPI),/*set from options */
448   PetscDesignatedInitializer(maxpointwisedivide,VecMaxPointwiseDivide_Seq),
449   PetscDesignatedInitializer(pointwisemax,VecPointwiseMax_Seq),
450   PetscDesignatedInitializer(pointwisemaxabs,VecPointwiseMaxAbs_Seq),
451   PetscDesignatedInitializer(pointwisemin,VecPointwiseMin_Seq),
452   PetscDesignatedInitializer(getvalues,VecGetValues_MPI),
453   PetscDesignatedInitializer(sqrt,NULL),
454   PetscDesignatedInitializer(abs,NULL),
455   PetscDesignatedInitializer(exp,NULL),
456   PetscDesignatedInitializer(log,NULL),
457   PetscDesignatedInitializer(shift,NULL),
458   PetscDesignatedInitializer(create,NULL), /* really? */
459   PetscDesignatedInitializer(stridegather,VecStrideGather_Default),
460   PetscDesignatedInitializer(stridescatter,VecStrideScatter_Default),
461   PetscDesignatedInitializer(dotnorm2,NULL),
462   PetscDesignatedInitializer(getsubvector,NULL),
463   PetscDesignatedInitializer(restoresubvector,NULL),
464   PetscDesignatedInitializer(getarrayread,NULL),
465   PetscDesignatedInitializer(restorearrayread,NULL),
466   PetscDesignatedInitializer(stridesubsetgather,VecStrideSubSetGather_Default),
467   PetscDesignatedInitializer(stridesubsetscatter,VecStrideSubSetScatter_Default),
468   PetscDesignatedInitializer(viewnative,VecView_MPI),
469   PetscDesignatedInitializer(loadnative,NULL),
470   PetscDesignatedInitializer(getlocalvector,NULL),
471   PetscDesignatedInitializer(restorelocalvector,NULL),
472   PetscDesignatedInitializer(getlocalvectorread,NULL),
473   PetscDesignatedInitializer(restorelocalvectorread,NULL),
474   PetscDesignatedInitializer(bindtocpu,NULL),
475   PetscDesignatedInitializer(getarraywrite,NULL),
476   PetscDesignatedInitializer(restorearraywrite,NULL),
477   PetscDesignatedInitializer(getarrayandmemtype,NULL),
478   PetscDesignatedInitializer(restorearrayandmemtype,NULL),
479   PetscDesignatedInitializer(getarrayreadandmemtype,NULL),
480   PetscDesignatedInitializer(restorearrayreadandmemtype,NULL),
481   PetscDesignatedInitializer(getarraywriteandmemtype,NULL),
482   PetscDesignatedInitializer(restorearraywriteandmemtype,NULL),
483   PetscDesignatedInitializer(concatenate,NULL),
484   PetscDesignatedInitializer(sum,NULL),
485   PetscDesignatedInitializer(setpreallocationcoo,VecSetPreallocationCOO_MPI),
486   PetscDesignatedInitializer(setvaluescoo,VecSetValuesCOO_MPI)
487 };
488 
489 /*
490     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
491     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
492     VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()
493 
494     If alloc is true and array is NULL then this routine allocates the space, otherwise
495     no space is allocated.
496 */
497 PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
498 {
499   Vec_MPI        *s;
500 
501   PetscFunctionBegin;
502   PetscCall(PetscNewLog(v,&s));
503   v->data        = (void*)s;
504   PetscCall(PetscMemcpy(v->ops,&DvOps,sizeof(DvOps)));
505   s->nghost      = nghost;
506   v->petscnative = PETSC_TRUE;
507   if (array) v->offloadmask = PETSC_OFFLOAD_CPU;
508 
509   PetscCall(PetscLayoutSetUp(v->map));
510 
511   s->array           = (PetscScalar*)array;
512   s->array_allocated = NULL;
513   if (alloc && !array) {
514     PetscInt n = v->map->n+nghost;
515     PetscCall(PetscCalloc1(n,&s->array));
516     PetscCall(PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar)));
517     s->array_allocated = s->array;
518   }
519 
520   /* By default parallel vectors do not have local representation */
521   s->localrep    = NULL;
522   s->localupdate = NULL;
523 
524   v->stash.insertmode = NOT_SET_VALUES;
525   v->bstash.insertmode = NOT_SET_VALUES;
526   /* create the stashes. The block-size for bstash is set later when
527      VecSetValuesBlocked is called.
528   */
529   PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash));
530   PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash));
531 
532 #if defined(PETSC_HAVE_MATLAB_ENGINE)
533   PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default));
534   PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default));
535 #endif
536   PetscCall(PetscObjectChangeTypeName((PetscObject)v,VECMPI));
537   PetscFunctionReturn(0);
538 }
539 
540 /*MC
541    VECMPI - VECMPI = "mpi" - The basic parallel vector
542 
543    Options Database Keys:
544 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
545 
546   Level: beginner
547 
548 .seealso: `VecCreate()`, `VecSetType()`, `VecSetFromOptions()`, `VecCreateMPIWithArray()`, `VECMPI`, `VecType`, `VecCreateMPI()`
549 M*/
550 
551 PetscErrorCode VecCreate_MPI(Vec vv)
552 {
553   PetscFunctionBegin;
554   PetscCall(VecCreate_MPI_Private(vv,PETSC_TRUE,0,NULL));
555   PetscFunctionReturn(0);
556 }
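/*
   Typical creation of a VECMPI vector (illustrative sketch; the global size 100 is arbitrary):

     Vec v;

     PetscCall(VecCreate(PETSC_COMM_WORLD,&v));
     PetscCall(VecSetSizes(v,PETSC_DECIDE,100));
     PetscCall(VecSetType(v,VECMPI));      // or VecSetFromOptions(v) to honor -vec_type
     PetscCall(VecSet(v,0.0));
     PetscCall(VecDestroy(&v));
*/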
557 
558 /*MC
559    VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process
560 
561    Options Database Keys:
562 . -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()
563 
564   Level: beginner
565 
566 .seealso: `VecCreateSeq()`, `VecCreateMPI()`
567 M*/
568 
569 PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
570 {
571   PetscMPIInt    size;
572 
573   PetscFunctionBegin;
574   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)v),&size));
575   if (size == 1) {
576     PetscCall(VecSetType(v,VECSEQ));
577   } else {
578     PetscCall(VecSetType(v,VECMPI));
579   }
580   PetscFunctionReturn(0);
581 }
582 
583 /*@C
584    VecCreateMPIWithArray - Creates a parallel, array-style vector,
585    where the user provides the array space to store the vector values.
586 
587    Collective
588 
589    Input Parameters:
590 +  comm  - the MPI communicator to use
591 .  bs    - block size, same meaning as VecSetBlockSize()
592 .  n     - local vector length, cannot be PETSC_DECIDE
593 .  N     - global vector length (or PETSC_DECIDE to have it calculated)
594 -  array - the user-provided array to store the vector values
595 
596    Output Parameter:
597 .  vv - the vector
598 
599    Notes:
600    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
601    same type as an existing vector.
602 
603    If the user-provided array is NULL, then VecPlaceArray() can be used
604    at a later stage to SET the array for storing the vector values.
605 
606    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
607    The user should not free the array until the vector is destroyed.
608 
609    Level: intermediate
610 
611 .seealso: `VecCreateSeqWithArray()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateGhost()`,
612           `VecCreateMPI()`, `VecCreateGhostWithArray()`, `VecPlaceArray()`
613 
614 @*/
615 PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
616 {
617   PetscFunctionBegin;
618   PetscCheck(n != PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
619   PetscCall(PetscSplitOwnership(comm,&n,&N));
620   PetscCall(VecCreate(comm,vv));
621   PetscCall(VecSetSizes(*vv,n,N));
622   PetscCall(VecSetBlockSize(*vv,bs));
623   PetscCall(VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array));
624   PetscFunctionReturn(0);
625 }
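/*
   Usage sketch (illustrative only; the local length 4 and block size 1 are arbitrary). As noted
   in the man page above, the caller keeps ownership of storage[] and must not free it before
   the vector is destroyed:

     PetscScalar storage[4];
     Vec         v;

     PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD,1,4,PETSC_DECIDE,storage,&v));
     PetscCall(VecSet(v,2.0));    // writes into storage[]
     PetscCall(VecDestroy(&v));   // storage[] is not freed here
*/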
626 
627 /*@C
628    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
629    the caller allocates the array space.
630 
631    Collective
632 
633    Input Parameters:
634 +  comm - the MPI communicator to use
635 .  n - local vector length
636 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
637 .  nghost - number of local ghost points
638 .  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
639 -  array - the space to store the vector values (must be of length at least n + nghost)
640 
641    Output Parameter:
642 .  vv - the global vector representation (without ghost points as part of vector)
643 
644    Notes:
645    Use VecGhostGetLocalForm() to access the local, ghosted representation
646    of the vector.
647 
648    This also automatically sets the ISLocalToGlobalMapping() for this vector.
649 
650    Level: advanced
651 
652 .seealso: `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
653           `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
654           `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`
655 
656 @*/
657 PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
658 {
659   Vec_MPI                *w;
660   PetscScalar            *larray;
661   IS                     from,to;
662   ISLocalToGlobalMapping ltog;
663   PetscInt               rstart,i,*indices;
664 
665   PetscFunctionBegin;
666   *vv = NULL;
667 
668   PetscCheck(n != PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
669   PetscCheck(nghost != PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
670   PetscCheck(nghost >= 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
671   PetscCall(PetscSplitOwnership(comm,&n,&N));
672   /* Create global representation */
673   PetscCall(VecCreate(comm,vv));
674   PetscCall(VecSetSizes(*vv,n,N));
675   PetscCall(VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array));
676   w    = (Vec_MPI*)(*vv)->data;
677   /* Create local representation */
678   PetscCall(VecGetArray(*vv,&larray));
679   PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep));
680   PetscCall(PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep));
681   PetscCall(VecRestoreArray(*vv,&larray));
682 
683   /*
684        Create scatter context for scattering (updating) ghost values
685   */
686   PetscCall(ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from));
687   PetscCall(ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to));
688   PetscCall(VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate));
689   PetscCall(PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate));
690   PetscCall(ISDestroy(&to));
691   PetscCall(ISDestroy(&from));
692 
693   /* set local to global mapping for ghosted vector */
694   PetscCall(PetscMalloc1(n+nghost,&indices));
695   PetscCall(VecGetOwnershipRange(*vv,&rstart,NULL));
696   for (i=0; i<n; i++) {
697     indices[i] = rstart + i;
698   }
699   for (i=0; i<nghost; i++) {
700     indices[n+i] = ghosts[i];
701   }
702   PetscCall(ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog));
703   PetscCall(VecSetLocalToGlobalMapping(*vv,ltog));
704   PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
705   PetscFunctionReturn(0);
706 }
707 
708 /*@
709    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
710 
711    Collective
712 
713    Input Parameters:
714 +  comm - the MPI communicator to use
715 .  n - local vector length
716 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
717 .  nghost - number of local ghost points
718 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
719 
720    Output Parameter:
721 .  vv - the global vector representation (without ghost points as part of vector)
722 
723    Notes:
724    Use VecGhostGetLocalForm() to access the local, ghosted representation
725    of the vector.
726 
727    This also automatically sets the ISLocalToGlobalMapping() for this vector.
728 
729    Level: advanced
730 
731 .seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
732           `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
733           `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
734           `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`
735 
736 @*/
737 PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
738 {
739   PetscFunctionBegin;
740   PetscCall(VecCreateGhostWithArray(comm,n,N,nghost,ghosts,NULL,vv));
741   PetscFunctionReturn(0);
742 }
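/*
   Usage sketch for a ghosted vector (illustrative only; the local length 5 and the ghost
   indices {0,1} are placeholders and should refer to entries owned by other ranks in a
   real code):

     PetscInt ghosts[2] = {0,1};
     Vec      v,lv;

     PetscCall(VecCreateGhost(PETSC_COMM_WORLD,5,PETSC_DECIDE,2,ghosts,&v));
     PetscCall(VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD));   // fill the ghost slots
     PetscCall(VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD));
     PetscCall(VecGhostGetLocalForm(v,&lv));      // sequential view of length 5 + 2
     PetscCall(VecGhostRestoreLocalForm(v,&lv));
     PetscCall(VecDestroy(&v));
*/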
743 
744 /*@
745    VecMPISetGhost - Sets the ghost points for an MPI ghost vector
746 
747    Collective on Vec
748 
749    Input Parameters:
750 +  vv - the MPI vector
751 .  nghost - number of local ghost points
752 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
753 
754    Notes:
755    Use VecGhostGetLocalForm() to access the local, ghosted representation
756    of the vector.
757 
758    This also automatically sets the ISLocalToGlobalMapping() for this vector.
759 
760    You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).
761 
762    Level: advanced
763 
764 .seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
765           `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
766           `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
767           `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`
768 
769 @*/
770 PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
771 {
772   PetscBool      flg;
773 
774   PetscFunctionBegin;
775   PetscCall(PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg));
776   /* if this is already a fully built VECMPI, destroy it and rebuild it with ghosting */
777   if (flg) {
778     PetscInt               n,N;
779     Vec_MPI                *w;
780     PetscScalar            *larray;
781     IS                     from,to;
782     ISLocalToGlobalMapping ltog;
783     PetscInt               rstart,i,*indices;
784     MPI_Comm               comm;
785 
786     PetscCall(PetscObjectGetComm((PetscObject)vv,&comm));
787     n    = vv->map->n;
788     N    = vv->map->N;
789     PetscCall((*vv->ops->destroy)(vv));
790     PetscCall(VecSetSizes(vv,n,N));
791     PetscCall(VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL));
792     w    = (Vec_MPI*)(vv)->data;
793     /* Create local representation */
794     PetscCall(VecGetArray(vv,&larray));
795     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep));
796     PetscCall(PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep));
797     PetscCall(VecRestoreArray(vv,&larray));
798 
799     /*
800      Create scatter context for scattering (updating) ghost values
801      */
802     PetscCall(ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from));
803     PetscCall(ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to));
804     PetscCall(VecScatterCreate(vv,from,w->localrep,to,&w->localupdate));
805     PetscCall(PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate));
806     PetscCall(ISDestroy(&to));
807     PetscCall(ISDestroy(&from));
808 
809     /* set local to global mapping for ghosted vector */
810     PetscCall(PetscMalloc1(n+nghost,&indices));
811     PetscCall(VecGetOwnershipRange(vv,&rstart,NULL));
812 
813     for (i=0; i<n; i++)      indices[i]   = rstart + i;
814     for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];
815 
816     PetscCall(ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog));
817     PetscCall(VecSetLocalToGlobalMapping(vv,ltog));
818     PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
819   } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
820   else PetscCheck(((PetscObject)vv)->type_name,PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
821   PetscFunctionReturn(0);
822 }
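/*
   Usage sketch (illustrative only; the sizes and the ghost index are placeholders). Per the
   man page above, the type and sizes must be set before the ghosts:

     Vec      v;
     PetscInt ghosts[1] = {0};

     PetscCall(VecCreate(PETSC_COMM_WORLD,&v));
     PetscCall(VecSetSizes(v,4,PETSC_DECIDE));
     PetscCall(VecSetType(v,VECMPI));
     PetscCall(VecMPISetGhost(v,1,ghosts));
     PetscCall(VecDestroy(&v));
*/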
823 
824 /* ------------------------------------------------------------------------------------------*/
825 /*@C
826    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
827    the caller allocates the array space. Indices in the ghost region are based on blocks.
828 
829    Collective
830 
831    Input Parameters:
832 +  comm - the MPI communicator to use
833 .  bs - block size
834 .  n - local vector length
835 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
836 .  nghost - number of local ghost blocks
837 .  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
838 -  array - the space to store the vector values (must be of length at least n + nghost*bs)
839 
840    Output Parameter:
841 .  vv - the global vector representation (without ghost points as part of vector)
842 
843    Notes:
844    Use VecGhostGetLocalForm() to access the local, ghosted representation
845    of the vector.
846 
847    n is the local vector size (total local size, not the number of blocks), while nghost
848    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
849    portion is bs*nghost
850 
851    Level: advanced
852 
853 .seealso: `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
854           `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
855           `VecCreateGhostWithArray()`, `VecCreateGhostBlock()`
856 
857 @*/
858 PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
859 {
860   Vec_MPI                *w;
861   PetscScalar            *larray;
862   IS                     from,to;
863   ISLocalToGlobalMapping ltog;
864   PetscInt               rstart,i,nb,*indices;
865 
866   PetscFunctionBegin;
867   *vv = NULL;
868 
869   PetscCheck(n != PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
870   PetscCheck(nghost != PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
871   PetscCheck(nghost >= 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
872   PetscCheck(n % bs == 0,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
873   PetscCall(PetscSplitOwnership(comm,&n,&N));
874   /* Create global representation */
875   PetscCall(VecCreate(comm,vv));
876   PetscCall(VecSetSizes(*vv,n,N));
877   PetscCall(VecSetBlockSize(*vv,bs));
878   PetscCall(VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array));
879   w    = (Vec_MPI*)(*vv)->data;
880   /* Create local representation */
881   PetscCall(VecGetArray(*vv,&larray));
882   PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep));
883   PetscCall(PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep));
884   PetscCall(VecRestoreArray(*vv,&larray));
885 
886   /*
887        Create scatter context for scattering (updating) ghost values
888   */
889   PetscCall(ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from));
890   PetscCall(ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to));
891   PetscCall(VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate));
892   PetscCall(PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate));
893   PetscCall(ISDestroy(&to));
894   PetscCall(ISDestroy(&from));
895 
896   /* set local to global mapping for ghosted vector */
897   nb     = n/bs;
898   PetscCall(PetscMalloc1(nb+nghost,&indices));
899   PetscCall(VecGetOwnershipRange(*vv,&rstart,NULL));
900   rstart = rstart/bs;
901 
902   for (i=0; i<nb; i++)      indices[i]    = rstart + i;
903   for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];
904 
905   PetscCall(ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog));
906   PetscCall(VecSetLocalToGlobalMapping(*vv,ltog));
907   PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
908   PetscFunctionReturn(0);
909 }
910 
911 /*@
912    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
913         The indexing of the ghost points is done by blocks.
914 
915    Collective
916 
917    Input Parameters:
918 +  comm - the MPI communicator to use
919 .  bs - the block size
920 .  n - local vector length
921 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
922 .  nghost - number of local ghost blocks
923 -  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)
924 
925    Output Parameter:
926 .  vv - the global vector representation (without ghost points as part of vector)
927 
928    Notes:
929    Use VecGhostGetLocalForm() to access the local, ghosted representation
930    of the vector.
931 
932    n is the local vector size (total local size, not the number of blocks), while nghost
933    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
934    portion is bs*nghost
935 
936    Level: advanced
937 
938 .seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
939           `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
940           `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecCreateGhostBlockWithArray()`
941 
942 @*/
943 PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
944 {
945   PetscFunctionBegin;
946   PetscCall(VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,NULL,vv));
947   PetscFunctionReturn(0);
948 }
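/*
   Usage sketch (illustrative only; block size 2, local length 6 and the ghost block index are
   placeholders). Note that n counts individual entries while nghost counts blocks, so the
   local form has n + bs*nghost entries:

     PetscInt ghostblocks[1] = {0};   // global index of a ghost block, not of an entry
     Vec      v;

     PetscCall(VecCreateGhostBlock(PETSC_COMM_WORLD,2,6,PETSC_DECIDE,1,ghostblocks,&v));
     PetscCall(VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD));
     PetscCall(VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD));
     PetscCall(VecDestroy(&v));
*/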
949