xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision ebead697dbf761eb322f829370bbe90b3bd93fa3)
1 
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include <petscsys.h>
6 #include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/
7 
8 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
9 {
10   PetscScalar    sum,work;
11 
12   PetscFunctionBegin;
13   PetscCall(VecDot_Seq(xin,yin,&work));
14   PetscCall(MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin)));
15   *z   = sum;
16   PetscFunctionReturn(0);
17 }
18 
19 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
20 {
21   PetscScalar    sum,work;
22 
23   PetscFunctionBegin;
24   PetscCall(VecTDot_Seq(xin,yin,&work));
25   PetscCall(MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin)));
26   *z   = sum;
27   PetscFunctionReturn(0);
28 }
29 
30 extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
31 
32 static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
33 {
34   Vec_MPI        *v = (Vec_MPI*)vin->data;
35 
36   PetscFunctionBegin;
37   PetscCheck(!v->unplacedarray,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
38   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
39   v->array         = (PetscScalar*)a;
40   if (v->localrep) PetscCall(VecPlaceArray(v->localrep,a));
41   PetscFunctionReturn(0);
42 }
43 
44 PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
45 {
46   Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
47   PetscScalar    *array;
48 
49   PetscFunctionBegin;
50   PetscCall(VecCreate(PetscObjectComm((PetscObject)win),v));
51   PetscCall(PetscLayoutReference(win->map,&(*v)->map));
52 
53   PetscCall(VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,NULL));
54   vw   = (Vec_MPI*)(*v)->data;
55   PetscCall(PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps)));
56 
57   /* save local representation of the parallel vector (and scatter) if it exists */
58   if (w->localrep) {
59     PetscCall(VecGetArray(*v,&array));
60     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep));
61     PetscCall(PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps)));
62     PetscCall(VecRestoreArray(*v,&array));
63     PetscCall(PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep));
64 
65     vw->localupdate = w->localupdate;
66     if (vw->localupdate) PetscCall(PetscObjectReference((PetscObject)vw->localupdate));
67   }
68 
69   /* New vector should inherit stashing property of parent */
70   (*v)->stash.donotstash   = win->stash.donotstash;
71   (*v)->stash.ignorenegidx = win->stash.ignorenegidx;
72 
73   PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist));
74   PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist));
75 
76   (*v)->map->bs   = PetscAbs(win->map->bs);
77   (*v)->bstash.bs = win->bstash.bs;
78   PetscFunctionReturn(0);
79 }
80 
81 static PetscErrorCode VecSetOption_MPI(Vec V,VecOption op,PetscBool flag)
82 {
83   Vec_MPI        *v = (Vec_MPI*)V->data;
84 
85   PetscFunctionBegin;
86   switch (op) {
87   case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag;
88     break;
89   case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag;
90     break;
91   case VEC_SUBSET_OFF_PROC_ENTRIES:
92     v->assembly_subset = flag; /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
93     if (!v->assembly_subset) { /* User indicates "do not reuse the communication pattern" */
94       PetscCall(VecAssemblyReset_MPI(V)); /* Reset existing pattern to free memory */
95       v->first_assembly_done = PETSC_FALSE; /* Mark the first assembly is not done */
96     }
97     break;
98   }
99 
100   PetscFunctionReturn(0);
101 }
102 
103 static PetscErrorCode VecResetArray_MPI(Vec vin)
104 {
105   Vec_MPI        *v = (Vec_MPI*)vin->data;
106 
107   PetscFunctionBegin;
108   v->array         = v->unplacedarray;
109   v->unplacedarray = NULL;
110   if (v->localrep) PetscCall(VecResetArray(v->localrep));
111   PetscFunctionReturn(0);
112 }
113 
114 static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
115 {
116   Vec X = (Vec)ctx;
117   Vec_MPI *x = (Vec_MPI*)X->data;
118   VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
119   PetscInt bs = X->map->bs;
120 
121   PetscFunctionBegin;
122   /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
123      messages can be empty, but we have to send them this time if we sent them before because the
124      receiver is expecting them.
125    */
126   if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
127     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]));
128     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]));
129   }
130   if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
131     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]));
132     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]));
133   }
134   PetscFunctionReturn(0);
135 }
136 
137 static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
138 {
139   Vec X = (Vec)ctx;
140   Vec_MPI *x = (Vec_MPI*)X->data;
141   VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
142   PetscInt bs = X->map->bs;
143   VecAssemblyFrame *frame;
144 
145   PetscFunctionBegin;
146   PetscCall(PetscSegBufferGet(x->segrecvframe,1,&frame));
147 
148   if (hdr->count) {
149     PetscCall(PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints));
150     PetscCallMPI(MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]));
151     PetscCall(PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars));
152     PetscCallMPI(MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]));
153     frame->pendings = 2;
154   } else {
155     frame->ints = NULL;
156     frame->scalars = NULL;
157     frame->pendings = 0;
158   }
159 
160   if (hdr->bcount) {
161     PetscCall(PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb));
162     PetscCallMPI(MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]));
163     PetscCall(PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb));
164     PetscCallMPI(MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]));
165     frame->pendingb = 2;
166   } else {
167     frame->intb = NULL;
168     frame->scalarb = NULL;
169     frame->pendingb = 0;
170   }
171   PetscFunctionReturn(0);
172 }
173 
174 static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
175 {
176   Vec_MPI        *x = (Vec_MPI*)X->data;
177   MPI_Comm       comm;
178   PetscInt       i,j,jb,bs;
179 
180   PetscFunctionBegin;
181   if (X->stash.donotstash) PetscFunctionReturn(0);
182 
183   PetscCall(PetscObjectGetComm((PetscObject)X,&comm));
184   PetscCall(VecGetBlockSize(X,&bs));
185   if (PetscDefined(USE_DEBUG)) {
186     InsertMode addv;
187     PetscCall(MPIU_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm));
188     PetscCheck(addv != (ADD_VALUES|INSERT_VALUES),comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
189   }
190   X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */
191 
192   PetscCall(VecStashSortCompress_Private(&X->stash));
193   PetscCall(VecStashSortCompress_Private(&X->bstash));
194 
195   if (!x->sendranks) {
196     PetscMPIInt nowners,bnowners,*owners,*bowners;
197     PetscInt ntmp;
198     PetscCall(VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners));
199     PetscCall(VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners));
200     PetscCall(PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks));
201     x->nsendranks = ntmp;
202     PetscCall(PetscFree(owners));
203     PetscCall(PetscFree(bowners));
204     PetscCall(PetscMalloc1(x->nsendranks,&x->sendhdr));
205     PetscCall(PetscCalloc1(x->nsendranks,&x->sendptrs));
206   }
207   for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
208     PetscMPIInt rank = x->sendranks[i];
209     x->sendhdr[i].insertmode = X->stash.insertmode;
210     /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
211      * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
212      * there is nothing new to send, so that size-zero messages get sent instead. */
213     x->sendhdr[i].count = 0;
214     if (X->stash.n) {
215       x->sendptrs[i].ints    = &X->stash.idx[j];
216       x->sendptrs[i].scalars = &X->stash.array[j];
217       for (; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
218     }
219     x->sendhdr[i].bcount = 0;
220     if (X->bstash.n) {
221       x->sendptrs[i].intb    = &X->bstash.idx[jb];
222       x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
223       for (; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
224     }
225   }
226 
227   if (!x->segrecvint) PetscCall(PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint));
228   if (!x->segrecvscalar) PetscCall(PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar));
229   if (!x->segrecvframe) PetscCall(PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe));
230   if (x->first_assembly_done) { /* this is not the first assembly */
231     PetscMPIInt tag[4];
232     for (i=0; i<4; i++) PetscCall(PetscCommGetNewTag(comm,&tag[i]));
233     for (i=0; i<x->nsendranks; i++) {
234       PetscCall(VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X));
235     }
236     for (i=0; i<x->nrecvranks; i++) {
237       PetscCall(VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X));
238     }
239     x->use_status = PETSC_TRUE;
240   } else { /* First time assembly */
241     PetscCall(PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X));
242     x->use_status = PETSC_FALSE;
243   }
244 
245   /* The first_assembly_done flag is only meaningful when x->assembly_subset is set.
246      This line says when assembly_subset is set, then we mark that the first assembly is done.
247    */
248   x->first_assembly_done = x->assembly_subset;
249 
250   {
251     PetscInt nstash,reallocs;
252     PetscCall(VecStashGetInfo_Private(&X->stash,&nstash,&reallocs));
253     PetscCall(PetscInfo(X,"Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n",nstash,reallocs));
254     PetscCall(VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs));
255     PetscCall(PetscInfo(X,"Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n",nstash,reallocs));
256   }
257   PetscFunctionReturn(0);
258 }
259 
260 static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
261 {
262   Vec_MPI *x = (Vec_MPI*)X->data;
263   PetscInt bs = X->map->bs;
264   PetscMPIInt npending,*some_indices,r;
265   MPI_Status  *some_statuses;
266   PetscScalar *xarray;
267   VecAssemblyFrame *frame;
268 
269   PetscFunctionBegin;
270   if (X->stash.donotstash) {
271     X->stash.insertmode = NOT_SET_VALUES;
272     X->bstash.insertmode = NOT_SET_VALUES;
273     PetscFunctionReturn(0);
274   }
275 
276   PetscCheck(x->segrecvframe,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
277   PetscCall(VecGetArray(X,&xarray));
278   PetscCall(PetscSegBufferExtractInPlace(x->segrecvframe,&frame));
279   PetscCall(PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses));
280   for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
281   while (npending>0) {
282     PetscMPIInt ndone=0,ii;
283     /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
284      * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
285      * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
286      * subsequent assembly can set a proper subset of the values. */
287     PetscCallMPI(MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE));
288     for (ii=0; ii<ndone; ii++) {
289       PetscInt i = some_indices[ii]/4,j,k;
290       InsertMode imode = (InsertMode)x->recvhdr[i].insertmode;
291       PetscInt *recvint;
292       PetscScalar *recvscalar;
293       PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
294       PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
295       npending--;
296       if (!blockmsg) { /* Scalar stash */
297         PetscMPIInt count;
298         if (--frame[i].pendings > 0) continue;
299         if (x->use_status) {
300           PetscCallMPI(MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count));
301         } else count = x->recvhdr[i].count;
302         for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
303           PetscInt loc = *recvint - X->map->rstart;
304           PetscCheck(*recvint >= X->map->rstart && X->map->rend > *recvint,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %" PetscInt_FMT " out of local range [%" PetscInt_FMT ",%" PetscInt_FMT ")]",*recvint,X->map->rstart,X->map->rend);
305           switch (imode) {
306           case ADD_VALUES:
307             xarray[loc] += *recvscalar++;
308             break;
309           case INSERT_VALUES:
310             xarray[loc] = *recvscalar++;
311             break;
312           default: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
313           }
314         }
315       } else {                  /* Block stash */
316         PetscMPIInt count;
317         if (--frame[i].pendingb > 0) continue;
318         if (x->use_status) {
319           PetscCallMPI(MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count));
320           if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
321         } else count = x->recvhdr[i].bcount;
322         for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
323           PetscInt loc = (*recvint)*bs - X->map->rstart;
324           switch (imode) {
325           case ADD_VALUES:
326             for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
327             break;
328           case INSERT_VALUES:
329             for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
330             break;
331           default: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
332           }
333         }
334       }
335     }
336   }
337   PetscCall(VecRestoreArray(X,&xarray));
338   PetscCallMPI(MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE));
339   PetscCall(PetscFree2(some_indices,some_statuses));
340   if (x->assembly_subset) {
341     PetscCall(PetscSegBufferExtractInPlace(x->segrecvint,NULL));
342     PetscCall(PetscSegBufferExtractInPlace(x->segrecvscalar,NULL));
343   } else {
344     PetscCall(VecAssemblyReset_MPI(X));
345   }
346 
347   X->stash.insertmode = NOT_SET_VALUES;
348   X->bstash.insertmode = NOT_SET_VALUES;
349   PetscCall(VecStashScatterEnd_Private(&X->stash));
350   PetscCall(VecStashScatterEnd_Private(&X->bstash));
351   PetscFunctionReturn(0);
352 }
353 
354 PetscErrorCode VecAssemblyReset_MPI(Vec X)
355 {
356   Vec_MPI *x = (Vec_MPI*)X->data;
357 
358   PetscFunctionBegin;
359   PetscCall(PetscFree(x->sendreqs));
360   PetscCall(PetscFree(x->recvreqs));
361   PetscCall(PetscFree(x->sendranks));
362   PetscCall(PetscFree(x->recvranks));
363   PetscCall(PetscFree(x->sendhdr));
364   PetscCall(PetscFree(x->recvhdr));
365   PetscCall(PetscFree(x->sendptrs));
366   PetscCall(PetscSegBufferDestroy(&x->segrecvint));
367   PetscCall(PetscSegBufferDestroy(&x->segrecvscalar));
368   PetscCall(PetscSegBufferDestroy(&x->segrecvframe));
369   PetscFunctionReturn(0);
370 }
371 
372 static PetscErrorCode VecSetFromOptions_MPI(PetscOptionItems *PetscOptionsObject,Vec X)
373 {
374 #if !defined(PETSC_HAVE_MPIUNI)
375   PetscBool      flg = PETSC_FALSE,set;
376 
377   PetscFunctionBegin;
378   PetscOptionsHeadBegin(PetscOptionsObject,"VecMPI Options");
379   PetscCall(PetscOptionsBool("-vec_assembly_legacy","Use MPI 1 version of assembly","",flg,&flg,&set));
380   if (set) {
381     X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
382     X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI   : VecAssemblyEnd_MPI_BTS;
383   }
384   PetscOptionsHeadEnd();
385 #else
386   PetscFunctionBegin;
387   X->ops->assemblybegin = VecAssemblyBegin_MPI;
388   X->ops->assemblyend   = VecAssemblyEnd_MPI;
389 #endif
390   PetscFunctionReturn(0);
391 }
392 
393 static struct _VecOps DvOps = {
394   PetscDesignatedInitializer(duplicate,VecDuplicate_MPI), /* 1 */
395   PetscDesignatedInitializer(duplicatevecs,VecDuplicateVecs_Default),
396   PetscDesignatedInitializer(destroyvecs,VecDestroyVecs_Default),
397   PetscDesignatedInitializer(dot,VecDot_MPI),
398   PetscDesignatedInitializer(mdot,VecMDot_MPI),
399   PetscDesignatedInitializer(norm,VecNorm_MPI),
400   PetscDesignatedInitializer(tdot,VecTDot_MPI),
401   PetscDesignatedInitializer(mtdot,VecMTDot_MPI),
402   PetscDesignatedInitializer(scale,VecScale_Seq),
403   PetscDesignatedInitializer(copy,VecCopy_Seq), /* 10 */
404   PetscDesignatedInitializer(set,VecSet_Seq),
405   PetscDesignatedInitializer(swap,VecSwap_Seq),
406   PetscDesignatedInitializer(axpy,VecAXPY_Seq),
407   PetscDesignatedInitializer(axpby,VecAXPBY_Seq),
408   PetscDesignatedInitializer(maxpy,VecMAXPY_Seq),
409   PetscDesignatedInitializer(aypx,VecAYPX_Seq),
410   PetscDesignatedInitializer(waxpy,VecWAXPY_Seq),
411   PetscDesignatedInitializer(axpbypcz,VecAXPBYPCZ_Seq),
412   PetscDesignatedInitializer(pointwisemult,VecPointwiseMult_Seq),
413   PetscDesignatedInitializer(pointwisedivide,VecPointwiseDivide_Seq),
414   PetscDesignatedInitializer(setvalues,VecSetValues_MPI), /* 20 */
415   PetscDesignatedInitializer(assemblybegin,VecAssemblyBegin_MPI_BTS),
416   PetscDesignatedInitializer(assemblyend,VecAssemblyEnd_MPI_BTS),
417   PetscDesignatedInitializer(getarray,NULL),
418   PetscDesignatedInitializer(getsize,VecGetSize_MPI),
419   PetscDesignatedInitializer(getlocalsize,VecGetSize_Seq),
420   PetscDesignatedInitializer(restorearray,NULL),
421   PetscDesignatedInitializer(max,VecMax_MPI),
422   PetscDesignatedInitializer(min,VecMin_MPI),
423   PetscDesignatedInitializer(setrandom,VecSetRandom_Seq),
424   PetscDesignatedInitializer(setoption,VecSetOption_MPI),
425   PetscDesignatedInitializer(setvaluesblocked,VecSetValuesBlocked_MPI),
426   PetscDesignatedInitializer(destroy,VecDestroy_MPI),
427   PetscDesignatedInitializer(view,VecView_MPI),
428   PetscDesignatedInitializer(placearray,VecPlaceArray_MPI),
429   PetscDesignatedInitializer(replacearray,VecReplaceArray_Seq),
430   PetscDesignatedInitializer(dot_local,VecDot_Seq),
431   PetscDesignatedInitializer(tdot_local,VecTDot_Seq),
432   PetscDesignatedInitializer(norm_local,VecNorm_Seq),
433   PetscDesignatedInitializer(mdot_local,VecMDot_Seq),
434   PetscDesignatedInitializer(mtdot_local,VecMTDot_Seq),
435   PetscDesignatedInitializer(load,VecLoad_Default),
436   PetscDesignatedInitializer(reciprocal,VecReciprocal_Default),
437   PetscDesignatedInitializer(conjugate,VecConjugate_Seq),
438   PetscDesignatedInitializer(setlocaltoglobalmapping,NULL),
439   PetscDesignatedInitializer(setvalueslocal,NULL),
440   PetscDesignatedInitializer(resetarray,VecResetArray_MPI),
441   PetscDesignatedInitializer(setfromoptions,VecSetFromOptions_MPI),/*set from options */
442   PetscDesignatedInitializer(maxpointwisedivide,VecMaxPointwiseDivide_Seq),
443   PetscDesignatedInitializer(pointwisemax,VecPointwiseMax_Seq),
444   PetscDesignatedInitializer(pointwisemaxabs,VecPointwiseMaxAbs_Seq),
445   PetscDesignatedInitializer(pointwisemin,VecPointwiseMin_Seq),
446   PetscDesignatedInitializer(getvalues,VecGetValues_MPI),
447   PetscDesignatedInitializer(sqrt,NULL),
448   PetscDesignatedInitializer(abs,NULL),
449   PetscDesignatedInitializer(exp,NULL),
450   PetscDesignatedInitializer(log,NULL),
451   PetscDesignatedInitializer(shift,NULL),
452   PetscDesignatedInitializer(create,NULL), /* really? */
453   PetscDesignatedInitializer(stridegather,VecStrideGather_Default),
454   PetscDesignatedInitializer(stridescatter,VecStrideScatter_Default),
455   PetscDesignatedInitializer(dotnorm2,NULL),
456   PetscDesignatedInitializer(getsubvector,NULL),
457   PetscDesignatedInitializer(restoresubvector,NULL),
458   PetscDesignatedInitializer(getarrayread,NULL),
459   PetscDesignatedInitializer(restorearrayread,NULL),
460   PetscDesignatedInitializer(stridesubsetgather,VecStrideSubSetGather_Default),
461   PetscDesignatedInitializer(stridesubsetscatter,VecStrideSubSetScatter_Default),
462   PetscDesignatedInitializer(viewnative,VecView_MPI),
463   PetscDesignatedInitializer(loadnative,NULL),
464   PetscDesignatedInitializer(getlocalvector,NULL),
465   PetscDesignatedInitializer(restorelocalvector,NULL),
466   PetscDesignatedInitializer(getlocalvectorread,NULL),
467   PetscDesignatedInitializer(restorelocalvectorread,NULL),
468   PetscDesignatedInitializer(bindtocpu,NULL),
469   PetscDesignatedInitializer(getarraywrite,NULL),
470   PetscDesignatedInitializer(restorearraywrite,NULL),
471   PetscDesignatedInitializer(getarrayandmemtype,NULL),
472   PetscDesignatedInitializer(restorearrayandmemtype,NULL),
473   PetscDesignatedInitializer(getarrayreadandmemtype,NULL),
474   PetscDesignatedInitializer(restorearrayreadandmemtype,NULL),
475   PetscDesignatedInitializer(getarraywriteandmemtype,NULL),
476   PetscDesignatedInitializer(restorearraywriteandmemtype,NULL),
477   PetscDesignatedInitializer(concatenate,NULL),
478   PetscDesignatedInitializer(sum,NULL),
479   PetscDesignatedInitializer(setpreallocationcoo,VecSetPreallocationCOO_MPI),
480   PetscDesignatedInitializer(setvaluescoo,VecSetValuesCOO_MPI)
481 };
482 
483 /*
484     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
485     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
486     VecDuplicate_MPI(), VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
487 
488     If alloc is true and array is NULL then this routine allocates the space, otherwise
489     no space is allocated.
490 */
491 PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
492 {
493   Vec_MPI        *s;
494 
495   PetscFunctionBegin;
496   PetscCall(PetscNewLog(v,&s));
497   v->data        = (void*)s;
498   PetscCall(PetscMemcpy(v->ops,&DvOps,sizeof(DvOps)));
499   s->nghost      = nghost;
500   v->petscnative = PETSC_TRUE;
501   if (array) v->offloadmask = PETSC_OFFLOAD_CPU;
502 
503   PetscCall(PetscLayoutSetUp(v->map));
504 
505   s->array           = (PetscScalar*)array;
506   s->array_allocated = NULL;
507   if (alloc && !array) {
508     PetscInt n = v->map->n+nghost;
509     PetscCall(PetscCalloc1(n,&s->array));
510     PetscCall(PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar)));
511     s->array_allocated = s->array;
512   }
513 
514   /* By default parallel vectors do not have local representation */
515   s->localrep    = NULL;
516   s->localupdate = NULL;
517 
518   v->stash.insertmode = NOT_SET_VALUES;
519   v->bstash.insertmode = NOT_SET_VALUES;
520   /* create the stashes. The block-size for bstash is set later when
521      VecSetValuesBlocked is called.
522   */
523   PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash));
524   PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash));
525 
526 #if defined(PETSC_HAVE_MATLAB_ENGINE)
527   PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default));
528   PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default));
529 #endif
530   PetscCall(PetscObjectChangeTypeName((PetscObject)v,VECMPI));
531   PetscFunctionReturn(0);
532 }
533 
534 /*MC
535    VECMPI - VECMPI = "mpi" - The basic parallel vector
536 
537    Options Database Keys:
538 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
539 
540   Level: beginner
541 
542 .seealso: `VecCreate()`, `VecSetType()`, `VecSetFromOptions()`, `VecCreateMPIWithArray()`, `VECMPI`, `VecType`, `VecCreateMPI()`, `VecCreateMPI()`
543 M*/
544 
545 PetscErrorCode VecCreate_MPI(Vec vv)
546 {
547   PetscFunctionBegin;
548   PetscCall(VecCreate_MPI_Private(vv,PETSC_TRUE,0,NULL));
549   PetscFunctionReturn(0);
550 }
551 
552 /*MC
553    VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process
554 
555    Options Database Keys:
556 . -vec_type standard - sets a vector type to standard on calls to VecSetFromOptions()
557 
558   Level: beginner
559 
560 .seealso: `VecCreateSeq()`, `VecCreateMPI()`
561 M*/
562 
563 PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
564 {
565   PetscMPIInt    size;
566 
567   PetscFunctionBegin;
568   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)v),&size));
569   if (size == 1) {
570     PetscCall(VecSetType(v,VECSEQ));
571   } else {
572     PetscCall(VecSetType(v,VECMPI));
573   }
574   PetscFunctionReturn(0);
575 }
576 
577 /*@C
578    VecCreateMPIWithArray - Creates a parallel, array-style vector,
579    where the user provides the array space to store the vector values.
580 
581    Collective
582 
583    Input Parameters:
584 +  comm  - the MPI communicator to use
585 .  bs    - block size, same meaning as VecSetBlockSize()
586 .  n     - local vector length, cannot be PETSC_DECIDE
587 .  N     - global vector length (or PETSC_DECIDE to have calculated)
588 -  array - the user provided array to store the vector values
589 
590    Output Parameter:
591 .  vv - the vector
592 
593    Notes:
594    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
595    same type as an existing vector.
596 
597    If the user-provided array is NULL, then VecPlaceArray() can be used
598    at a later stage to SET the array for storing the vector values.
599 
600    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
601    The user should not free the array until the vector is destroyed.
602 
603    Level: intermediate
604 
605 .seealso: `VecCreateSeqWithArray()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateGhost()`,
606           `VecCreateMPI()`, `VecCreateGhostWithArray()`, `VecPlaceArray()`
607 
608 @*/
609 PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
610 {
611   PetscFunctionBegin;
612   PetscCheck(n != PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
613   PetscCall(PetscSplitOwnership(comm,&n,&N));
614   PetscCall(VecCreate(comm,vv));
615   PetscCall(VecSetSizes(*vv,n,N));
616   PetscCall(VecSetBlockSize(*vv,bs));
617   PetscCall(VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array));
618   PetscFunctionReturn(0);
619 }
620 
621 /*@C
622    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
623    the caller allocates the array space.
624 
625    Collective
626 
627    Input Parameters:
628 +  comm - the MPI communicator to use
629 .  n - local vector length
630 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
631 .  nghost - number of local ghost points
632 .  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
633 -  array - the space to store the vector values (as long as n + nghost)
634 
635    Output Parameter:
636 .  vv - the global vector representation (without ghost points as part of vector)
637 
638    Notes:
639    Use VecGhostGetLocalForm() to access the local, ghosted representation
640    of the vector.
641 
642    This also automatically sets the ISLocalToGlobalMapping() for this vector.
643 
644    Level: advanced
645 
646 .seealso: `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
647           `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
648           `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`
649 
650 @*/
651 PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
652 {
653   Vec_MPI                *w;
654   PetscScalar            *larray;
655   IS                     from,to;
656   ISLocalToGlobalMapping ltog;
657   PetscInt               rstart,i,*indices;
658 
659   PetscFunctionBegin;
660   *vv = NULL;
661 
662   PetscCheck(n != PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
663   PetscCheck(nghost != PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
664   PetscCheck(nghost >= 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
665   PetscCall(PetscSplitOwnership(comm,&n,&N));
666   /* Create global representation */
667   PetscCall(VecCreate(comm,vv));
668   PetscCall(VecSetSizes(*vv,n,N));
669   PetscCall(VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array));
670   w    = (Vec_MPI*)(*vv)->data;
671   /* Create local representation */
672   PetscCall(VecGetArray(*vv,&larray));
673   PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep));
674   PetscCall(PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep));
675   PetscCall(VecRestoreArray(*vv,&larray));
676 
677   /*
678        Create scatter context for scattering (updating) ghost values
679   */
680   PetscCall(ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from));
681   PetscCall(ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to));
682   PetscCall(VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate));
683   PetscCall(PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate));
684   PetscCall(ISDestroy(&to));
685   PetscCall(ISDestroy(&from));
686 
687   /* set local to global mapping for ghosted vector */
688   PetscCall(PetscMalloc1(n+nghost,&indices));
689   PetscCall(VecGetOwnershipRange(*vv,&rstart,NULL));
690   for (i=0; i<n; i++) {
691     indices[i] = rstart + i;
692   }
693   for (i=0; i<nghost; i++) {
694     indices[n+i] = ghosts[i];
695   }
696   PetscCall(ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog));
697   PetscCall(VecSetLocalToGlobalMapping(*vv,ltog));
698   PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
699   PetscFunctionReturn(0);
700 }
701 
702 /*@
703    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
704 
705    Collective
706 
707    Input Parameters:
708 +  comm - the MPI communicator to use
709 .  n - local vector length
710 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
711 .  nghost - number of local ghost points
712 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
713 
714    Output Parameter:
715 .  vv - the global vector representation (without ghost points as part of vector)
716 
717    Notes:
718    Use VecGhostGetLocalForm() to access the local, ghosted representation
719    of the vector.
720 
721    This also automatically sets the ISLocalToGlobalMapping() for this vector.
722 
723    Level: advanced
724 
725 .seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
726           `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
727           `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
728           `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`
729 
730 @*/
731 PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
732 {
733   PetscFunctionBegin;
734   PetscCall(VecCreateGhostWithArray(comm,n,N,nghost,ghosts,NULL,vv));
735   PetscFunctionReturn(0);
736 }
737 
738 /*@
739    VecMPISetGhost - Sets the ghost points for an MPI ghost vector
740 
741    Collective on Vec
742 
743    Input Parameters:
744 +  vv - the MPI vector
745 .  nghost - number of local ghost points
746 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
747 
748    Notes:
749    Use VecGhostGetLocalForm() to access the local, ghosted representation
750    of the vector.
751 
752    This also automatically sets the ISLocalToGlobalMapping() for this vector.
753 
754    You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).
755 
756    Level: advanced
757 
758 .seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
759           `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
760           `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
761           `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`
762 
763 @*/
764 PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
765 {
766   PetscBool      flg;
767 
768   PetscFunctionBegin;
769   PetscCall(PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg));
770   /* if already fully existant VECMPI then basically destroy it and rebuild with ghosting */
771   if (flg) {
772     PetscInt               n,N;
773     Vec_MPI                *w;
774     PetscScalar            *larray;
775     IS                     from,to;
776     ISLocalToGlobalMapping ltog;
777     PetscInt               rstart,i,*indices;
778     MPI_Comm               comm;
779 
780     PetscCall(PetscObjectGetComm((PetscObject)vv,&comm));
781     n    = vv->map->n;
782     N    = vv->map->N;
783     PetscCall((*vv->ops->destroy)(vv));
784     PetscCall(VecSetSizes(vv,n,N));
785     PetscCall(VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL));
786     w    = (Vec_MPI*)(vv)->data;
787     /* Create local representation */
788     PetscCall(VecGetArray(vv,&larray));
789     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep));
790     PetscCall(PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep));
791     PetscCall(VecRestoreArray(vv,&larray));
792 
793     /*
794      Create scatter context for scattering (updating) ghost values
795      */
796     PetscCall(ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from));
797     PetscCall(ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to));
798     PetscCall(VecScatterCreate(vv,from,w->localrep,to,&w->localupdate));
799     PetscCall(PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate));
800     PetscCall(ISDestroy(&to));
801     PetscCall(ISDestroy(&from));
802 
803     /* set local to global mapping for ghosted vector */
804     PetscCall(PetscMalloc1(n+nghost,&indices));
805     PetscCall(VecGetOwnershipRange(vv,&rstart,NULL));
806 
807     for (i=0; i<n; i++)      indices[i]   = rstart + i;
808     for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];
809 
810     PetscCall(ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog));
811     PetscCall(VecSetLocalToGlobalMapping(vv,ltog));
812     PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
813   } else {
814     PetscCheck(vv->ops->create != VecCreate_MPI,PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
815     PetscCheck(((PetscObject)vv)->type_name,PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
816   }
817   PetscFunctionReturn(0);
818 }
819 
820 /* ------------------------------------------------------------------------------------------*/
821 /*@C
822    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
823    the caller allocates the array space. Indices in the ghost region are based on blocks.
824 
825    Collective
826 
827    Input Parameters:
828 +  comm - the MPI communicator to use
829 .  bs - block size
830 .  n - local vector length
831 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
832 .  nghost - number of local ghost blocks
833 .  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
834 -  array - the space to store the vector values (as long as n + nghost*bs)
835 
836    Output Parameter:
837 .  vv - the global vector representation (without ghost points as part of vector)
838 
839    Notes:
840    Use VecGhostGetLocalForm() to access the local, ghosted representation
841    of the vector.
842 
843    n is the local vector size (total local size not the number of blocks) while nghost
844    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
845    portion is bs*nghost
846 
847    Level: advanced
848 
849 .seealso: `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
850           `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
851           `VecCreateGhostWithArray()`, `VecCreateGhostBlock()`
852 
853 @*/
854 PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
855 {
856   Vec_MPI                *w;
857   PetscScalar            *larray;
858   IS                     from,to;
859   ISLocalToGlobalMapping ltog;
860   PetscInt               rstart,i,nb,*indices;
861 
862   PetscFunctionBegin;
863   *vv = NULL;
864 
865   PetscCheck(n != PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
866   PetscCheck(nghost != PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
867   PetscCheck(nghost >= 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
868   PetscCheck(n % bs == 0,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
869   PetscCall(PetscSplitOwnership(comm,&n,&N));
870   /* Create global representation */
871   PetscCall(VecCreate(comm,vv));
872   PetscCall(VecSetSizes(*vv,n,N));
873   PetscCall(VecSetBlockSize(*vv,bs));
874   PetscCall(VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array));
875   w    = (Vec_MPI*)(*vv)->data;
876   /* Create local representation */
877   PetscCall(VecGetArray(*vv,&larray));
878   PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep));
879   PetscCall(PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep));
880   PetscCall(VecRestoreArray(*vv,&larray));
881 
882   /*
883        Create scatter context for scattering (updating) ghost values
884   */
885   PetscCall(ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from));
886   PetscCall(ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to));
887   PetscCall(VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate));
888   PetscCall(PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate));
889   PetscCall(ISDestroy(&to));
890   PetscCall(ISDestroy(&from));
891 
892   /* set local to global mapping for ghosted vector */
893   nb     = n/bs;
894   PetscCall(PetscMalloc1(nb+nghost,&indices));
895   PetscCall(VecGetOwnershipRange(*vv,&rstart,NULL));
896   rstart = rstart/bs;
897 
898   for (i=0; i<nb; i++)      indices[i]    = rstart + i;
899   for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];
900 
901   PetscCall(ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog));
902   PetscCall(VecSetLocalToGlobalMapping(*vv,ltog));
903   PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
904   PetscFunctionReturn(0);
905 }
906 
907 /*@
908    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
909         The indicing of the ghost points is done with blocks.
910 
911    Collective
912 
913    Input Parameters:
914 +  comm - the MPI communicator to use
915 .  bs - the block size
916 .  n - local vector length
917 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
918 .  nghost - number of local ghost blocks
919 -  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)
920 
921    Output Parameter:
922 .  vv - the global vector representation (without ghost points as part of vector)
923 
924    Notes:
925    Use VecGhostGetLocalForm() to access the local, ghosted representation
926    of the vector.
927 
928    n is the local vector size (total local size not the number of blocks) while nghost
929    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
930    portion is bs*nghost
931 
932    Level: advanced
933 
934 .seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
935           `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
936           `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecCreateGhostBlockWithArray()`
937 
938 @*/
939 PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
940 {
941   PetscFunctionBegin;
942   PetscCall(VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,NULL,vv));
943   PetscFunctionReturn(0);
944 }
945