
/*
   This file contains routines for Parallel vector operations.
 */
#include <petscsys.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;

  PetscFunctionBegin;
  PetscCall(VecDot_Seq(xin,yin,&work));
  PetscCall(MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin)));
  *z   = sum;
  PetscFunctionReturn(0);
}

PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;

  PetscFunctionBegin;
  PetscCall(VecTDot_Seq(xin,yin,&work));
  PetscCall(MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin)));
  *z   = sum;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  PetscCheck(!v->unplacedarray,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    PetscCall(VecPlaceArray(v->localrep,a));
  }
  PetscFunctionReturn(0);
}

PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  PetscCall(VecCreate(PetscObjectComm((PetscObject)win),v));
  PetscCall(PetscLayoutReference(win->map,&(*v)->map));

  PetscCall(VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,NULL));
  vw   = (Vec_MPI*)(*v)->data;
  PetscCall(PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps)));

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    PetscCall(VecGetArray(*v,&array));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep));
    PetscCall(PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps)));
    PetscCall(VecRestoreArray(*v,&array));
    PetscCall(PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep));

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      PetscCall(PetscObjectReference((PetscObject)vw->localupdate));
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist));
  PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist));

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

static PetscErrorCode VecSetOption_MPI(Vec V,VecOption op,PetscBool flag)
{
  Vec_MPI        *v = (Vec_MPI*)V->data;

  PetscFunctionBegin;
  switch (op) {
  case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag;
    break;
  case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag;
    break;
  case VEC_SUBSET_OFF_PROC_ENTRIES:
    v->assembly_subset = flag; /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
    if (!v->assembly_subset) { /* User indicates "do not reuse the communication pattern" */
      PetscCall(VecAssemblyReset_MPI(V)); /* Reset existing pattern to free memory */
      v->first_assembly_done = PETSC_FALSE; /* Mark that the first assembly has not been done */
    }
    break;
  }

  PetscFunctionReturn(0);
}
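
/*
   A minimal caller-side sketch of VEC_SUBSET_OFF_PROC_ENTRIES (illustrative only; the loop and
   the names ni, idx, vals are placeholders, not taken from this file).  When every assembly
   sets the same subset of off-process entries, this option lets the routines below reuse the
   communication pattern instead of rebuilding it each time.

     PetscCall(VecSetOption(x,VEC_SUBSET_OFF_PROC_ENTRIES,PETSC_TRUE));
     for (step = 0; step < nsteps; step++) {
       PetscCall(VecSetValues(x,ni,idx,vals,ADD_VALUES));
       PetscCall(VecAssemblyBegin(x));
       PetscCall(VecAssemblyEnd(x));
     }
*/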

static PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = NULL;
  if (v->localrep) {
    PetscCall(VecResetArray(v->localrep));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
  PetscInt bs = X->map->bs;

  PetscFunctionBegin;
  /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
     messages can be empty, but we have to send them this time if we sent them before because the
     receiver is expecting them.
   */
  if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]));
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]));
  }
  if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]));
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
  PetscInt bs = X->map->bs;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  PetscCall(PetscSegBufferGet(x->segrecvframe,1,&frame));

  if (hdr->count) {
    PetscCall(PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints));
    PetscCallMPI(MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars));
    PetscCallMPI(MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]));
    frame->pendings = 2;
  } else {
    frame->ints = NULL;
    frame->scalars = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    PetscCall(PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb));
    PetscCallMPI(MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb));
    PetscCallMPI(MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]));
    frame->pendingb = 2;
  } else {
    frame->intb = NULL;
    frame->scalarb = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI        *x = (Vec_MPI*)X->data;
  MPI_Comm       comm;
  PetscInt       i,j,jb,bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(0);

  PetscCall(PetscObjectGetComm((PetscObject)X,&comm));
  PetscCall(VecGetBlockSize(X,&bs));
  if (PetscDefined(USE_DEBUG)) {
    InsertMode addv;
    PetscCall(MPIU_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm));
    PetscCheck(addv != (ADD_VALUES|INSERT_VALUES),comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
  }
  X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */

  PetscCall(VecStashSortCompress_Private(&X->stash));
  PetscCall(VecStashSortCompress_Private(&X->bstash));

  if (!x->sendranks) {
    PetscMPIInt nowners,bnowners,*owners,*bowners;
    PetscInt ntmp;
    PetscCall(VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners));
    PetscCall(VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners));
    PetscCall(PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks));
    x->nsendranks = ntmp;
    PetscCall(PetscFree(owners));
    PetscCall(PetscFree(bowners));
    PetscCall(PetscMalloc1(x->nsendranks,&x->sendhdr));
    PetscCall(PetscCalloc1(x->nsendranks,&x->sendptrs));
  }
  for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
    PetscMPIInt rank = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for (; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
      for (; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) PetscCall(PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint));
  if (!x->segrecvscalar) PetscCall(PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar));
  if (!x->segrecvframe) PetscCall(PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe));
  if (x->first_assembly_done) { /* this is not the first assembly */
    PetscMPIInt tag[4];
    for (i=0; i<4; i++) PetscCall(PetscCommGetNewTag(comm,&tag[i]));
    for (i=0; i<x->nsendranks; i++) {
      PetscCall(VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X));
    }
    for (i=0; i<x->nrecvranks; i++) {
      PetscCall(VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X));
    }
    x->use_status = PETSC_TRUE;
  } else { /* First time assembly */
    PetscCall(PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X));
    x->use_status = PETSC_FALSE;
  }

  /* The first_assembly_done flag is only meaningful when x->assembly_subset is set.
     When assembly_subset is set, record here that the first assembly has now been done.
   */
  x->first_assembly_done = x->assembly_subset;

  {
    PetscInt nstash,reallocs;
    PetscCall(VecStashGetInfo_Private(&X->stash,&nstash,&reallocs));
    PetscCall(PetscInfo(X,"Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n",nstash,reallocs));
    PetscCall(VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs));
    PetscCall(PetscInfo(X,"Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n",nstash,reallocs));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscInt bs = X->map->bs;
  PetscMPIInt npending,*some_indices,r;
  MPI_Status  *some_statuses;
  PetscScalar *xarray;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(0);
  }

  PetscCheck(x->segrecvframe,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
  PetscCall(VecGetArray(X,&xarray));
  PetscCall(PetscSegBufferExtractInPlace(x->segrecvframe,&frame));
  PetscCall(PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses));
  for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending>0) {
    PetscMPIInt ndone=0,ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    PetscCallMPI(MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE));
    for (ii=0; ii<ndone; ii++) {
      PetscInt i = some_indices[ii]/4,j,k;
      InsertMode imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt *recvint;
      PetscScalar *recvscalar;
      PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
      PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count));
        } else count = x->recvhdr[i].count;
        for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          PetscCheck(*recvint >= X->map->rstart && X->map->rend > *recvint,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %" PetscInt_FMT " out of local range [%" PetscInt_FMT ",%" PetscInt_FMT ")",*recvint,X->map->rstart,X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      } else {                  /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count));
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
          PetscInt loc = (*recvint)*bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
            break;
          default: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      }
    }
  }
  PetscCall(VecRestoreArray(X,&xarray));
  PetscCallMPI(MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE));
  PetscCall(PetscFree2(some_indices,some_statuses));
  if (x->assembly_subset) {
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvint,NULL));
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvscalar,NULL));
  } else {
    PetscCall(VecAssemblyReset_MPI(X));
  }

  X->stash.insertmode = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  PetscCall(VecStashScatterEnd_Private(&X->stash));
  PetscCall(VecStashScatterEnd_Private(&X->bstash));
  PetscFunctionReturn(0);
}
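
/*
   The two routines above implement the "BTS" (build-two-sided) assembly that backs the usual
   user-level sequence (a minimal sketch; row and val are placeholders, not taken from this file):

     PetscCall(VecSetValues(x,1,&row,&val,ADD_VALUES));   possibly an off-process contribution
     PetscCall(VecAssemblyBegin(x));                      posts the sends and receives set up above
     PetscCall(VecAssemblyEnd(x));                        waits and unpacks into the local array
*/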

PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(x->sendreqs));
  PetscCall(PetscFree(x->recvreqs));
  PetscCall(PetscFree(x->sendranks));
  PetscCall(PetscFree(x->recvranks));
  PetscCall(PetscFree(x->sendhdr));
  PetscCall(PetscFree(x->recvhdr));
  PetscCall(PetscFree(x->sendptrs));
  PetscCall(PetscSegBufferDestroy(&x->segrecvint));
  PetscCall(PetscSegBufferDestroy(&x->segrecvscalar));
  PetscCall(PetscSegBufferDestroy(&x->segrecvframe));
  PetscFunctionReturn(0);
}

static PetscErrorCode VecSetFromOptions_MPI(PetscOptionItems *PetscOptionsObject,Vec X)
{
#if !defined(PETSC_HAVE_MPIUNI)
  PetscBool      flg = PETSC_FALSE,set;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject,"VecMPI Options");
  PetscCall(PetscOptionsBool("-vec_assembly_legacy","Use MPI 1 version of assembly","",flg,&flg,&set));
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI   : VecAssemblyEnd_MPI_BTS;
  }
  PetscOptionsHeadEnd();
#else
  PetscFunctionBegin;
  X->ops->assemblybegin = VecAssemblyBegin_MPI;
  X->ops->assemblyend   = VecAssemblyEnd_MPI;
#endif
  PetscFunctionReturn(0);
}

static struct _VecOps DvOps = {
  PetscDesignatedInitializer(duplicate,VecDuplicate_MPI), /* 1 */
  PetscDesignatedInitializer(duplicatevecs,VecDuplicateVecs_Default),
  PetscDesignatedInitializer(destroyvecs,VecDestroyVecs_Default),
  PetscDesignatedInitializer(dot,VecDot_MPI),
  PetscDesignatedInitializer(mdot,VecMDot_MPI),
  PetscDesignatedInitializer(norm,VecNorm_MPI),
  PetscDesignatedInitializer(tdot,VecTDot_MPI),
  PetscDesignatedInitializer(mtdot,VecMTDot_MPI),
  PetscDesignatedInitializer(scale,VecScale_Seq),
  PetscDesignatedInitializer(copy,VecCopy_Seq), /* 10 */
  PetscDesignatedInitializer(set,VecSet_Seq),
  PetscDesignatedInitializer(swap,VecSwap_Seq),
  PetscDesignatedInitializer(axpy,VecAXPY_Seq),
  PetscDesignatedInitializer(axpby,VecAXPBY_Seq),
  PetscDesignatedInitializer(maxpy,VecMAXPY_Seq),
  PetscDesignatedInitializer(aypx,VecAYPX_Seq),
  PetscDesignatedInitializer(waxpy,VecWAXPY_Seq),
  PetscDesignatedInitializer(axpbypcz,VecAXPBYPCZ_Seq),
  PetscDesignatedInitializer(pointwisemult,VecPointwiseMult_Seq),
  PetscDesignatedInitializer(pointwisedivide,VecPointwiseDivide_Seq),
  PetscDesignatedInitializer(setvalues,VecSetValues_MPI), /* 20 */
  PetscDesignatedInitializer(assemblybegin,VecAssemblyBegin_MPI_BTS),
  PetscDesignatedInitializer(assemblyend,VecAssemblyEnd_MPI_BTS),
  PetscDesignatedInitializer(getarray,NULL),
  PetscDesignatedInitializer(getsize,VecGetSize_MPI),
  PetscDesignatedInitializer(getlocalsize,VecGetSize_Seq),
  PetscDesignatedInitializer(restorearray,NULL),
  PetscDesignatedInitializer(max,VecMax_MPI),
  PetscDesignatedInitializer(min,VecMin_MPI),
  PetscDesignatedInitializer(setrandom,VecSetRandom_Seq),
  PetscDesignatedInitializer(setoption,VecSetOption_MPI),
  PetscDesignatedInitializer(setvaluesblocked,VecSetValuesBlocked_MPI),
  PetscDesignatedInitializer(destroy,VecDestroy_MPI),
  PetscDesignatedInitializer(view,VecView_MPI),
  PetscDesignatedInitializer(placearray,VecPlaceArray_MPI),
  PetscDesignatedInitializer(replacearray,VecReplaceArray_Seq),
  PetscDesignatedInitializer(dot_local,VecDot_Seq),
  PetscDesignatedInitializer(tdot_local,VecTDot_Seq),
  PetscDesignatedInitializer(norm_local,VecNorm_Seq),
  PetscDesignatedInitializer(mdot_local,VecMDot_Seq),
  PetscDesignatedInitializer(mtdot_local,VecMTDot_Seq),
  PetscDesignatedInitializer(load,VecLoad_Default),
  PetscDesignatedInitializer(reciprocal,VecReciprocal_Default),
  PetscDesignatedInitializer(conjugate,VecConjugate_Seq),
  PetscDesignatedInitializer(setlocaltoglobalmapping,NULL),
  PetscDesignatedInitializer(setvalueslocal,NULL),
  PetscDesignatedInitializer(resetarray,VecResetArray_MPI),
  PetscDesignatedInitializer(setfromoptions,VecSetFromOptions_MPI),/*set from options */
  PetscDesignatedInitializer(maxpointwisedivide,VecMaxPointwiseDivide_Seq),
  PetscDesignatedInitializer(pointwisemax,VecPointwiseMax_Seq),
  PetscDesignatedInitializer(pointwisemaxabs,VecPointwiseMaxAbs_Seq),
  PetscDesignatedInitializer(pointwisemin,VecPointwiseMin_Seq),
  PetscDesignatedInitializer(getvalues,VecGetValues_MPI),
  PetscDesignatedInitializer(sqrt,NULL),
  PetscDesignatedInitializer(abs,NULL),
  PetscDesignatedInitializer(exp,NULL),
  PetscDesignatedInitializer(log,NULL),
  PetscDesignatedInitializer(shift,NULL),
  PetscDesignatedInitializer(create,NULL), /* really? */
  PetscDesignatedInitializer(stridegather,VecStrideGather_Default),
  PetscDesignatedInitializer(stridescatter,VecStrideScatter_Default),
  PetscDesignatedInitializer(dotnorm2,NULL),
  PetscDesignatedInitializer(getsubvector,NULL),
  PetscDesignatedInitializer(restoresubvector,NULL),
  PetscDesignatedInitializer(getarrayread,NULL),
  PetscDesignatedInitializer(restorearrayread,NULL),
  PetscDesignatedInitializer(stridesubsetgather,VecStrideSubSetGather_Default),
  PetscDesignatedInitializer(stridesubsetscatter,VecStrideSubSetScatter_Default),
  PetscDesignatedInitializer(viewnative,VecView_MPI),
  PetscDesignatedInitializer(loadnative,NULL),
  PetscDesignatedInitializer(getlocalvector,NULL),
};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(v,&s));
  v->data        = (void*)s;
  PetscCall(PetscMemcpy(v->ops,&DvOps,sizeof(DvOps)));
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;
  if (array) v->offloadmask = PETSC_OFFLOAD_CPU;

  PetscCall(PetscLayoutSetUp(v->map));

  s->array           = (PetscScalar*)array;
  s->array_allocated = NULL;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    PetscCall(PetscCalloc1(n,&s->array));
    PetscCall(PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar)));
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = NULL;
  s->localupdate = NULL;

  v->stash.insertmode = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash));
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash));

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default));
  PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default));
#endif
  PetscCall(PetscObjectChangeTypeName((PetscObject)v,VECMPI));
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/

PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreate_MPI_Private(vv,PETSC_TRUE,0,NULL));
  PetscFunctionReturn(0);
}
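
/*
   A minimal creation sketch for a VECMPI vector (illustrative only; the global size 100 is
   made up).  Equivalently, VecSetFromOptions() together with -vec_type mpi selects this type.

     Vec x;
     PetscCall(VecCreate(PETSC_COMM_WORLD,&x));
     PetscCall(VecSetSizes(x,PETSC_DECIDE,100));
     PetscCall(VecSetType(x,VECMPI));
     PetscCall(VecDestroy(&x));
*/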

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets a vector type to standard on calls to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscMPIInt    size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)v),&size));
  if (size == 1) {
    PetscCall(VecSetType(v,VECSEQ));
  } else {
    PetscCall(VecSetType(v,VECMPI));
  }
  PetscFunctionReturn(0);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscFunctionBegin;
  PetscCheck(n != PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  PetscCall(PetscSplitOwnership(comm,&n,&N));
  PetscCall(VecCreate(comm,vv));
  PetscCall(VecSetSizes(*vv,n,N));
  PetscCall(VecSetBlockSize(*vv,bs));
  PetscCall(VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array));
  PetscFunctionReturn(0);
}
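
/*
   A minimal usage sketch for VecCreateMPIWithArray (illustrative only; the local length 4 is
   made up).  PETSc never frees the user array, so it must outlive the vector.

     PetscScalar array[4];
     Vec         x;
     PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD,1,4,PETSC_DECIDE,array,&x));
     PetscCall(VecDestroy(&x));
*/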

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (must be of length at least n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = NULL;

  PetscCheck(n != PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  PetscCheck(nghost != PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  PetscCheck(nghost >= 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  PetscCall(PetscSplitOwnership(comm,&n,&N));
  /* Create global representation */
  PetscCall(VecCreate(comm,vv));
  PetscCall(VecSetSizes(*vv,n,N));
  PetscCall(VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array));
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv,&larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep));
  PetscCall(PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep));
  PetscCall(VecRestoreArray(*vv,&larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to));
  PetscCall(VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate));
  PetscCall(PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));

  /* set local to global mapping for ghosted vector */
  PetscCall(PetscMalloc1(n+nghost,&indices));
  PetscCall(VecGetOwnershipRange(*vv,&rstart,NULL));
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  PetscCall(ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog));
  PetscCall(VecSetLocalToGlobalMapping(*vv,ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  PetscFunctionReturn(0);
}

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreateGhostWithArray(comm,n,N,nghost,ghosts,NULL,vv));
  PetscFunctionReturn(0);
}
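
/*
   A minimal ghost-update sketch for VecCreateGhost (illustrative only; n and the two ghost
   indices are placeholders and assume more than one process).  The local form has length
   n + nghost, with the ghost values stored after the owned entries.

     Vec      x,xlocal;
     PetscInt ghosts[2] = {0,1};
     PetscCall(VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,2,ghosts,&x));
     PetscCall(VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD));
     PetscCall(VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD));
     PetscCall(VecGhostGetLocalForm(x,&xlocal));
     PetscCall(VecGhostRestoreLocalForm(x,&xlocal));
     PetscCall(VecDestroy(&x));
*/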

/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscBool      flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg));
  /* if already a fully existent VECMPI then destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    PetscCall(PetscObjectGetComm((PetscObject)vv,&comm));
    n    = vv->map->n;
    N    = vv->map->N;
    PetscCall((*vv->ops->destroy)(vv));
    PetscCall(VecSetSizes(vv,n,N));
    PetscCall(VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL));
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    PetscCall(VecGetArray(vv,&larray));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep));
    PetscCall(PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep));
    PetscCall(VecRestoreArray(vv,&larray));

    /*
     Create scatter context for scattering (updating) ghost values
     */
    PetscCall(ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from));
    PetscCall(ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to));
    PetscCall(VecScatterCreate(vv,from,w->localrep,to,&w->localupdate));
    PetscCall(PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate));
    PetscCall(ISDestroy(&to));
    PetscCall(ISDestroy(&from));

    /* set local to global mapping for ghosted vector */
    PetscCall(PetscMalloc1(n+nghost,&indices));
    PetscCall(VecGetOwnershipRange(vv,&rstart,NULL));

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    PetscCall(ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog));
    PetscCall(VecSetLocalToGlobalMapping(vv,ltog));
    PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  } else {
    PetscCheck(vv->ops->create != VecCreate_MPI,PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
    PetscCheck(((PetscObject)vv)->type_name,PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  }
  PetscFunctionReturn(0);
}
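
/*
   Call-ordering sketch for VecMPISetGhost (illustrative only; n, nghost, and ghosts are
   placeholders).  The sizes and the type must be set before the ghosts are attached, as the
   checks above enforce.

     PetscCall(VecCreate(PETSC_COMM_WORLD,&x));
     PetscCall(VecSetSizes(x,n,PETSC_DECIDE));
     PetscCall(VecSetType(x,VECMPI));
     PetscCall(VecMPISetGhost(x,nghost,ghosts));
*/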

/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (must be of length at least n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = NULL;

  PetscCheck(n != PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  PetscCheck(nghost != PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  PetscCheck(nghost >= 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  PetscCheck(n % bs == 0,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  PetscCall(PetscSplitOwnership(comm,&n,&N));
  /* Create global representation */
  PetscCall(VecCreate(comm,vv));
  PetscCall(VecSetSizes(*vv,n,N));
  PetscCall(VecSetBlockSize(*vv,bs));
  PetscCall(VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array));
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv,&larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep));
  PetscCall(PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep));
  PetscCall(VecRestoreArray(*vv,&larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to));
  PetscCall(VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate));
  PetscCall(PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));

  /* set local to global mapping for ghosted vector */
  nb     = n/bs;
  PetscCall(PetscMalloc1(nb+nghost,&indices));
  PetscCall(VecGetOwnershipRange(*vv,&rstart,NULL));
  rstart = rstart/bs;

  for (i=0; i<nb; i++)      indices[i]    = rstart + i;
  for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];

  PetscCall(ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog));
  PetscCall(VecSetLocalToGlobalMapping(*vv,ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  PetscFunctionReturn(0);
}
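
/*
   A minimal block-ghost sketch for VecCreateGhostBlockWithArray (illustrative only; bs, n,
   nghost, and the ghost block index are made up).  Ghosts are counted in blocks, so the user
   array must hold at least n + bs*nghost scalars.

     PetscInt    ghosts[1] = {0};
     PetscScalar array[8 + 2*1];
     Vec         x;
     PetscCall(VecCreateGhostBlockWithArray(PETSC_COMM_WORLD,2,8,PETSC_DECIDE,1,ghosts,array,&x));
     PetscCall(VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD));
     PetscCall(VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD));
     PetscCall(VecDestroy(&x));
*/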

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,NULL,vv));
  PetscFunctionReturn(0);
}
933