
/*
   This file contains routines for Parallel vector operations.
 */
#include <petscsys.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRMPI(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRMPI(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,NULL);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

static PetscErrorCode VecSetOption_MPI(Vec V,VecOption op,PetscBool flag)
{
  Vec_MPI        *v = (Vec_MPI*)V->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag;
    break;
  case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag;
    break;
  case VEC_SUBSET_OFF_PROC_ENTRIES:
    v->assembly_subset = flag; /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
    if (!v->assembly_subset) { /* User indicates "do not reuse the communication pattern" */
      ierr = VecAssemblyReset_MPI(V);CHKERRQ(ierr); /* Reset existing pattern to free memory */
      v->first_assembly_done = PETSC_FALSE; /* Mark that the first assembly has not been done */
    }
    break;
  }

  PetscFunctionReturn(0);
}
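
/*
   Illustrative sketch (not part of this file's implementation): how a caller exploits
   VEC_SUBSET_OFF_PROC_ENTRIES so the communication pattern built during the first assembly is
   reused by later assemblies that set only a subset of the same off-process entries.

     ierr = VecSetOption(x,VEC_SUBSET_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
     for (step=0; step<nsteps; step++) {
       ierr = VecSetValues(x,m,idx,vals,ADD_VALUES);CHKERRQ(ierr);  (indices must be a subset of those used in the first assembly)
       ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
       ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
     }
*/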

static PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI        *v = (Vec_MPI*)vin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = NULL;
  if (v->localrep) {
    ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
  PetscInt bs = X->map->bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
     messages can be empty, but we have to send them this time if we sent them before because the
     receiver is expecting them.
   */
  if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
    ierr = MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRMPI(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRMPI(ierr);
  }
  if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
    ierr = MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRMPI(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRMPI(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
  PetscErrorCode ierr;
  PetscInt bs = X->map->bs;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  ierr = PetscSegBufferGet(x->segrecvframe,1,&frame);CHKERRQ(ierr);

  if (hdr->count) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRMPI(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRMPI(ierr);
    frame->pendings = 2;
  } else {
    frame->ints = NULL;
    frame->scalars = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRMPI(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRMPI(ierr);
    frame->pendingb = 2;
  } else {
    frame->intb = NULL;
    frame->scalarb = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI        *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  PetscInt       i,j,jb,bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(0);

  ierr = PetscObjectGetComm((PetscObject)X,&comm);CHKERRQ(ierr);
  ierr = VecGetBlockSize(X,&bs);CHKERRQ(ierr);
  if (PetscDefined(USE_DEBUG)) {
    InsertMode addv;
    ierr = MPIU_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);CHKERRMPI(ierr);
    if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
  }
  X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */

  ierr = VecStashSortCompress_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashSortCompress_Private(&X->bstash);CHKERRQ(ierr);

  if (!x->sendranks) {
    PetscMPIInt nowners,bnowners,*owners,*bowners;
    PetscInt ntmp;
    ierr = VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners);CHKERRQ(ierr);
    ierr = VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners);CHKERRQ(ierr);
    ierr = PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks);CHKERRQ(ierr);
    x->nsendranks = ntmp;
    ierr = PetscFree(owners);CHKERRQ(ierr);
    ierr = PetscFree(bowners);CHKERRQ(ierr);
    ierr = PetscMalloc1(x->nsendranks,&x->sendhdr);CHKERRQ(ierr);
    ierr = PetscCalloc1(x->nsendranks,&x->sendptrs);CHKERRQ(ierr);
  }
  for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
    PetscMPIInt rank = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for (; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
      for (; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) {ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint);CHKERRQ(ierr);}
  if (!x->segrecvscalar) {ierr = PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar);CHKERRQ(ierr);}
  if (!x->segrecvframe) {ierr = PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe);CHKERRQ(ierr);}
  if (x->first_assembly_done) { /* this is not the first assembly */
    PetscMPIInt tag[4];
    for (i=0; i<4; i++) {ierr = PetscCommGetNewTag(comm,&tag[i]);CHKERRQ(ierr);}
    for (i=0; i<x->nsendranks; i++) {
      ierr = VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X);CHKERRQ(ierr);
    }
    for (i=0; i<x->nrecvranks; i++) {
      ierr = VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X);CHKERRQ(ierr);
    }
    x->use_status = PETSC_TRUE;
  } else { /* First time assembly */
    ierr = PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X);CHKERRQ(ierr);
    x->use_status = PETSC_FALSE;
  }

  /* The first_assembly_done flag is only meaningful when x->assembly_subset is set:
     when assembly_subset is set, mark the first assembly as done so that subsequent
     assemblies can reuse its communication pattern.
   */
  x->first_assembly_done = x->assembly_subset;

  {
    PetscInt nstash,reallocs;
    ierr = VecStashGetInfo_Private(&X->stash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
    ierr = VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscInt bs = X->map->bs;
  PetscMPIInt npending,*some_indices,r;
  MPI_Status  *some_statuses;
  PetscScalar *xarray;
  PetscErrorCode ierr;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(0);
  }

  if (!x->segrecvframe) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
  ierr = VecGetArray(X,&xarray);CHKERRQ(ierr);
  ierr = PetscSegBufferExtractInPlace(x->segrecvframe,&frame);CHKERRQ(ierr);
  ierr = PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses);CHKERRQ(ierr);
  for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending>0) {
    PetscMPIInt ndone=0,ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    ierr = MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE);CHKERRMPI(ierr);
    for (ii=0; ii<ndone; ii++) {
      PetscInt i = some_indices[ii]/4,j,k;
      InsertMode imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt *recvint;
      PetscScalar *recvscalar;
      PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
      PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRMPI(ierr);
        } else count = x->recvhdr[i].count;
        for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          if (*recvint < X->map->rstart || X->map->rend <= *recvint) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %D out of local range [%D,%D)",*recvint,X->map->rstart,X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      } else {                  /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRMPI(ierr);
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
          PetscInt loc = (*recvint)*bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      }
    }
  }
  ierr = VecRestoreArray(X,&xarray);CHKERRQ(ierr);
  ierr = MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE);CHKERRMPI(ierr);
  ierr = PetscFree2(some_indices,some_statuses);CHKERRQ(ierr);
  if (x->assembly_subset) {
    void *dummy;                /* reset segbuffers */
    ierr = PetscSegBufferExtractInPlace(x->segrecvint,&dummy);CHKERRQ(ierr);
    ierr = PetscSegBufferExtractInPlace(x->segrecvscalar,&dummy);CHKERRQ(ierr);
  } else {
    ierr = VecAssemblyReset_MPI(X);CHKERRQ(ierr);
  }

  X->stash.insertmode = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  ierr = VecStashScatterEnd_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashScatterEnd_Private(&X->bstash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(x->sendreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->recvreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->sendranks);CHKERRQ(ierr);
  ierr = PetscFree(x->recvranks);CHKERRQ(ierr);
  ierr = PetscFree(x->sendhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->recvhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->sendptrs);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvint);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvscalar);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvframe);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode VecSetFromOptions_MPI(PetscOptionItems *PetscOptionsObject,Vec X)
{
#if !defined(PETSC_HAVE_MPIUNI)
  PetscErrorCode ierr;
  PetscBool      flg = PETSC_FALSE,set;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"VecMPI Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-vec_assembly_legacy","Use MPI 1 version of assembly","",flg,&flg,&set);CHKERRQ(ierr);
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI   : VecAssemblyEnd_MPI_BTS;
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
#else
  X->ops->assemblybegin =  VecAssemblyBegin_MPI;
  X->ops->assemblyend   =  VecAssemblyEnd_MPI;
#endif
  PetscFunctionReturn(0);
}
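
/*
   Usage note (illustrative): the assembly algorithm can be chosen at runtime through the options
   database, provided the vector calls VecSetFromOptions(), e.g.

     ./app -vec_assembly_legacy      (use the MPI-1 stash/scatter assembly, VecAssemblyBegin/End_MPI)
     ./app                           (default: the build-two-sided assembly, VecAssemblyBegin/End_MPI_BTS)
*/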

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI, /* 20 */
                                VecAssemblyBegin_MPI_BTS,
                                VecAssemblyEnd_MPI_BTS,
                                NULL,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                NULL,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                NULL,
                                NULL,
                                VecResetArray_MPI,
                                VecSetFromOptions_MPI,/*set from options */
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                NULL,
                                NULL,
                                NULL,
                                NULL,
                                NULL,
                                NULL,
                                VecStrideGather_Default,
                                VecStrideScatter_Default,
                                NULL,
                                NULL,
                                NULL,
                                NULL,
                                NULL,
                                VecStrideSubSetGather_Default,
                                VecStrideSubSetScatter_Default,
                                NULL,
                                NULL,
                                NULL
};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;
  if (array) v->offloadmask = PETSC_OFFLOAD_CPU;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);

  s->array           = (PetscScalar*)array;
  s->array_allocated = NULL;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr               = PetscCalloc1(n,&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = NULL;
  s->localupdate = NULL;

  v->stash.insertmode = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
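
/*
   For orientation, the calling patterns used elsewhere in this file (sketch only):

     VecCreate_MPI_Private(v,PETSC_TRUE ,0     ,NULL);   VecCreate_MPI(): allocate (and zero) map->n entries
     VecCreate_MPI_Private(v,PETSC_FALSE,0     ,array);  VecCreateMPIWithArray(): use caller-provided storage
     VecCreate_MPI_Private(v,PETSC_TRUE ,nghost,array);  VecCreateGhostWithArray(): caller storage of length n+nghost
*/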

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECSEQ, VecType, VecCreateMPI()
M*/
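
/*
   Example (sketch): creating a VECMPI explicitly rather than through VecSetFromOptions()

     Vec            x;
     PetscErrorCode ierr;

     ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
     ierr = VecSetSizes(x,PETSC_DECIDE,N);CHKERRQ(ierr);
     ierr = VecSetType(x,VECMPI);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/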

PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/
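
/*
   Example (sketch): letting the type default to VECSTANDARD, which resolves to VECSEQ or VECMPI
   depending on the size of the communicator

     ierr = VecCreate(comm,&x);CHKERRQ(ierr);
     ierr = VecSetSizes(x,n,PETSC_DECIDE);CHKERRQ(ierr);
     ierr = VecSetFromOptions(x);CHKERRQ(ierr);   (equivalent to -vec_type standard)
*/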

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRMPI(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
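
/*
   Usage sketch (assumes a caller-managed buffer of local length n; PETSc will not free it):

     PetscScalar *a;
     Vec          x;

     ierr = PetscMalloc1(n,&a);CHKERRQ(ierr);
     ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,n,PETSC_DECIDE,a,&x);CHKERRQ(ierr);
     ...  use x; its entries live in a ...
     ierr = VecDestroy(&x);CHKERRQ(ierr);
     ierr = PetscFree(a);CHKERRQ(ierr);   free only after the vector has been destroyed
*/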

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (of length at least n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = NULL;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
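
/*
   Usage sketch: the caller-provided storage must hold the owned entries followed by the ghost
   entries, i.e. at least n+nghost scalars.

     PetscScalar *a;
     Vec          x;

     ierr = PetscMalloc1(n+nghost,&a);CHKERRQ(ierr);
     ierr = VecCreateGhostWithArray(comm,n,PETSC_DECIDE,nghost,ghosts,a,&x);CHKERRQ(ierr);
*/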

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,NULL,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
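
/*
   Usage sketch of the typical ghost-update cycle; VecGhostUpdateBegin/End() and
   VecGhostGetLocalForm() (see their man pages) are the usual companions:

     Vec x,xlocal;

     ierr = VecCreateGhost(comm,n,PETSC_DECIDE,nghost,ghosts,&x);CHKERRQ(ierr);
     ... set owned entries of x ...
     ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);  push owned values into the ghost slots
     ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostGetLocalForm(x,&xlocal);CHKERRQ(ierr);                       sequential view of owned+ghost entries
     ... read xlocal ...
     ierr = VecGhostRestoreLocalForm(x,&xlocal);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/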

/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if this is already a fully built VECMPI then destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
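
/*
   Usage sketch: the type and the sizes must be set before the ghosts are attached.

     ierr = VecCreate(comm,&x);CHKERRQ(ierr);
     ierr = VecSetType(x,VECMPI);CHKERRQ(ierr);
     ierr = VecSetSizes(x,n,PETSC_DECIDE);CHKERRQ(ierr);
     ierr = VecMPISetGhost(x,nghost,ghosts);CHKERRQ(ierr);
*/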

/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (of length at least n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = NULL;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb     = n/bs;
  ierr   = PetscMalloc1(nb+nghost,&indices);CHKERRQ(ierr);
  ierr   = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  rstart = rstart/bs;

  for (i=0; i<nb; i++)      indices[i]    = rstart + i;
  for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];

  ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
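
/*
   Usage sketch: n counts scalar entries (and must be a multiple of bs), while nghost counts ghost
   blocks, so the caller-provided array must hold at least n + bs*nghost scalars.

     PetscScalar *a;

     ierr = PetscMalloc1(n+bs*nghost,&a);CHKERRQ(ierr);
     ierr = VecCreateGhostBlockWithArray(comm,bs,n,PETSC_DECIDE,nghost,ghostblocks,a,&x);CHKERRQ(ierr);
*/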

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,NULL,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
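
/*
   Usage sketch: ghost indices are block indices, e.g. with bs=3 a ghost block index g refers to the
   global scalar entries 3*g, 3*g+1, 3*g+2; updates then follow the usual ghost-update cycle.

     ierr = VecCreateGhostBlock(comm,bs,n,PETSC_DECIDE,nghost,ghostblocks,&x);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
*/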
957