xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision d5c9c0c4eebc2f2a01a1bd0c86fca87e2acd2a03)
1 
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include <petscsys.h>
6 #include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/
7 
8 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
9 {
10   PetscScalar    sum,work;
11   PetscErrorCode ierr;
12 
13   PetscFunctionBegin;
14   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
15   ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
16   *z   = sum;
17   PetscFunctionReturn(0);
18 }
19 
20 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
21 {
22   PetscScalar    sum,work;
23   PetscErrorCode ierr;
24 
25   PetscFunctionBegin;
26   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
27   ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
28   *z   = sum;
29   PetscFunctionReturn(0);
30 }
31 
32 extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
33 
34 static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
35 {
36   PetscErrorCode ierr;
37   Vec_MPI        *v = (Vec_MPI*)vin->data;
38 
39   PetscFunctionBegin;
40   if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
41   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
42   v->array         = (PetscScalar*)a;
43   if (v->localrep) {
44     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
45   }
46   PetscFunctionReturn(0);
47 }
48 
49 static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
50 {
51   PetscErrorCode ierr;
52   Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
53   PetscScalar    *array;
54 
55   PetscFunctionBegin;
56   ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
57   ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);
58 
59   ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
60   vw   = (Vec_MPI*)(*v)->data;
61   ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
62 
63   /* save local representation of the parallel vector (and scatter) if it exists */
64   if (w->localrep) {
65     ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
66     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
67     ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
68     ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
69     ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);
70 
71     vw->localupdate = w->localupdate;
72     if (vw->localupdate) {
73       ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
74     }
75   }
76 
77   /* New vector should inherit stashing property of parent */
78   (*v)->stash.donotstash   = win->stash.donotstash;
79   (*v)->stash.ignorenegidx = win->stash.ignorenegidx;
80 
81   ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
82   ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
83 
84   (*v)->map->bs   = PetscAbs(win->map->bs);
85   (*v)->bstash.bs = win->bstash.bs;
86   PetscFunctionReturn(0);
87 }
88 
89 
90 static PetscErrorCode VecSetOption_MPI(Vec V,VecOption op,PetscBool flag)
91 {
92   Vec_MPI        *v = (Vec_MPI*)V->data;
93   PetscErrorCode ierr;
94 
95   PetscFunctionBegin;
96   switch (op) {
97   case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag;
98     break;
99   case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag;
100     break;
101   case VEC_SUBSET_OFF_PROC_ENTRIES:
102     v->assembly_subset = flag; /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
103     if (!v->assembly_subset) { /* User indicates "do not reuse the communication pattern" */
104       ierr = VecAssemblyReset_MPI(V);CHKERRQ(ierr); /* Reset existing pattern to free memory */
105       v->first_assembly_done = PETSC_FALSE; /* mark that the first assembly has not been done */
106     }
107     break;
108   }
109 
110   PetscFunctionReturn(0);
111 }
112 
113 
114 static PetscErrorCode VecResetArray_MPI(Vec vin)
115 {
116   Vec_MPI        *v = (Vec_MPI*)vin->data;
117   PetscErrorCode ierr;
118 
119   PetscFunctionBegin;
120   v->array         = v->unplacedarray;
121   v->unplacedarray = 0;
122   if (v->localrep) {
123     ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
124   }
125   PetscFunctionReturn(0);
126 }
127 
128 static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
129 {
130   Vec X = (Vec)ctx;
131   Vec_MPI *x = (Vec_MPI*)X->data;
132   VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
133   PetscInt bs = X->map->bs;
134   PetscErrorCode ierr;
135 
136   PetscFunctionBegin;
137   /* x->first_assembly_done indicates that we are reusing a communication pattern. In that case some
138      messages can be empty, but if they were sent in the first assembly they must be sent again,
139      because the receiver is expecting them.
140    */
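  /* Request/tag layout used by the BTS assembly: req[0]/tag[0] carry the scalar-stash indices,
     req[1]/tag[1] the scalar-stash values, req[2]/tag[2] the block-stash indices, and
     req[3]/tag[3] the block-stash values. */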
141   if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
142     ierr = MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
143     ierr = MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
144   }
145   if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
146     ierr = MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
147     ierr = MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
148   }
149   PetscFunctionReturn(0);
150 }
151 
152 static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
153 {
154   Vec X = (Vec)ctx;
155   Vec_MPI *x = (Vec_MPI*)X->data;
156   VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
157   PetscErrorCode ierr;
158   PetscInt bs = X->map->bs;
159   VecAssemblyFrame *frame;
160 
161   PetscFunctionBegin;
162   ierr = PetscSegBufferGet(x->segrecvframe,1,&frame);CHKERRQ(ierr);
163 
164   if (hdr->count) {
165     ierr = PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints);CHKERRQ(ierr);
166     ierr = MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
167     ierr = PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars);CHKERRQ(ierr);
168     ierr = MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
169     frame->pendings = 2;
170   } else {
171     frame->ints = NULL;
172     frame->scalars = NULL;
173     frame->pendings = 0;
174   }
175 
176   if (hdr->bcount) {
177     ierr = PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb);CHKERRQ(ierr);
178     ierr = MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
179     ierr = PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb);CHKERRQ(ierr);
180     ierr = MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
181     frame->pendingb = 2;
182   } else {
183     frame->intb = NULL;
184     frame->scalarb = NULL;
185     frame->pendingb = 0;
186   }
187   PetscFunctionReturn(0);
188 }
189 
190 static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
191 {
192   Vec_MPI        *x = (Vec_MPI*)X->data;
193   PetscErrorCode ierr;
194   MPI_Comm       comm;
195   PetscInt       i,j,jb,bs;
196 
197   PetscFunctionBegin;
198   if (X->stash.donotstash) PetscFunctionReturn(0);
199 
200   ierr = PetscObjectGetComm((PetscObject)X,&comm);CHKERRQ(ierr);
201   ierr = VecGetBlockSize(X,&bs);CHKERRQ(ierr);
202   if (PetscDefined(USE_DEBUG)) {
203     InsertMode addv;
204     ierr = MPIU_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);CHKERRQ(ierr);
205     if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
206   }
207   X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */
208 
209   ierr = VecStashSortCompress_Private(&X->stash);CHKERRQ(ierr);
210   ierr = VecStashSortCompress_Private(&X->bstash);CHKERRQ(ierr);
211 
212   if (!x->sendranks) {
213     PetscMPIInt nowners,bnowners,*owners,*bowners;
214     PetscInt ntmp;
215     ierr = VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners);CHKERRQ(ierr);
216     ierr = VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners);CHKERRQ(ierr);
217     ierr = PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks);CHKERRQ(ierr);
218     x->nsendranks = ntmp;
219     ierr = PetscFree(owners);CHKERRQ(ierr);
220     ierr = PetscFree(bowners);CHKERRQ(ierr);
221     ierr = PetscMalloc1(x->nsendranks,&x->sendhdr);CHKERRQ(ierr);
222     ierr = PetscCalloc1(x->nsendranks,&x->sendptrs);CHKERRQ(ierr);
223   }
224   for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
225     PetscMPIInt rank = x->sendranks[i];
226     x->sendhdr[i].insertmode = X->stash.insertmode;
227     /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
228      * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
229      * there is nothing new to send, so that size-zero messages get sent instead. */
230     x->sendhdr[i].count = 0;
231     if (X->stash.n) {
232       x->sendptrs[i].ints    = &X->stash.idx[j];
233       x->sendptrs[i].scalars = &X->stash.array[j];
234       for ( ; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
235     }
236     x->sendhdr[i].bcount = 0;
237     if (X->bstash.n) {
238       x->sendptrs[i].intb    = &X->bstash.idx[jb];
239       x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
240       for ( ; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
241     }
242   }
243 
244   if (!x->segrecvint) {ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint);CHKERRQ(ierr);}
245   if (!x->segrecvscalar) {ierr = PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar);CHKERRQ(ierr);}
246   if (!x->segrecvframe) {ierr = PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe);CHKERRQ(ierr);}
247   if (x->first_assembly_done) { /* this is not the first assembly */
248     PetscMPIInt tag[4];
249     for (i=0; i<4; i++) {ierr = PetscCommGetNewTag(comm,&tag[i]);CHKERRQ(ierr);}
250     for (i=0; i<x->nsendranks; i++) {
251       ierr = VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X);CHKERRQ(ierr);
252     }
253     for (i=0; i<x->nrecvranks; i++) {
254       ierr = VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X);CHKERRQ(ierr);
255     }
256     x->use_status = PETSC_TRUE;
257   } else { /* First time assembly */
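    /* The two-sided rendezvous below determines which ranks send to this process (filling nrecvranks,
       recvranks, and recvhdr) and posts the initial nonblocking messages through the send/recv
       callbacks above. */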
258     ierr = PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X);CHKERRQ(ierr);
259     x->use_status = PETSC_FALSE;
260   }
261 
262   /* The first_assembly_done flag is only meaningful when x->assembly_subset is set:
263      in that case, record here that the first assembly has now been done.
264    */
265   x->first_assembly_done = x->assembly_subset;
266 
267   {
268     PetscInt nstash,reallocs;
269     ierr = VecStashGetInfo_Private(&X->stash,&nstash,&reallocs);CHKERRQ(ierr);
270     ierr = PetscInfo2(X,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
271     ierr = VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs);CHKERRQ(ierr);
272     ierr = PetscInfo2(X,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
273   }
274   PetscFunctionReturn(0);
275 }
276 
277 static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
278 {
279   Vec_MPI *x = (Vec_MPI*)X->data;
280   PetscInt bs = X->map->bs;
281   PetscMPIInt npending,*some_indices,r;
282   MPI_Status  *some_statuses;
283   PetscScalar *xarray;
284   PetscErrorCode ierr;
285   VecAssemblyFrame *frame;
286 
287   PetscFunctionBegin;
288   if (X->stash.donotstash) {
289     X->stash.insertmode = NOT_SET_VALUES;
290     X->bstash.insertmode = NOT_SET_VALUES;
291     PetscFunctionReturn(0);
292   }
293 
294   if (!x->segrecvframe) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
295   ierr = VecGetArray(X,&xarray);CHKERRQ(ierr);
296   ierr = PetscSegBufferExtractInPlace(x->segrecvframe,&frame);CHKERRQ(ierr);
297   ierr = PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses);CHKERRQ(ierr);
298   for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
299   while (npending>0) {
300     PetscMPIInt ndone=0,ii;
301     /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
302      * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
303      * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
304      * subsequent assembly can set a proper subset of the values. */
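    /* Each receiving rank owns four consecutive requests (see the layout noted in
       VecAssemblySend_MPI_Private), so below some_indices[ii]/4 recovers the rank slot,
       (some_indices[ii]%4)/2 distinguishes the block stash from the scalar stash, and
       some_indices[ii]%2 distinguishes index messages from value messages. */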
305     ierr = MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE);CHKERRQ(ierr);
306     for (ii=0; ii<ndone; ii++) {
307       PetscInt i = some_indices[ii]/4,j,k;
308       InsertMode imode = (InsertMode)x->recvhdr[i].insertmode;
309       PetscInt *recvint;
310       PetscScalar *recvscalar;
311       PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
312       PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
313       npending--;
314       if (!blockmsg) { /* Scalar stash */
315         PetscMPIInt count;
316         if (--frame[i].pendings > 0) continue;
317         if (x->use_status) {
318           ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
319         } else count = x->recvhdr[i].count;
320         for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
321           PetscInt loc = *recvint - X->map->rstart;
322           if (*recvint < X->map->rstart || X->map->rend <= *recvint) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %D out of local range [%D,%D)",*recvint,X->map->rstart,X->map->rend);
323           switch (imode) {
324           case ADD_VALUES:
325             xarray[loc] += *recvscalar++;
326             break;
327           case INSERT_VALUES:
328             xarray[loc] = *recvscalar++;
329             break;
330           default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
331           }
332         }
333       } else {                  /* Block stash */
334         PetscMPIInt count;
335         if (--frame[i].pendingb > 0) continue;
336         if (x->use_status) {
337           ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
338           if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
339         } else count = x->recvhdr[i].bcount;
340         for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
341           PetscInt loc = (*recvint)*bs - X->map->rstart;
342           switch (imode) {
343           case ADD_VALUES:
344             for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
345             break;
346           case INSERT_VALUES:
347             for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
348             break;
349           default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
350           }
351         }
352       }
353     }
354   }
355   ierr = VecRestoreArray(X,&xarray);CHKERRQ(ierr);
356   ierr = MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
357   ierr = PetscFree2(some_indices,some_statuses);CHKERRQ(ierr);
358   if (x->assembly_subset) {
359     void *dummy;                /* reset segbuffers */
360     ierr = PetscSegBufferExtractInPlace(x->segrecvint,&dummy);CHKERRQ(ierr);
361     ierr = PetscSegBufferExtractInPlace(x->segrecvscalar,&dummy);CHKERRQ(ierr);
362   } else {
363     ierr = VecAssemblyReset_MPI(X);CHKERRQ(ierr);
364   }
365 
366   X->stash.insertmode = NOT_SET_VALUES;
367   X->bstash.insertmode = NOT_SET_VALUES;
368   ierr = VecStashScatterEnd_Private(&X->stash);CHKERRQ(ierr);
369   ierr = VecStashScatterEnd_Private(&X->bstash);CHKERRQ(ierr);
370   PetscFunctionReturn(0);
371 }
372 
373 PetscErrorCode VecAssemblyReset_MPI(Vec X)
374 {
375   Vec_MPI *x = (Vec_MPI*)X->data;
376   PetscErrorCode ierr;
377 
378   PetscFunctionBegin;
379   ierr = PetscFree(x->sendreqs);CHKERRQ(ierr);
380   ierr = PetscFree(x->recvreqs);CHKERRQ(ierr);
381   ierr = PetscFree(x->sendranks);CHKERRQ(ierr);
382   ierr = PetscFree(x->recvranks);CHKERRQ(ierr);
383   ierr = PetscFree(x->sendhdr);CHKERRQ(ierr);
384   ierr = PetscFree(x->recvhdr);CHKERRQ(ierr);
385   ierr = PetscFree(x->sendptrs);CHKERRQ(ierr);
386   ierr = PetscSegBufferDestroy(&x->segrecvint);CHKERRQ(ierr);
387   ierr = PetscSegBufferDestroy(&x->segrecvscalar);CHKERRQ(ierr);
388   ierr = PetscSegBufferDestroy(&x->segrecvframe);CHKERRQ(ierr);
389   PetscFunctionReturn(0);
390 }
391 
392 
393 static PetscErrorCode VecSetFromOptions_MPI(PetscOptionItems *PetscOptionsObject,Vec X)
394 {
395 #if !defined(PETSC_HAVE_MPIUNI)
396   PetscErrorCode ierr;
397   PetscBool      flg = PETSC_FALSE,set;
398 
399   PetscFunctionBegin;
400   ierr = PetscOptionsHead(PetscOptionsObject,"VecMPI Options");CHKERRQ(ierr);
401   ierr = PetscOptionsBool("-vec_assembly_legacy","Use MPI 1 version of assembly","",flg,&flg,&set);CHKERRQ(ierr);
402   if (set) {
403     X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
404     X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI   : VecAssemblyEnd_MPI_BTS;
405   }
406   ierr = PetscOptionsTail();CHKERRQ(ierr);
407 #else
408   X->ops->assemblybegin =  VecAssemblyBegin_MPI;
409   X->ops->assemblyend   =  VecAssemblyEnd_MPI;
410 #endif
411   PetscFunctionReturn(0);
412 }
413 
414 
415 static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
416                                 VecDuplicateVecs_Default,
417                                 VecDestroyVecs_Default,
418                                 VecDot_MPI,
419                                 VecMDot_MPI,
420                                 VecNorm_MPI,
421                                 VecTDot_MPI,
422                                 VecMTDot_MPI,
423                                 VecScale_Seq,
424                                 VecCopy_Seq, /* 10 */
425                                 VecSet_Seq,
426                                 VecSwap_Seq,
427                                 VecAXPY_Seq,
428                                 VecAXPBY_Seq,
429                                 VecMAXPY_Seq,
430                                 VecAYPX_Seq,
431                                 VecWAXPY_Seq,
432                                 VecAXPBYPCZ_Seq,
433                                 VecPointwiseMult_Seq,
434                                 VecPointwiseDivide_Seq,
435                                 VecSetValues_MPI, /* 20 */
436                                 VecAssemblyBegin_MPI_BTS,
437                                 VecAssemblyEnd_MPI_BTS,
438                                 0,
439                                 VecGetSize_MPI,
440                                 VecGetSize_Seq,
441                                 0,
442                                 VecMax_MPI,
443                                 VecMin_MPI,
444                                 VecSetRandom_Seq,
445                                 VecSetOption_MPI,
446                                 VecSetValuesBlocked_MPI,
447                                 VecDestroy_MPI,
448                                 VecView_MPI,
449                                 VecPlaceArray_MPI,
450                                 VecReplaceArray_Seq,
451                                 VecDot_Seq,
452                                 VecTDot_Seq,
453                                 VecNorm_Seq,
454                                 VecMDot_Seq,
455                                 VecMTDot_Seq,
456                                 VecLoad_Default,
457                                 VecReciprocal_Default,
458                                 VecConjugate_Seq,
459                                 0,
460                                 0,
461                                 VecResetArray_MPI,
462                                 VecSetFromOptions_MPI, /* set from options */
463                                 VecMaxPointwiseDivide_Seq,
464                                 VecPointwiseMax_Seq,
465                                 VecPointwiseMaxAbs_Seq,
466                                 VecPointwiseMin_Seq,
467                                 VecGetValues_MPI,
468                                 0,
469                                 0,
470                                 0,
471                                 0,
472                                 0,
473                                 0,
474                                 VecStrideGather_Default,
475                                 VecStrideScatter_Default,
476                                 0,
477                                 0,
478                                 0,
479                                 0,
480                                 0,
481                                 VecStrideSubSetGather_Default,
482                                 VecStrideSubSetScatter_Default,
483                                 0,
484                                 0
485 };
486 
487 /*
488     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
489     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
490     VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()
491 
492     If alloc is true and array is NULL then this routine allocates the space, otherwise
493     no space is allocated.
494 */
495 PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
496 {
497   Vec_MPI        *s;
498   PetscErrorCode ierr;
499 
500   PetscFunctionBegin;
501   ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
502   v->data        = (void*)s;
503   ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
504   s->nghost      = nghost;
505   v->petscnative = PETSC_TRUE;
506 
507   ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
508 
509   s->array           = (PetscScalar*)array;
510   s->array_allocated = 0;
511   if (alloc && !array) {
512     PetscInt n = v->map->n+nghost;
513     ierr               = PetscCalloc1(n,&s->array);CHKERRQ(ierr);
514     ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
515     s->array_allocated = s->array;
516   }
517 
518   /* By default parallel vectors do not have local representation */
519   s->localrep    = 0;
520   s->localupdate = 0;
521 
522   v->stash.insertmode = NOT_SET_VALUES;
523   v->bstash.insertmode = NOT_SET_VALUES;
524   /* create the stashes. The block-size for bstash is set later when
525      VecSetValuesBlocked is called.
526   */
527   ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
528   ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);
529 
530 #if defined(PETSC_HAVE_MATLAB_ENGINE)
531   ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
532   ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
533 #endif
534   ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
535   PetscFunctionReturn(0);
536 }
537 
538 /*MC
539    VECMPI - VECMPI = "mpi" - The basic parallel vector
540 
541    Options Database Keys:
542 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
543 
544   Level: beginner
545 
546 .seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
547 M*/
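/*
   Illustrative usage sketch: create a VECMPI explicitly, add a couple of entries that may be owned by
   other processes, and assemble.  The helper name is hypothetical and error handling is abbreviated.

     #include <petscvec.h>

     PetscErrorCode exampleCreateMPIVec(MPI_Comm comm)
     {
       Vec            x;
       PetscInt       idx[2];
       PetscScalar    vals[2] = {1.0,2.0};
       PetscErrorCode ierr;

       PetscFunctionBegin;
       ierr = VecCreate(comm,&x);CHKERRQ(ierr);
       ierr = VecSetSizes(x,PETSC_DECIDE,100);CHKERRQ(ierr);
       ierr = VecSetType(x,VECMPI);CHKERRQ(ierr);
       idx[0] = 0; idx[1] = 99;                         // global indices, possibly off-process
       ierr = VecSetValues(x,2,idx,vals,ADD_VALUES);CHKERRQ(ierr);
       ierr = VecAssemblyBegin(x);CHKERRQ(ierr);        // stashes and communicates off-process entries
       ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
       ierr = VecDestroy(&x);CHKERRQ(ierr);
       PetscFunctionReturn(0);
     }
*/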
548 
549 PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv)
550 {
551   PetscErrorCode ierr;
552 
553   PetscFunctionBegin;
554   ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
555   PetscFunctionReturn(0);
556 }
557 
558 /*MC
559    VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process
560 
561    Options Database Keys:
562 . -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()
563 
564   Level: beginner
565 
566 .seealso: VecCreateSeq(), VecCreateMPI()
567 M*/
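/*
   Illustrative usage sketch: VECSTANDARD is typically what the default creation sequence produces,
   giving a VECSEQ on one rank and a VECMPI on several.  The helper name is hypothetical.

     #include <petscvec.h>

     PetscErrorCode exampleCreateStandardVec(MPI_Comm comm,PetscInt N,Vec *x)
     {
       PetscErrorCode ierr;

       PetscFunctionBegin;
       ierr = VecCreate(comm,x);CHKERRQ(ierr);
       ierr = VecSetSizes(*x,PETSC_DECIDE,N);CHKERRQ(ierr);
       ierr = VecSetType(*x,VECSTANDARD);CHKERRQ(ierr);   // or VecSetFromOptions(*x) with -vec_type standard
       PetscFunctionReturn(0);
     }
*/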
568 
569 PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
570 {
571   PetscErrorCode ierr;
572   PetscMPIInt    size;
573 
574   PetscFunctionBegin;
575   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
576   if (size == 1) {
577     ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
578   } else {
579     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
580   }
581   PetscFunctionReturn(0);
582 }
583 
584 /*@C
585    VecCreateMPIWithArray - Creates a parallel, array-style vector,
586    where the user provides the array space to store the vector values.
587 
588    Collective
589 
590    Input Parameters:
591 +  comm  - the MPI communicator to use
592 .  bs    - block size, same meaning as VecSetBlockSize()
593 .  n     - local vector length, cannot be PETSC_DECIDE
594 .  N     - global vector length (or PETSC_DECIDE to have it calculated)
595 -  array - the user provided array to store the vector values
596 
597    Output Parameter:
598 .  vv - the vector
599 
600    Notes:
601    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
602    same type as an existing vector.
603 
604    If the user-provided array is NULL, then VecPlaceArray() can be used
605    at a later stage to SET the array for storing the vector values.
606 
607    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
608    The user should not free the array until the vector is destroyed.
609 
610    Level: intermediate
611 
612 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
613           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
614 
615 @*/
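/*
   Illustrative usage sketch: supply user-owned storage for the local part of a parallel vector.  The
   helper name and sizes are hypothetical; the array must outlive the vector because VecDestroy() does
   not free it.

     #include <petscvec.h>

     PetscErrorCode exampleCreateMPIWithArray(MPI_Comm comm)
     {
       Vec            x;
       PetscScalar    *a;
       PetscInt       n = 10;                      // local length; cannot be PETSC_DECIDE here
       PetscErrorCode ierr;

       PetscFunctionBegin;
       ierr = PetscMalloc1(n,&a);CHKERRQ(ierr);
       ierr = VecCreateMPIWithArray(comm,1,n,PETSC_DECIDE,a,&x);CHKERRQ(ierr);
       ierr = VecSet(x,1.0);CHKERRQ(ierr);         // writes into the user-provided array
       ierr = VecDestroy(&x);CHKERRQ(ierr);        // does not free a
       ierr = PetscFree(a);CHKERRQ(ierr);
       PetscFunctionReturn(0);
     }
*/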
616 PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
617 {
618   PetscErrorCode ierr;
619 
620   PetscFunctionBegin;
621   if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
622   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
623   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
624   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
625   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
626   ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
627   PetscFunctionReturn(0);
628 }
629 
630 /*@C
631    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
632    the caller allocates the array space.
633 
634    Collective
635 
636    Input Parameters:
637 +  comm - the MPI communicator to use
638 .  n - local vector length
639 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
640 .  nghost - number of local ghost points
641 .  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
642 -  array - the space to store the vector values (must be of length at least n + nghost)
643 
644    Output Parameter:
645 .  vv - the global vector representation (without ghost points as part of vector)
646 
647    Notes:
648    Use VecGhostGetLocalForm() to access the local, ghosted representation
649    of the vector.
650 
651    This also automatically sets the ISLocalToGlobalMapping() for this vector.
652 
653    Level: advanced
654 
655 
656 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
657           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
658           VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()
659 
660 @*/
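/*
   Illustrative usage sketch: a ghosted vector over user-provided storage of length n+nghost, with the
   ghost slots refreshed from their owning processes.  The helper name, sizes, and ghost indices are
   hypothetical.

     #include <petscvec.h>

     PetscErrorCode exampleGhostWithArray(MPI_Comm comm)
     {
       Vec            x,xlocal;
       PetscScalar    *a;
       PetscInt       n = 4,nghost = 2,ghosts[2] = {0,5};   // global indices, normally owned by other ranks
       PetscErrorCode ierr;

       PetscFunctionBegin;
       ierr = PetscMalloc1(n+nghost,&a);CHKERRQ(ierr);
       ierr = VecCreateGhostWithArray(comm,n,PETSC_DECIDE,nghost,ghosts,a,&x);CHKERRQ(ierr);
       ierr = VecSet(x,1.0);CHKERRQ(ierr);
       ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);   // owners -> ghosts
       ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
       ierr = VecGhostGetLocalForm(x,&xlocal);CHKERRQ(ierr);                        // sequential view of length n+nghost
       ierr = VecGhostRestoreLocalForm(x,&xlocal);CHKERRQ(ierr);
       ierr = VecDestroy(&x);CHKERRQ(ierr);
       ierr = PetscFree(a);CHKERRQ(ierr);
       PetscFunctionReturn(0);
     }
*/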
661 PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
662 {
663   PetscErrorCode         ierr;
664   Vec_MPI                *w;
665   PetscScalar            *larray;
666   IS                     from,to;
667   ISLocalToGlobalMapping ltog;
668   PetscInt               rstart,i,*indices;
669 
670   PetscFunctionBegin;
671   *vv = 0;
672 
673   if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
674   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
675   if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
676   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
677   /* Create global representation */
678   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
679   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
680   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
681   w    = (Vec_MPI*)(*vv)->data;
682   /* Create local representation */
683   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
684   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
685   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
686   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
687 
688   /*
689        Create scatter context for scattering (updating) ghost values
690   */
691   ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
692   ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
693   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
694   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
695   ierr = ISDestroy(&to);CHKERRQ(ierr);
696   ierr = ISDestroy(&from);CHKERRQ(ierr);
697 
698   /* set local to global mapping for ghosted vector */
699   ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
700   ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
701   for (i=0; i<n; i++) {
702     indices[i] = rstart + i;
703   }
704   for (i=0; i<nghost; i++) {
705     indices[n+i] = ghosts[i];
706   }
707   ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
708   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
709   ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
710   PetscFunctionReturn(0);
711 }
712 
713 /*@
714    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
715 
716    Collective
717 
718    Input Parameters:
719 +  comm - the MPI communicator to use
720 .  n - local vector length
721 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
722 .  nghost - number of local ghost points
723 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
724 
725    Output Parameter:
726 .  vv - the global vector representation (without ghost points as part of vector)
727 
728    Notes:
729    Use VecGhostGetLocalForm() to access the local, ghosted representation
730    of the vector.
731 
732    This also automatically sets the ISLocalToGlobalMapping() for this vector.
733 
734    Level: advanced
735 
736 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
737           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
738           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
739           VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()
740 
741 @*/
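/*
   Illustrative usage sketch: the same idea as the VecCreateGhostWithArray() sketch, but with
   PETSc-allocated storage, working on the local (ghosted) form and then accumulating ghost
   contributions back to their owners.  The helper name, sizes, and indices are hypothetical.

     #include <petscvec.h>

     PetscErrorCode exampleGhost(MPI_Comm comm)
     {
       Vec            x,xlocal;
       PetscInt       n = 4,nghost = 2,ghosts[2] = {0,5};
       PetscErrorCode ierr;

       PetscFunctionBegin;
       ierr = VecCreateGhost(comm,n,PETSC_DECIDE,nghost,ghosts,&x);CHKERRQ(ierr);
       ierr = VecGhostGetLocalForm(x,&xlocal);CHKERRQ(ierr);
       ierr = VecSet(xlocal,1.0);CHKERRQ(ierr);                // also writes the ghost slots
       ierr = VecGhostRestoreLocalForm(x,&xlocal);CHKERRQ(ierr);
       ierr = VecGhostUpdateBegin(x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);   // ghosts -> owners
       ierr = VecGhostUpdateEnd(x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
       ierr = VecDestroy(&x);CHKERRQ(ierr);
       PetscFunctionReturn(0);
     }
*/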
742 PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
743 {
744   PetscErrorCode ierr;
745 
746   PetscFunctionBegin;
747   ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
748   PetscFunctionReturn(0);
749 }
750 
751 /*@
752    VecMPISetGhost - Sets the ghost points for an MPI ghost vector
753 
754    Collective on Vec
755 
756    Input Parameters:
757 +  vv - the MPI vector
758 .  nghost - number of local ghost points
759 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
760 
761 
762    Notes:
763    Use VecGhostGetLocalForm() to access the local, ghosted representation
764    of the vector.
765 
766    This also automatically sets the ISLocalToGlobalMapping() for this vector.
767 
768    You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).
769 
770    Level: advanced
771 
772 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
773           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
774           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
775           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
776 
777 @*/
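/*
   Illustrative usage sketch: attach ghosting to a vector created through the generic
   VecCreate()/VecSetSizes()/VecSetType() path, as the note above requires.  The helper name and
   indices are hypothetical.

     #include <petscvec.h>

     PetscErrorCode exampleSetGhost(MPI_Comm comm)
     {
       Vec            x;
       PetscInt       ghosts[2] = {0,5};
       PetscErrorCode ierr;

       PetscFunctionBegin;
       ierr = VecCreate(comm,&x);CHKERRQ(ierr);
       ierr = VecSetSizes(x,4,PETSC_DECIDE);CHKERRQ(ierr);
       ierr = VecSetType(x,VECMPI);CHKERRQ(ierr);
       ierr = VecMPISetGhost(x,2,ghosts);CHKERRQ(ierr);
       ierr = VecDestroy(&x);CHKERRQ(ierr);
       PetscFunctionReturn(0);
     }
*/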
778 PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
779 {
780   PetscErrorCode ierr;
781   PetscBool      flg;
782 
783   PetscFunctionBegin;
784   ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
785   /* if this is already a fully instantiated VECMPI then destroy it and rebuild it with ghosting */
786   if (flg) {
787     PetscInt               n,N;
788     Vec_MPI                *w;
789     PetscScalar            *larray;
790     IS                     from,to;
791     ISLocalToGlobalMapping ltog;
792     PetscInt               rstart,i,*indices;
793     MPI_Comm               comm;
794 
795     ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
796     n    = vv->map->n;
797     N    = vv->map->N;
798     ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
799     ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
800     ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
801     w    = (Vec_MPI*)(vv)->data;
802     /* Create local representation */
803     ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
804     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
805     ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
806     ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);
807 
808     /*
809      Create scatter context for scattering (updating) ghost values
810      */
811     ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
812     ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
813     ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
814     ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
815     ierr = ISDestroy(&to);CHKERRQ(ierr);
816     ierr = ISDestroy(&from);CHKERRQ(ierr);
817 
818     /* set local to global mapping for ghosted vector */
819     ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
820     ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);
821 
822     for (i=0; i<n; i++)      indices[i]   = rstart + i;
823     for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];
824 
825     ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
826     ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
827     ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
828   } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
829   else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
830   PetscFunctionReturn(0);
831 }
832 
833 
834 /* ------------------------------------------------------------------------------------------*/
835 /*@C
836    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
837    the caller allocates the array space. Indices in the ghost region are based on blocks.
838 
839    Collective
840 
841    Input Parameters:
842 +  comm - the MPI communicator to use
843 .  bs - block size
844 .  n - local vector length
845 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
846 .  nghost - number of local ghost blocks
847 .  ghosts - global indices of ghost blocks (or NULL if not needed); the counts are by block, not by individual index, and the indices do not need to be sorted
848 -  array - the space to store the vector values (must be of length at least n + nghost*bs)
849 
850    Output Parameter:
851 .  vv - the global vector representation (without ghost points as part of vector)
852 
853    Notes:
854    Use VecGhostGetLocalForm() to access the local, ghosted representation
855    of the vector.
856 
857    n is the local vector size (total local size not the number of blocks) while nghost
858    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
859    portion is bs*nghost
860 
861    Level: advanced
862 
863 
864 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
865           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
866           VecCreateGhostWithArray(), VecCreateGhostBlock()
867 
868 @*/
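/*
   Illustrative usage sketch: a block-ghosted vector over user storage; with bs=2 the array must hold
   n + bs*nghost entries and the ghost indices are block indices.  The helper name, sizes, and indices
   are hypothetical.

     #include <petscvec.h>

     PetscErrorCode exampleGhostBlockWithArray(MPI_Comm comm)
     {
       Vec            x;
       PetscScalar    *a;
       PetscInt       bs = 2,n = 6,nghost = 1,ghosts[1] = {0};   // a block index, not a scalar index
       PetscErrorCode ierr;

       PetscFunctionBegin;
       ierr = PetscMalloc1(n+bs*nghost,&a);CHKERRQ(ierr);
       ierr = VecCreateGhostBlockWithArray(comm,bs,n,PETSC_DECIDE,nghost,ghosts,a,&x);CHKERRQ(ierr);
       ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
       ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
       ierr = VecDestroy(&x);CHKERRQ(ierr);
       ierr = PetscFree(a);CHKERRQ(ierr);
       PetscFunctionReturn(0);
     }
*/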
869 PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
870 {
871   PetscErrorCode         ierr;
872   Vec_MPI                *w;
873   PetscScalar            *larray;
874   IS                     from,to;
875   ISLocalToGlobalMapping ltog;
876   PetscInt               rstart,i,nb,*indices;
877 
878   PetscFunctionBegin;
879   *vv = 0;
880 
881   if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
882   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
883   if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
884   if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
885   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
886   /* Create global representation */
887   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
888   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
889   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
890   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
891   w    = (Vec_MPI*)(*vv)->data;
892   /* Create local representation */
893   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
894   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
895   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
896   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
897 
898   /*
899        Create scatter context for scattering (updating) ghost values
900   */
901   ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
902   ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
903   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
904   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
905   ierr = ISDestroy(&to);CHKERRQ(ierr);
906   ierr = ISDestroy(&from);CHKERRQ(ierr);
907 
908   /* set local to global mapping for ghosted vector */
909   nb     = n/bs;
910   ierr   = PetscMalloc1(nb+nghost,&indices);CHKERRQ(ierr);
911   ierr   = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
912   rstart = rstart/bs;
913 
914   for (i=0; i<nb; i++)      indices[i]    = rstart + i;
915   for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];
916 
917   ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
918   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
919   ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
920   PetscFunctionReturn(0);
921 }
922 
923 /*@
924    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
925         The indexing of the ghost points is done with blocks.
926 
927    Collective
928 
929    Input Parameters:
930 +  comm - the MPI communicator to use
931 .  bs - the block size
932 .  n - local vector length
933 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
934 .  nghost - number of local ghost blocks
935 -  ghosts - global indices of ghost blocks; the counts are by block, not by individual index, and the indices do not need to be sorted
936 
937    Output Parameter:
938 .  vv - the global vector representation (without ghost points as part of vector)
939 
940    Notes:
941    Use VecGhostGetLocalForm() to access the local, ghosted representation
942    of the vector.
943 
944    n is the local vector size (total local size not the number of blocks) while nghost
945    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
946    portion is bs*nghost
947 
948    Level: advanced
949 
950 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
951           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
952           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
953 
954 @*/
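/*
   Illustrative usage sketch: the same setup with PETSc-allocated storage, inserting one block with
   VecSetValuesBlocked() so the values flow through the block-stash assembly implemented in this file.
   The helper name, indices, and values are hypothetical.

     #include <petscvec.h>

     PetscErrorCode exampleGhostBlock(MPI_Comm comm)
     {
       Vec            x;
       PetscInt       bs = 2,n = 6,nghost = 1,ghosts[1] = {0},bidx = 0;
       PetscScalar    bvals[2] = {1.0,2.0};
       PetscErrorCode ierr;

       PetscFunctionBegin;
       ierr = VecCreateGhostBlock(comm,bs,n,PETSC_DECIDE,nghost,ghosts,&x);CHKERRQ(ierr);
       ierr = VecSetValuesBlocked(x,1,&bidx,bvals,ADD_VALUES);CHKERRQ(ierr);
       ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
       ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
       ierr = VecDestroy(&x);CHKERRQ(ierr);
       PetscFunctionReturn(0);
     }
*/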
955 PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
956 {
957   PetscErrorCode ierr;
958 
959   PetscFunctionBegin;
960   ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
961   PetscFunctionReturn(0);
962 }
963