
/*
   This file contains routines for Parallel vector operations.
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
static PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
static PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
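
/*
   A usage sketch (illustrative, not code from this file) of the pairing that
   VecPlaceArray_MPI() implements: VecPlaceArray() swaps in user-owned storage
   and VecResetArray() restores the array saved in unplacedarray.  The names v
   and mine below are hypothetical.

     PetscScalar *mine;
     PetscInt    nlocal;
     ierr = VecGetLocalSize(v,&nlocal);CHKERRQ(ierr);
     ierr = PetscMalloc1(nlocal,&mine);CHKERRQ(ierr);
     ierr = VecPlaceArray(v,mine);CHKERRQ(ierr);   dispatches here for VECMPI
     ... operations on v now read and write mine ...
     ierr = VecResetArray(v);CHKERRQ(ierr);        restores the saved array
     ierr = PetscFree(mine);CHKERRQ(ierr);
*/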

extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt[],PetscScalar[]);

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
extern PetscErrorCode VecResetArray_MPI(Vec);

#undef __FUNCT__
#define __FUNCT__ "VecAssemblySend_MPI_Private"
static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
  PetscInt bs = X->map->bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* x->recvhdr only exists when we are reusing a communication network.  In that case, some messages can be empty, but
   * we have to send them this time if we sent them before because the receiver is expecting them. */
  if (hdr->count || (x->recvhdr && x->sendptrs[rankid].ints)) {
    ierr = MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
  }
  if (hdr->bcount || (x->recvhdr && x->sendptrs[rankid].intb)) {
    ierr = MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecAssemblyRecv_MPI_Private"
static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
  PetscErrorCode ierr;
  PetscInt bs = X->map->bs;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  ierr = PetscSegBufferGet(x->segrecvframe,1,&frame);CHKERRQ(ierr);

  if (hdr->count) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
    frame->pendings = 2;
  } else {
    frame->ints = NULL;
    frame->scalars = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
    frame->pendingb = 2;
  } else {
    frame->intb = NULL;
    frame->scalarb = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecAssemblyBegin_MPI_BTS"
static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI        *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  PetscInt       i,j,jb,bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(0);

  ierr = PetscObjectGetComm((PetscObject)X,&comm);CHKERRQ(ierr);
  ierr = VecGetBlockSize(X,&bs);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    InsertMode addv;
    ierr = MPI_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);CHKERRQ(ierr);
    if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
  }
#endif
  X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */

  ierr = VecStashSortCompress_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashSortCompress_Private(&X->bstash);CHKERRQ(ierr);

  if (!x->sendranks) {
    PetscMPIInt nowners,bnowners,*owners,*bowners;
    PetscInt ntmp;
    ierr = VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners);CHKERRQ(ierr);
    ierr = VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners);CHKERRQ(ierr);
    ierr = PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks);CHKERRQ(ierr);
    x->nsendranks = ntmp;
    ierr = PetscFree(owners);CHKERRQ(ierr);
    ierr = PetscFree(bowners);CHKERRQ(ierr);
    ierr = PetscMalloc1(x->nsendranks,&x->sendhdr);CHKERRQ(ierr);
    ierr = PetscCalloc1(x->nsendranks,&x->sendptrs);CHKERRQ(ierr);
  }
  for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
    PetscMPIInt rank = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for ( ; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
      for ( ; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) {ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint);CHKERRQ(ierr);}
  if (!x->segrecvscalar) {ierr = PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar);CHKERRQ(ierr);}
  if (!x->segrecvframe) {ierr = PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe);CHKERRQ(ierr);}
  if (x->recvhdr) {             /* VEC_SUBSET_OFF_PROC_ENTRIES and this is not the first assembly */
    PetscMPIInt tag[4];
    if (!x->assembly_subset) SETERRQ(comm,PETSC_ERR_PLIB,"Attempt to reuse rendezvous when not VEC_SUBSET_OFF_PROC_ENTRIES");
    for (i=0; i<4; i++) {ierr = PetscCommGetNewTag(comm,&tag[i]);CHKERRQ(ierr);}
    for (i=0; i<x->nsendranks; i++) {
      ierr = VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X);CHKERRQ(ierr);
    }
    for (i=0; i<x->nrecvranks; i++) {
      ierr = VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X);CHKERRQ(ierr);
    }
    x->use_status = PETSC_TRUE;
  } else {                      /* First time */
    ierr = PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X);CHKERRQ(ierr);
    x->use_status = PETSC_FALSE;
  }

  {
    PetscInt nstash,reallocs;
    ierr = VecStashGetInfo_Private(&X->stash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
    ierr = VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
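
/*
   A sketch (illustrative, not code from this file) of the assembly-reuse
   pattern the routine above supports: the first assembly performs the
   rendezvous via PetscCommBuildTwoSidedFReq(), and once
   VEC_SUBSET_OFF_PROC_ENTRIES is set, later assemblies reuse the cached
   sendranks/recvranks, sending size-zero messages where a rank has nothing
   new so that posted receives stay matched.  X, m, idx, and vals are
   hypothetical.

     ierr = VecSetOption(X,VEC_SUBSET_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
     for (it=0; it<nits; it++) {
       ierr = VecSetValues(X,m,idx,vals,ADD_VALUES);CHKERRQ(ierr);   same or smaller off-process pattern each iteration
       ierr = VecAssemblyBegin(X);CHKERRQ(ierr);
       ierr = VecAssemblyEnd(X);CHKERRQ(ierr);
     }
*/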

#undef __FUNCT__
#define __FUNCT__ "VecAssemblyEnd_MPI_BTS"
static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscInt bs = X->map->bs;
  PetscMPIInt npending,*some_indices,r;
  MPI_Status  *some_statuses;
  PetscScalar *xarray;
  PetscErrorCode ierr;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(0);
  }

  ierr = VecGetArray(X,&xarray);CHKERRQ(ierr);
  ierr = PetscSegBufferExtractInPlace(x->segrecvframe,&frame);CHKERRQ(ierr);
  ierr = PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses);CHKERRQ(ierr);
  for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending>0) {
    PetscMPIInt ndone,ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    ierr = MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE);CHKERRQ(ierr);
    for (ii=0; ii<ndone; ii++) {
      PetscInt i = some_indices[ii]/4,j,k;
      InsertMode imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt *recvint;
      PetscScalar *recvscalar;
      PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
      PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
        } else count = x->recvhdr[i].count;
        for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          if (*recvint < X->map->rstart || X->map->rend <= *recvint) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %D out of local range [%D,%D)",*recvint,X->map->rstart,X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      } else {                  /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
          PetscInt loc = (*recvint)*bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      }
    }
  }
  ierr = VecRestoreArray(X,&xarray);CHKERRQ(ierr);
  ierr = MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
  ierr = PetscFree2(some_indices,some_statuses);CHKERRQ(ierr);
  if (x->assembly_subset) {
    void *dummy;                /* reset segbuffers */
    ierr = PetscSegBufferExtractInPlace(x->segrecvint,&dummy);CHKERRQ(ierr);
    ierr = PetscSegBufferExtractInPlace(x->segrecvscalar,&dummy);CHKERRQ(ierr);
  } else {
    ierr = VecAssemblyReset_MPI(X);CHKERRQ(ierr);
  }

  X->stash.insertmode = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  ierr = VecStashScatterEnd_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashScatterEnd_Private(&X->bstash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecAssemblyReset_MPI"
PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(x->sendreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->recvreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->sendranks);CHKERRQ(ierr);
  ierr = PetscFree(x->recvranks);CHKERRQ(ierr);
  ierr = PetscFree(x->sendhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->recvhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->sendptrs);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvint);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvscalar);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvframe);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "VecSetFromOptions_MPI"
static PetscErrorCode VecSetFromOptions_MPI(PetscOptions *PetscOptionsObject,Vec X)
{
  PetscErrorCode ierr;
  PetscBool      flg = PETSC_FALSE,set;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"VecMPI Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-vec_assembly_bts","Use BuildTwoSided version of assembly","",flg,&flg,&set);CHKERRQ(ierr);
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI_BTS : VecAssemblyBegin_MPI;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI_BTS   : VecAssemblyEnd_MPI;
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
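
/*
   The option registered above is normally given on the command line; for
   example (a sketch, with a hypothetical executable name):

     mpiexec -n 4 ./app -vec_assembly_bts     selects VecAssemblyBegin/End_MPI_BTS
     mpiexec -n 4 ./app -vec_assembly_bts 0   selects the stash-based VecAssemblyBegin/End_MPI
*/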

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq, /* 20 */
                                VecSetValues_MPI,
                                VecAssemblyBegin_MPI_BTS,
                                VecAssemblyEnd_MPI_BTS,
                                0,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                0,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                VecSetFromOptions_MPI,
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideGather_Default,
                                VecStrideScatter_Default,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideSubSetGather_Default,
                                VecStrideSubSetScatter_Default,
                                0,
                                0
};

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);

  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr               = PetscMalloc1(n,&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/
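
/*
   Example (a sketch): selecting VECMPI explicitly.  Equivalently, call
   VecSetFromOptions() and pass -vec_type mpi on the command line.

     Vec v;
     ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,PETSC_DECIDE,100);CHKERRQ(ierr);
     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
     ...
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/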

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets a vector type to standard on calls to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/
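
/*
   Example (a sketch): VECSTANDARD defers the choice to the communicator, as
   VecCreate_Standard() below shows.

     Vec v;
     ierr = VecCreate(comm,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,PETSC_DECIDE,100);CHKERRQ(ierr);
     ierr = VecSetType(v,VECSTANDARD);CHKERRQ(ierr);   VECSEQ if comm has one rank, else VECMPI
*/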

#undef __FUNCT__
#define __FUNCT__ "VecCreate_Standard"
PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
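
/*
   Usage sketch for VecCreateMPIWithArray() (names are illustrative): the
   caller owns the storage and must keep it alive until the vector is
   destroyed, since VecDestroy() will not free it.

     PetscScalar array[5];
     Vec         v;
     ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,5,PETSC_DECIDE,array,&v);CHKERRQ(ierr);
     ... set values, assemble, and compute with v ...
     ierr = VecDestroy(&v);CHKERRQ(ierr);   array is NOT freed here
*/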

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
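
/*
   Usage sketch for VecCreateGhost(); the same pattern applies to
   VecCreateGhostWithArray() above.  Sizes and indices are illustrative: each
   process owns n entries and mirrors nghost off-process entries at the end of
   its local form.

     PetscInt ghosts[2];   global indices owned by other processes
     Vec      v,vlocal;
     ierr = VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,2,ghosts,&v);CHKERRQ(ierr);
     ... VecSetValues()/VecAssemblyBegin()/VecAssemblyEnd() on v ...
     ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);   fill the ghost slots
     ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostGetLocalForm(v,&vlocal);CHKERRQ(ierr);   sequential vector of length n+2
     ... read owned and ghost values through vlocal ...
     ierr = VecGhostRestoreLocalForm(v,&vlocal);CHKERRQ(ierr);
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/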

#undef __FUNCT__
#define __FUNCT__ "VecMPISetGhost"
/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if already a fully existent VECMPI then destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
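
/*
   Usage sketch (illustrative): VecMPISetGhost() retrofits ghosting onto an
   already-typed VECMPI, so the type and sizes must be set first, as the error
   checks above enforce.

     Vec v;
     ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,n,PETSC_DECIDE);CHKERRQ(ierr);
     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
     ierr = VecMPISetGhost(v,nghost,ghosts);CHKERRQ(ierr);
     ... use v exactly like a vector from VecCreateGhost() ...
*/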


/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb   = n/bs;
  ierr = PetscMalloc1(nb+nghost,&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);

  for (i=0; i<nb; i++)      indices[i]    = rstart/bs + i; /* rstart is in scalar entries; the mapping uses block indices */
  for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];

  ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
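
/*
   Usage sketch (illustrative) for the block variants above: with bs = 2, each
   ghost index names a block of 2 entries, so the local form holds n + 2*nghost
   scalars.

     PetscInt ghostblocks[1];   global block index owned by another process
     Vec      v;
     ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,2,n,PETSC_DECIDE,1,ghostblocks,&v);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ...
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/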