
/*
   This file contains routines for Parallel vector operations.
 */
#include <petscoptions.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

static PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

static PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}
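/*
   Illustrative sketch (not part of this source): user code never calls VecDot_MPI directly; VecDot()
   dispatches here through the ops table. The same local-compute-then-MPIU_Allreduce pattern used above
   can be reproduced in application code, e.g. to form a global sum of all entries of an MPI vector.
   The helper name below is hypothetical.

     static PetscErrorCode MyGlobalEntrySum(Vec x,PetscScalar *sum)
     {
       PetscErrorCode    ierr;
       const PetscScalar *a;
       PetscScalar       local = 0.0;
       PetscInt          i,n;

       PetscFunctionBegin;
       ierr = VecGetLocalSize(x,&n);CHKERRQ(ierr);
       ierr = VecGetArrayRead(x,&a);CHKERRQ(ierr);
       for (i=0; i<n; i++) local += a[i];
       ierr = VecRestoreArrayRead(x,&a);CHKERRQ(ierr);
       ierr = MPIU_Allreduce(&local,sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)x));CHKERRQ(ierr);
       PetscFunctionReturn(0);
     }
*/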

extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
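/*
   Usage sketch (assumed application code, not part of this source): VecPlaceArray() temporarily
   substitutes a caller-owned buffer for the vector's storage, and VecResetArray() restores the array
   that VecPlaceArray_MPI() stashed in unplacedarray. The buffer must hold at least the local length
   (plus ghosts for ghosted vectors).

     PetscScalar *mybuf;
     PetscInt    nlocal;

     ierr = VecGetLocalSize(x,&nlocal);CHKERRQ(ierr);
     ierr = PetscMalloc1(nlocal,&mybuf);CHKERRQ(ierr);
     ierr = VecPlaceArray(x,mybuf);CHKERRQ(ierr);
     ... use x; it now reads and writes mybuf ...
     ierr = VecResetArray(x);CHKERRQ(ierr);
     ierr = PetscFree(mybuf);CHKERRQ(ierr);
*/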

extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt[],PetscScalar[]);

static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}


static PetscErrorCode VecSetOption_MPI(Vec V,VecOption op,PetscBool flag)
{
  Vec_MPI        *v = (Vec_MPI*)V->data;
  PetscFunctionBegin;
  switch (op) {
  case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag;
    break;
  case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag;
    break;
  case VEC_SUBSET_OFF_PROC_ENTRIES: v->assembly_subset = flag;
    break;
  }
  PetscFunctionReturn(0);
}
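/*
   Usage sketch (assumed application code): the options handled above are set through VecSetOption().
   For example, a code that only ever sets locally owned entries can skip the stash entirely, and a
   code that repeats assembly with the same communication pattern can declare that to enable reuse.

     ierr = VecSetOption(x,VEC_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
     ierr = VecSetOption(x,VEC_SUBSET_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
*/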


static PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI        *v = (Vec_MPI*)vin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = 0;
  if (v->localrep) {
    ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
  PetscInt bs = X->map->bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* x->recvhdr only exists when we are reusing a communication network.  In that case, some messages can be empty, but
   * we have to send them this time if we sent them before because the receiver is expecting them. */
  if (hdr->count || (x->recvhdr && x->sendptrs[rankid].ints)) {
    ierr = MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
  }
  if (hdr->bcount || (x->recvhdr && x->sendptrs[rankid].intb)) {
    ierr = MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
  PetscErrorCode ierr;
  PetscInt bs = X->map->bs;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  ierr = PetscSegBufferGet(x->segrecvframe,1,&frame);CHKERRQ(ierr);

  if (hdr->count) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
    frame->pendings = 2;
  } else {
    frame->ints = NULL;
    frame->scalars = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
    frame->pendingb = 2;
  } else {
    frame->intb = NULL;
    frame->scalarb = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI        *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  PetscInt       i,j,jb,bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(0);

  ierr = PetscObjectGetComm((PetscObject)X,&comm);CHKERRQ(ierr);
  ierr = VecGetBlockSize(X,&bs);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    InsertMode addv;
    ierr = MPIU_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);CHKERRQ(ierr);
    if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
  }
#endif
  X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */

  ierr = VecStashSortCompress_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashSortCompress_Private(&X->bstash);CHKERRQ(ierr);

  if (!x->sendranks) {
    PetscMPIInt nowners,bnowners,*owners,*bowners;
    PetscInt ntmp;
    ierr = VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners);CHKERRQ(ierr);
    ierr = VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners);CHKERRQ(ierr);
    ierr = PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks);CHKERRQ(ierr);
    x->nsendranks = ntmp;
    ierr = PetscFree(owners);CHKERRQ(ierr);
    ierr = PetscFree(bowners);CHKERRQ(ierr);
    ierr = PetscMalloc1(x->nsendranks,&x->sendhdr);CHKERRQ(ierr);
    ierr = PetscCalloc1(x->nsendranks,&x->sendptrs);CHKERRQ(ierr);
  }
  for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
    PetscMPIInt rank = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for ( ; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
      for ( ; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) {ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint);CHKERRQ(ierr);}
  if (!x->segrecvscalar) {ierr = PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar);CHKERRQ(ierr);}
  if (!x->segrecvframe) {ierr = PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe);CHKERRQ(ierr);}
  if (x->recvhdr) {             /* VEC_SUBSET_OFF_PROC_ENTRIES and this is not the first assembly */
    PetscMPIInt tag[4];
    if (!x->assembly_subset) SETERRQ(comm,PETSC_ERR_PLIB,"Attempt to reuse rendezvous when not VEC_SUBSET_OFF_PROC_ENTRIES");
    for (i=0; i<4; i++) {ierr = PetscCommGetNewTag(comm,&tag[i]);CHKERRQ(ierr);}
    for (i=0; i<x->nsendranks; i++) {
      ierr = VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X);CHKERRQ(ierr);
    }
    for (i=0; i<x->nrecvranks; i++) {
      ierr = VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X);CHKERRQ(ierr);
    }
    x->use_status = PETSC_TRUE;
  } else {                      /* First time */
    ierr = PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X);CHKERRQ(ierr);
    x->use_status = PETSC_FALSE;
  }

  {
    PetscInt nstash,reallocs;
    ierr = VecStashGetInfo_Private(&X->stash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
    ierr = VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscInt bs = X->map->bs;
  PetscMPIInt npending,*some_indices,r;
  MPI_Status  *some_statuses;
  PetscScalar *xarray;
  PetscErrorCode ierr;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(0);
  }

  ierr = VecGetArray(X,&xarray);CHKERRQ(ierr);
  ierr = PetscSegBufferExtractInPlace(x->segrecvframe,&frame);CHKERRQ(ierr);
  ierr = PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses);CHKERRQ(ierr);
  for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending>0) {
    PetscMPIInt ndone,ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    ierr = MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE);CHKERRQ(ierr);
    for (ii=0; ii<ndone; ii++) {
      PetscInt i = some_indices[ii]/4,j,k;
      InsertMode imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt *recvint;
      PetscScalar *recvscalar;
      PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
      PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
        } else count = x->recvhdr[i].count;
        for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          if (*recvint < X->map->rstart || X->map->rend <= *recvint) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %D out of local range [%D,%D)",*recvint,X->map->rstart,X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      } else {                  /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
          PetscInt loc = (*recvint)*bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      }
    }
  }
  ierr = VecRestoreArray(X,&xarray);CHKERRQ(ierr);
  ierr = MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
  ierr = PetscFree2(some_indices,some_statuses);CHKERRQ(ierr);
  if (x->assembly_subset) {
    void *dummy;                /* reset segbuffers */
    ierr = PetscSegBufferExtractInPlace(x->segrecvint,&dummy);CHKERRQ(ierr);
    ierr = PetscSegBufferExtractInPlace(x->segrecvscalar,&dummy);CHKERRQ(ierr);
  } else {
    ierr = VecAssemblyReset_MPI(X);CHKERRQ(ierr);
  }

  X->stash.insertmode = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  ierr = VecStashScatterEnd_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashScatterEnd_Private(&X->bstash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
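/*
   Usage sketch (assumed application code): the BTS assembly above is driven by the usual
   VecSetValues()/VecAssemblyBegin()/VecAssemblyEnd() sequence. When VEC_SUBSET_OFF_PROC_ENTRIES is
   set, later assemblies communicate with (a subset of) the same ranks, so the rendezvous and receive
   buffers built on the first assembly are reused instead of being rebuilt.

     ierr = VecSetOption(x,VEC_SUBSET_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
     for (it=0; it<nits; it++) {
       ierr = VecSet(x,0.0);CHKERRQ(ierr);
       ierr = VecSetValues(x,nidx,idx,vals,ADD_VALUES);CHKERRQ(ierr);
       ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
       ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
     }
*/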

PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(x->sendreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->recvreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->sendranks);CHKERRQ(ierr);
  ierr = PetscFree(x->recvranks);CHKERRQ(ierr);
  ierr = PetscFree(x->sendhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->recvhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->sendptrs);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvint);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvscalar);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvframe);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


static PetscErrorCode VecSetFromOptions_MPI(PetscOptionItems *PetscOptionsObject,Vec X)
{
  PetscErrorCode ierr;
  PetscBool      flg = PETSC_FALSE,set;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"VecMPI Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-vec_assembly_bts","Use BuildTwoSided version of assembly","",flg,&flg,&set);CHKERRQ(ierr);
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI_BTS : VecAssemblyBegin_MPI;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI_BTS   : VecAssemblyEnd_MPI;
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
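/*
   Usage sketch (assumed application code): -vec_assembly_bts selects the BuildTwoSided assembly
   kernels above. It only takes effect when VecSetFromOptions() is called on the vector (and is
   prefixed if the vector has an options prefix), e.g.

     ierr = PetscOptionsSetValue(NULL,"-vec_assembly_bts","true");CHKERRQ(ierr);
     ierr = VecCreate(comm,&x);CHKERRQ(ierr);
     ierr = VecSetSizes(x,n,PETSC_DECIDE);CHKERRQ(ierr);
     ierr = VecSetFromOptions(x);CHKERRQ(ierr);

   or equivalently by passing -vec_assembly_bts on the command line.
*/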


static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI, /* 20 */
                                VecAssemblyBegin_MPI,
                                VecAssemblyEnd_MPI,
                                0,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                0,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                VecSetFromOptions_MPI,/*set from options */
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideGather_Default,
                                VecStrideScatter_Default,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideSubSetGather_Default,
                                VecStrideSubSetScatter_Default,
                                0,
                                0
};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);

  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr               = PetscMalloc1(n,&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
M*/

PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
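/*
   Usage sketch (assumed application code): VECMPI vectors are normally obtained through the generic
   create/set-type sequence or the VecCreateMPI() convenience routine, both of which end up in
   VecCreate_MPI()/VecCreate_MPI_Private() above.

     Vec x;
     ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
     ierr = VecSetSizes(x,PETSC_DECIDE,N);CHKERRQ(ierr);
     ierr = VecSetType(x,VECMPI);CHKERRQ(ierr);
     ... use x ...
     ierr = VecDestroy(&x);CHKERRQ(ierr);

   VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,N,&x) is the one-call equivalent, and
   VecSetFromOptions(x) may be used in place of VecSetType() to honor -vec_type.
*/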

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets a vector type to standard on calls to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
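/*
   Usage sketch (assumed application code): the caller provides, and keeps ownership of, the storage.

     PetscScalar *array;
     Vec         x;
     PetscInt    nlocal = 100;

     ierr = PetscMalloc1(nlocal,&array);CHKERRQ(ierr);
     ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nlocal,PETSC_DECIDE,array,&x);CHKERRQ(ierr);
     ... use x; it reads and writes array ...
     ierr = VecDestroy(&x);CHKERRQ(ierr);
     ierr = PetscFree(array);CHKERRQ(ierr);   VecDestroy() does not free the user array
*/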

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
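/*
   Usage sketch (assumed application code): the typical ghost-update cycle for a vector created with
   VecCreateGhost() or VecCreateGhostWithArray(). Owned values are set through the global
   representation; VecGhostUpdateBegin/End() then fill the ghost slots of the local form.

     Vec      x,xlocal;
     PetscInt ghosts[2] = {0,5};                  (hypothetical indices owned by other ranks)

     ierr = VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,2,ghosts,&x);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostGetLocalForm(x,&xlocal);CHKERRQ(ierr);       local length is n+2
     ... read owned and ghost entries from xlocal ...
     ierr = VecGhostRestoreLocalForm(x,&xlocal);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/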

/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)


   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if already a fully existent VECMPI then basically destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}


/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb   = n/bs;
  ierr = PetscMalloc1(nb+nghost,&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);

  for (i=0; i<nb; i++)      indices[i]    = rstart + i;
  for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];

  ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
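/*
   Usage sketch (assumed application code): block-ghosted vectors take ghost indices by block, so a
   vector with block size bs and nghost ghost blocks has a local form of length n + bs*nghost.

     Vec      x;
     PetscInt bs = 2,nghostblocks = 1;
     PetscInt ghostblocks[1] = {0};               (hypothetical block index owned by another rank)

     ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,bs,n,PETSC_DECIDE,nghostblocks,ghostblocks,&x);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/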
959