xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision 40df0d723ea94fee6b28ca5943ca31376c9d8193)
1 
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include <petscsys.h>
6 #include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/
7 
8 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
9 {
10   PetscScalar    sum,work;
11   PetscErrorCode ierr;
12 
13   PetscFunctionBegin;
14   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
15   ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
16   *z   = sum;
17   PetscFunctionReturn(0);
18 }
19 
20 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
21 {
22   PetscScalar    sum,work;
23   PetscErrorCode ierr;
24 
25   PetscFunctionBegin;
26   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
27   ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
28   *z   = sum;
29   PetscFunctionReturn(0);
30 }
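/*
   Hedged usage sketch (editorial, not part of this file): on a VECMPI, the public VecDot()
   dispatches to VecDot_MPI() above, which combines the per-rank VecDot_Seq() partial result
   with a single MPIU_Allreduce().  The calls below are standard PETSc API; the sizes and
   values are made up for illustration.

     Vec         x,y;
     PetscScalar dot;
     ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);CHKERRQ(ierr);
     ierr = VecDuplicate(x,&y);CHKERRQ(ierr);
     ierr = VecSet(x,2.0);CHKERRQ(ierr);
     ierr = VecSet(y,3.0);CHKERRQ(ierr);
     ierr = VecDot(x,y,&dot);CHKERRQ(ierr);         dot == 600 on every rank
     ierr = VecDestroy(&x);CHKERRQ(ierr);
     ierr = VecDestroy(&y);CHKERRQ(ierr);
*/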
31 
32 extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
33 
34 static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
35 {
36   PetscErrorCode ierr;
37   Vec_MPI        *v = (Vec_MPI*)vin->data;
38 
39   PetscFunctionBegin;
40   if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
41   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
42   v->array         = (PetscScalar*)a;
43   if (v->localrep) {
44     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
45   }
46   PetscFunctionReturn(0);
47 }
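/*
   Hedged usage sketch (editorial): VecPlaceArray()/VecResetArray() on a VECMPI reach
   VecPlaceArray_MPI() and VecResetArray_MPI() in this file; the placed buffer must hold at
   least the local length.  The buffer name "mybuf" is an assumption for illustration only.

     PetscInt     nlocal;
     PetscScalar *mybuf;
     ierr = VecGetLocalSize(x,&nlocal);CHKERRQ(ierr);
     ierr = PetscMalloc1(nlocal,&mybuf);CHKERRQ(ierr);
     ierr = VecPlaceArray(x,mybuf);CHKERRQ(ierr);    x now stores its entries in mybuf
     ...                                             use x as usual
     ierr = VecResetArray(x);CHKERRQ(ierr);          restore the original array
     ierr = PetscFree(mybuf);CHKERRQ(ierr);
*/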
48 
49 static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
50 {
51   PetscErrorCode ierr;
52   Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
53   PetscScalar    *array;
54 
55   PetscFunctionBegin;
56   ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
57   ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);
58 
59   ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
60   vw   = (Vec_MPI*)(*v)->data;
61   ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
62 
63   /* save local representation of the parallel vector (and scatter) if it exists */
64   if (w->localrep) {
65     ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
66     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
67     ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
68     ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
69     ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);
70 
71     vw->localupdate = w->localupdate;
72     if (vw->localupdate) {
73       ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
74     }
75   }
76 
77   /* New vector should inherit stashing property of parent */
78   (*v)->stash.donotstash   = win->stash.donotstash;
79   (*v)->stash.ignorenegidx = win->stash.ignorenegidx;
80 
81   ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
82   ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
83 
84   (*v)->map->bs   = PetscAbs(win->map->bs);
85   (*v)->bstash.bs = win->bstash.bs;
86   PetscFunctionReturn(0);
87 }
88 
89 
90 static PetscErrorCode VecSetOption_MPI(Vec V,VecOption op,PetscBool flag)
91 {
92   Vec_MPI        *v = (Vec_MPI*)V->data;
93   PetscFunctionBegin;
94   switch (op) {
95   case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag;
96     break;
97   case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag;
98     break;
99   case VEC_SUBSET_OFF_PROC_ENTRIES: v->assembly_subset = flag;
100     break;
101   }
102   PetscFunctionReturn(0);
103 }
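/*
   Hedged usage sketch (editorial): the options handled by VecSetOption_MPI() above are set
   through the public VecSetOption().  For example, declaring that no off-process entries will
   be generated avoids the assembly stash entirely:

     ierr = VecSetOption(x,VEC_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);

   or, when repeated assemblies touch (a subset of) the same off-process locations, allow the
   communication pattern built by the first assembly to be reused:

     ierr = VecSetOption(x,VEC_SUBSET_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
*/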
104 
105 
106 static PetscErrorCode VecResetArray_MPI(Vec vin)
107 {
108   Vec_MPI        *v = (Vec_MPI*)vin->data;
109   PetscErrorCode ierr;
110 
111   PetscFunctionBegin;
112   v->array         = v->unplacedarray;
113   v->unplacedarray = 0;
114   if (v->localrep) {
115     ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
116   }
117   PetscFunctionReturn(0);
118 }
119 
120 static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
121 {
122   Vec X = (Vec)ctx;
123   Vec_MPI *x = (Vec_MPI*)X->data;
124   VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
125   PetscInt bs = X->map->bs;
126   PetscErrorCode ierr;
127 
128   PetscFunctionBegin;
129   /* x->recvhdr only exists when we are reusing a communication network.  In that case, some messages can be empty, but
130    * we have to send them this time if we sent them before because the receiver is expecting them. */
131   if (hdr->count || (x->recvhdr && x->sendptrs[rankid].ints)) {
132     ierr = MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
133     ierr = MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
134   }
135   if (hdr->bcount || (x->recvhdr && x->sendptrs[rankid].intb)) {
136     ierr = MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
137     ierr = MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
138   }
139   PetscFunctionReturn(0);
140 }
141 
142 static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
143 {
144   Vec X = (Vec)ctx;
145   Vec_MPI *x = (Vec_MPI*)X->data;
146   VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
147   PetscErrorCode ierr;
148   PetscInt bs = X->map->bs;
149   VecAssemblyFrame *frame;
150 
151   PetscFunctionBegin;
152   ierr = PetscSegBufferGet(x->segrecvframe,1,&frame);CHKERRQ(ierr);
153 
154   if (hdr->count) {
155     ierr = PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints);CHKERRQ(ierr);
156     ierr = MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
157     ierr = PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars);CHKERRQ(ierr);
158     ierr = MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
159     frame->pendings = 2;
160   } else {
161     frame->ints = NULL;
162     frame->scalars = NULL;
163     frame->pendings = 0;
164   }
165 
166   if (hdr->bcount) {
167     ierr = PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb);CHKERRQ(ierr);
168     ierr = MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
169     ierr = PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb);CHKERRQ(ierr);
170     ierr = MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
171     frame->pendingb = 2;
172   } else {
173     frame->intb = NULL;
174     frame->scalarb = NULL;
175     frame->pendingb = 0;
176   }
177   PetscFunctionReturn(0);
178 }
179 
180 static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
181 {
182   Vec_MPI        *x = (Vec_MPI*)X->data;
183   PetscErrorCode ierr;
184   MPI_Comm       comm;
185   PetscInt       i,j,jb,bs;
186 
187   PetscFunctionBegin;
188   if (X->stash.donotstash) PetscFunctionReturn(0);
189 
190   ierr = PetscObjectGetComm((PetscObject)X,&comm);CHKERRQ(ierr);
191   ierr = VecGetBlockSize(X,&bs);CHKERRQ(ierr);
192 #if defined(PETSC_USE_DEBUG)
193   {
194     InsertMode addv;
195     ierr = MPIU_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);CHKERRQ(ierr);
196     if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
197   }
198 #endif
199   X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */
200 
201   ierr = VecStashSortCompress_Private(&X->stash);CHKERRQ(ierr);
202   ierr = VecStashSortCompress_Private(&X->bstash);CHKERRQ(ierr);
203 
204   if (!x->sendranks) {
205     PetscMPIInt nowners,bnowners,*owners,*bowners;
206     PetscInt ntmp;
207     ierr = VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners);CHKERRQ(ierr);
208     ierr = VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners);CHKERRQ(ierr);
209     ierr = PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks);CHKERRQ(ierr);
210     x->nsendranks = ntmp;
211     ierr = PetscFree(owners);CHKERRQ(ierr);
212     ierr = PetscFree(bowners);CHKERRQ(ierr);
213     ierr = PetscMalloc1(x->nsendranks,&x->sendhdr);CHKERRQ(ierr);
214     ierr = PetscCalloc1(x->nsendranks,&x->sendptrs);CHKERRQ(ierr);
215   }
216   for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
217     PetscMPIInt rank = x->sendranks[i];
218     x->sendhdr[i].insertmode = X->stash.insertmode;
219     /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
220      * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
221      * there is nothing new to send, so that size-zero messages get sent instead. */
222     x->sendhdr[i].count = 0;
223     if (X->stash.n) {
224       x->sendptrs[i].ints    = &X->stash.idx[j];
225       x->sendptrs[i].scalars = &X->stash.array[j];
226       for ( ; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
227     }
228     x->sendhdr[i].bcount = 0;
229     if (X->bstash.n) {
230       x->sendptrs[i].intb    = &X->bstash.idx[jb];
231       x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
232       for ( ; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
233     }
234   }
235 
236   if (!x->segrecvint) {ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint);CHKERRQ(ierr);}
237   if (!x->segrecvscalar) {ierr = PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar);CHKERRQ(ierr);}
238   if (!x->segrecvframe) {ierr = PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe);CHKERRQ(ierr);}
239   if (x->recvhdr) {             /* VEC_SUBSET_OFF_PROC_ENTRIES and this is not the first assembly */
240     PetscMPIInt tag[4];
241     if (!x->assembly_subset) SETERRQ(comm,PETSC_ERR_PLIB,"Attempt to reuse rendezvous when not VEC_SUBSET_OFF_PROC_ENTRIES");
242     for (i=0; i<4; i++) {ierr = PetscCommGetNewTag(comm,&tag[i]);CHKERRQ(ierr);}
243     for (i=0; i<x->nsendranks; i++) {
244       ierr = VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X);CHKERRQ(ierr);
245     }
246     for (i=0; i<x->nrecvranks; i++) {
247       ierr = VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X);CHKERRQ(ierr);
248     }
249     x->use_status = PETSC_TRUE;
250   } else {                      /* First time */
251     ierr = PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X);CHKERRQ(ierr);
252     x->use_status = PETSC_FALSE;
253   }
254 
255   {
256     PetscInt nstash,reallocs;
257     ierr = VecStashGetInfo_Private(&X->stash,&nstash,&reallocs);CHKERRQ(ierr);
258     ierr = PetscInfo2(X,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
259     ierr = VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs);CHKERRQ(ierr);
260     ierr = PetscInfo2(X,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
261   }
262   PetscFunctionReturn(0);
263 }
264 
265 static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
266 {
267   Vec_MPI *x = (Vec_MPI*)X->data;
268   PetscInt bs = X->map->bs;
269   PetscMPIInt npending,*some_indices,r;
270   MPI_Status  *some_statuses;
271   PetscScalar *xarray;
272   PetscErrorCode ierr;
273   VecAssemblyFrame *frame;
274 
275   PetscFunctionBegin;
276   if (X->stash.donotstash) {
277     X->stash.insertmode = NOT_SET_VALUES;
278     X->bstash.insertmode = NOT_SET_VALUES;
279     PetscFunctionReturn(0);
280   }
281 
282   ierr = VecGetArray(X,&xarray);CHKERRQ(ierr);
283   ierr = PetscSegBufferExtractInPlace(x->segrecvframe,&frame);CHKERRQ(ierr);
284   ierr = PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses);CHKERRQ(ierr);
285   for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
286   while (npending>0) {
287     PetscMPIInt ndone=0,ii;
288     /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
289      * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
290      * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
291      * subsequent assembly can set a proper subset of the values. */
292     ierr = MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE);CHKERRQ(ierr);
293     for (ii=0; ii<ndone; ii++) {
294       PetscInt i = some_indices[ii]/4,j,k;
295       InsertMode imode = (InsertMode)x->recvhdr[i].insertmode;
296       PetscInt *recvint;
297       PetscScalar *recvscalar;
298       PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
299       PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
300       npending--;
301       if (!blockmsg) { /* Scalar stash */
302         PetscMPIInt count;
303         if (--frame[i].pendings > 0) continue;
304         if (x->use_status) {
305           ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
306         } else count = x->recvhdr[i].count;
307         for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
308           PetscInt loc = *recvint - X->map->rstart;
309           if (*recvint < X->map->rstart || X->map->rend <= *recvint) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %D out of local range [%D,%D)",*recvint,X->map->rstart,X->map->rend);
310           switch (imode) {
311           case ADD_VALUES:
312             xarray[loc] += *recvscalar++;
313             break;
314           case INSERT_VALUES:
315             xarray[loc] = *recvscalar++;
316             break;
317           default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
318           }
319         }
320       } else {                  /* Block stash */
321         PetscMPIInt count;
322         if (--frame[i].pendingb > 0) continue;
323         if (x->use_status) {
324           ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
325           if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
326         } else count = x->recvhdr[i].bcount;
327         for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
328           PetscInt loc = (*recvint)*bs - X->map->rstart;
329           switch (imode) {
330           case ADD_VALUES:
331             for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
332             break;
333           case INSERT_VALUES:
334             for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
335             break;
336           default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
337           }
338         }
339       }
340     }
341   }
342   ierr = VecRestoreArray(X,&xarray);CHKERRQ(ierr);
343   ierr = MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
344   ierr = PetscFree2(some_indices,some_statuses);CHKERRQ(ierr);
345   if (x->assembly_subset) {
346     void *dummy;                /* reset segbuffers */
347     ierr = PetscSegBufferExtractInPlace(x->segrecvint,&dummy);CHKERRQ(ierr);
348     ierr = PetscSegBufferExtractInPlace(x->segrecvscalar,&dummy);CHKERRQ(ierr);
349   } else {
350     ierr = VecAssemblyReset_MPI(X);CHKERRQ(ierr);
351   }
352 
353   X->stash.insertmode = NOT_SET_VALUES;
354   X->bstash.insertmode = NOT_SET_VALUES;
355   ierr = VecStashScatterEnd_Private(&X->stash);CHKERRQ(ierr);
356   ierr = VecStashScatterEnd_Private(&X->bstash);CHKERRQ(ierr);
357   PetscFunctionReturn(0);
358 }
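/*
   Hedged usage sketch (editorial): user code reaches the begin/end routines above through
   VecAssemblyBegin()/VecAssemblyEnd().  The first assembly performs the two-sided rendezvous;
   with VEC_SUBSET_OFF_PROC_ENTRIES set, later assemblies that touch a subset of the same
   off-process entries reuse the send/receive ranks set up the first time (see the x->recvhdr
   handling above).  The index and value arrays below are assumptions for illustration.

     PetscInt    idx[2]  = {0, 17};              may include off-process indices
     PetscScalar vals[2] = {1.0, 2.0};
     ierr = VecSetOption(x,VEC_SUBSET_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
     ierr = VecSetValues(x,2,idx,vals,ADD_VALUES);CHKERRQ(ierr);
     ierr = VecAssemblyBegin(x);CHKERRQ(ierr);    VecAssemblyBegin_MPI_BTS() for VECMPI
     ierr = VecAssemblyEnd(x);CHKERRQ(ierr);      VecAssemblyEnd_MPI_BTS() for VECMPI
*/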
359 
360 PetscErrorCode VecAssemblyReset_MPI(Vec X)
361 {
362   Vec_MPI *x = (Vec_MPI*)X->data;
363   PetscErrorCode ierr;
364 
365   PetscFunctionBegin;
366   ierr = PetscFree(x->sendreqs);CHKERRQ(ierr);
367   ierr = PetscFree(x->recvreqs);CHKERRQ(ierr);
368   ierr = PetscFree(x->sendranks);CHKERRQ(ierr);
369   ierr = PetscFree(x->recvranks);CHKERRQ(ierr);
370   ierr = PetscFree(x->sendhdr);CHKERRQ(ierr);
371   ierr = PetscFree(x->recvhdr);CHKERRQ(ierr);
372   ierr = PetscFree(x->sendptrs);CHKERRQ(ierr);
373   ierr = PetscSegBufferDestroy(&x->segrecvint);CHKERRQ(ierr);
374   ierr = PetscSegBufferDestroy(&x->segrecvscalar);CHKERRQ(ierr);
375   ierr = PetscSegBufferDestroy(&x->segrecvframe);CHKERRQ(ierr);
376   PetscFunctionReturn(0);
377 }
378 
379 
380 static PetscErrorCode VecSetFromOptions_MPI(PetscOptionItems *PetscOptionsObject,Vec X)
381 {
382 #if !defined(PETSC_HAVE_MPIUNI)
383   PetscErrorCode ierr;
384   PetscBool      flg = PETSC_FALSE,set;
385 
386   PetscFunctionBegin;
387   ierr = PetscOptionsHead(PetscOptionsObject,"VecMPI Options");CHKERRQ(ierr);
388   ierr = PetscOptionsBool("-vec_assembly_legacy","Use MPI 1 version of assembly","",flg,&flg,&set);CHKERRQ(ierr);
389   if (set) {
390     X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
391     X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI   : VecAssemblyEnd_MPI_BTS;
392   }
393   ierr = PetscOptionsTail();CHKERRQ(ierr);
394 #else
395   X->ops->assemblybegin =  VecAssemblyBegin_MPI;
396   X->ops->assemblyend   =  VecAssemblyEnd_MPI;
397 #endif
398   PetscFunctionReturn(0);
399 }
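/*
   Hedged usage sketch (editorial): the -vec_assembly_legacy option registered above switches a
   vector back to the MPI-1 stash-based assembly instead of the BTS routines in this file.  It
   takes effect when VecSetFromOptions() is called on the vector, e.g.

     ierr = VecSetFromOptions(x);CHKERRQ(ierr);

   and the program is run with

     ./myapp -vec_assembly_legacy

   ("myapp" is a placeholder program name.)
*/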
400 
401 
402 static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
403                                 VecDuplicateVecs_Default,
404                                 VecDestroyVecs_Default,
405                                 VecDot_MPI,
406                                 VecMDot_MPI,
407                                 VecNorm_MPI,
408                                 VecTDot_MPI,
409                                 VecMTDot_MPI,
410                                 VecScale_Seq,
411                                 VecCopy_Seq, /* 10 */
412                                 VecSet_Seq,
413                                 VecSwap_Seq,
414                                 VecAXPY_Seq,
415                                 VecAXPBY_Seq,
416                                 VecMAXPY_Seq,
417                                 VecAYPX_Seq,
418                                 VecWAXPY_Seq,
419                                 VecAXPBYPCZ_Seq,
420                                 VecPointwiseMult_Seq,
421                                 VecPointwiseDivide_Seq,
422                                 VecSetValues_MPI, /* 20 */
423                                 VecAssemblyBegin_MPI_BTS,
424                                 VecAssemblyEnd_MPI_BTS,
425                                 0,
426                                 VecGetSize_MPI,
427                                 VecGetSize_Seq,
428                                 0,
429                                 VecMax_MPI,
430                                 VecMin_MPI,
431                                 VecSetRandom_Seq,
432                                 VecSetOption_MPI,
433                                 VecSetValuesBlocked_MPI,
434                                 VecDestroy_MPI,
435                                 VecView_MPI,
436                                 VecPlaceArray_MPI,
437                                 VecReplaceArray_Seq,
438                                 VecDot_Seq,
439                                 VecTDot_Seq,
440                                 VecNorm_Seq,
441                                 VecMDot_Seq,
442                                 VecMTDot_Seq,
443                                 VecLoad_Default,
444                                 VecReciprocal_Default,
445                                 VecConjugate_Seq,
446                                 0,
447                                 0,
448                                 VecResetArray_MPI,
449                                 VecSetFromOptions_MPI, /* set from options */
450                                 VecMaxPointwiseDivide_Seq,
451                                 VecPointwiseMax_Seq,
452                                 VecPointwiseMaxAbs_Seq,
453                                 VecPointwiseMin_Seq,
454                                 VecGetValues_MPI,
455                                 0,
456                                 0,
457                                 0,
458                                 0,
459                                 0,
460                                 0,
461                                 VecStrideGather_Default,
462                                 VecStrideScatter_Default,
463                                 0,
464                                 0,
465                                 0,
466                                 0,
467                                 0,
468                                 VecStrideSubSetGather_Default,
469                                 VecStrideSubSetScatter_Default,
470                                 0,
471                                 0
472 };
473 
474 /*
475     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
476     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
477     VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()
478 
479     If alloc is true and array is NULL then this routine allocates the space, otherwise
480     no space is allocated.
481 */
482 PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
483 {
484   Vec_MPI        *s;
485   PetscErrorCode ierr;
486 
487   PetscFunctionBegin;
488   ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
489   v->data        = (void*)s;
490   ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
491   s->nghost      = nghost;
492   v->petscnative = PETSC_TRUE;
493 
494   ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
495 
496   s->array           = (PetscScalar*)array;
497   s->array_allocated = 0;
498   if (alloc && !array) {
499     PetscInt n = v->map->n+nghost;
500     ierr               = PetscMalloc1(n,&s->array);CHKERRQ(ierr);
501     ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
502     ierr               = PetscMemzero(s->array,n*sizeof(PetscScalar));CHKERRQ(ierr);
503     s->array_allocated = s->array;
504   }
505 
506   /* By default parallel vectors do not have local representation */
507   s->localrep    = 0;
508   s->localupdate = 0;
509 
510   v->stash.insertmode = NOT_SET_VALUES;
511   v->bstash.insertmode = NOT_SET_VALUES;
512   /* create the stashes. The block-size for bstash is set later when
513      VecSetValuesBlocked is called.
514   */
515   ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
516   ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);
517 
518 #if defined(PETSC_HAVE_MATLAB_ENGINE)
519   ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
520   ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
521 #endif
522   ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
523   PetscFunctionReturn(0);
524 }
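/*
   Hedged usage sketch (editorial): the usual way to reach VecCreate_MPI_Private() from user
   code is through the type system; VecSetType(v,VECMPI) calls VecCreate_MPI() below, which in
   turn calls this routine with alloc = PETSC_TRUE and no ghost points.

     Vec v;
     ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,PETSC_DECIDE,100);CHKERRQ(ierr);
     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);       or VecSetFromOptions(v)
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/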
525 
526 /*MC
527    VECMPI - VECMPI = "mpi" - The basic parallel vector
528 
529    Options Database Keys:
530 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
531 
532   Level: beginner
533 
534 .seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
535 M*/
536 
537 PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv)
538 {
539   PetscErrorCode ierr;
540 
541   PetscFunctionBegin;
542   ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
543   PetscFunctionReturn(0);
544 }
545 
546 /*MC
547    VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process
548 
549    Options Database Keys:
550 . -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()
551 
552   Level: beginner
553 
554 .seealso: VecCreateSeq(), VecCreateMPI()
555 M*/
556 
557 PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
558 {
559   PetscErrorCode ierr;
560   PetscMPIInt    size;
561 
562   PetscFunctionBegin;
563   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
564   if (size == 1) {
565     ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
566   } else {
567     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
568   }
569   PetscFunctionReturn(0);
570 }
571 
572 /*@C
573    VecCreateMPIWithArray - Creates a parallel, array-style vector,
574    where the user provides the array space to store the vector values.
575 
576    Collective on MPI_Comm
577 
578    Input Parameters:
579 +  comm  - the MPI communicator to use
580 .  bs    - block size, same meaning as VecSetBlockSize()
581 .  n     - local vector length, cannot be PETSC_DECIDE
582 .  N     - global vector length (or PETSC_DECIDE to have it calculated)
583 -  array - the user provided array to store the vector values
584 
585    Output Parameter:
586 .  vv - the vector
587 
588    Notes:
589    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
590    same type as an existing vector.
591 
592    If the user-provided array is NULL, then VecPlaceArray() can be used
593    at a later stage to SET the array for storing the vector values.
594 
595    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
596    The user should not free the array until the vector is destroyed.
597 
598    Level: intermediate
599 
600    Concepts: vectors^creating with array
601 
602 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
603           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
604 
605 @*/
606 PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
607 {
608   PetscErrorCode ierr;
609 
610   PetscFunctionBegin;
611   if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
612   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
613   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
614   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
615   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
616   ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
617   PetscFunctionReturn(0);
618 }
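/*
   Hedged usage sketch (editorial): the caller owns the buffer passed to VecCreateMPIWithArray(),
   must keep it alive until the vector is destroyed, and frees it afterwards (PETSc will not).
   The local length "nlocal" and buffer name "storage" are assumptions for illustration.

     PetscInt     nlocal = 10;
     PetscScalar *storage;
     Vec          v;
     ierr = PetscMalloc1(nlocal,&storage);CHKERRQ(ierr);
     ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nlocal,PETSC_DECIDE,storage,&v);CHKERRQ(ierr);
     ...                                              use v; its entries live in storage
     ierr = VecDestroy(&v);CHKERRQ(ierr);
     ierr = PetscFree(storage);CHKERRQ(ierr);
*/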
619 
620 /*@C
621    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
622    the caller allocates the array space.
623 
624    Collective on MPI_Comm
625 
626    Input Parameters:
627 +  comm - the MPI communicator to use
628 .  n - local vector length
629 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
630 .  nghost - number of local ghost points
631 .  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
632 -  array - the space to store the vector values (of length at least n + nghost)
633 
634    Output Parameter:
635 .  vv - the global vector representation (without ghost points as part of vector)
636 
637    Notes:
638    Use VecGhostGetLocalForm() to access the local, ghosted representation
639    of the vector.
640 
641    This also automatically sets the ISLocalToGlobalMapping() for this vector.
642 
643    Level: advanced
644 
645    Concepts: vectors^creating with array
646    Concepts: vectors^ghosted
647 
648 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
649           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
650           VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()
651 
652 @*/
653 PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
654 {
655   PetscErrorCode         ierr;
656   Vec_MPI                *w;
657   PetscScalar            *larray;
658   IS                     from,to;
659   ISLocalToGlobalMapping ltog;
660   PetscInt               rstart,i,*indices;
661 
662   PetscFunctionBegin;
663   *vv = 0;
664 
665   if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
666   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
667   if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
668   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
669   /* Create global representation */
670   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
671   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
672   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
673   w    = (Vec_MPI*)(*vv)->data;
674   /* Create local representation */
675   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
676   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
677   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
678   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
679 
680   /*
681        Create scatter context for scattering (updating) ghost values
682   */
683   ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
684   ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
685   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
686   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
687   ierr = ISDestroy(&to);CHKERRQ(ierr);
688   ierr = ISDestroy(&from);CHKERRQ(ierr);
689 
690   /* set local to global mapping for ghosted vector */
691   ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
692   ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
693   for (i=0; i<n; i++) {
694     indices[i] = rstart + i;
695   }
696   for (i=0; i<nghost; i++) {
697     indices[n+i] = ghosts[i];
698   }
699   ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
700   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
701   ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
702   PetscFunctionReturn(0);
703 }
704 
705 /*@
706    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
707 
708    Collective on MPI_Comm
709 
710    Input Parameters:
711 +  comm - the MPI communicator to use
712 .  n - local vector length
713 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
714 .  nghost - number of local ghost points
715 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
716 
717    Output Parameter:
718 .  vv - the global vector representation (without ghost points as part of vector)
719 
720    Notes:
721    Use VecGhostGetLocalForm() to access the local, ghosted representation
722    of the vector.
723 
724    This also automatically sets the ISLocalToGlobalMapping() for this vector.
725 
726    Level: advanced
727 
728    Concepts: vectors^ghosted
729 
730 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
731           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
732           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
733           VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()
734 
735 @*/
736 PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
737 {
738   PetscErrorCode ierr;
739 
740   PetscFunctionBegin;
741   ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
742   PetscFunctionReturn(0);
743 }
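/*
   Hedged usage sketch (editorial): a typical ghost-vector workflow built from the routines
   above.  The ghost list below is an assumption for illustration; in practice it holds the
   global indices owned by other ranks that this rank needs to read.

     PetscInt ghosts[2] = {0, 99};                    global indices owned elsewhere
     Vec      v,vlocal;
     const PetscScalar *a;
     ierr = VecCreateGhost(PETSC_COMM_WORLD,10,PETSC_DECIDE,2,ghosts,&v);CHKERRQ(ierr);
     ierr = VecSet(v,1.0);CHKERRQ(ierr);              fill the owned part
     ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostGetLocalForm(v,&vlocal);CHKERRQ(ierr);
     ierr = VecGetArrayRead(vlocal,&a);CHKERRQ(ierr);  a[10],a[11] hold the ghost values
     ierr = VecRestoreArrayRead(vlocal,&a);CHKERRQ(ierr);
     ierr = VecGhostRestoreLocalForm(v,&vlocal);CHKERRQ(ierr);
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/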
744 
745 /*@
746    VecMPISetGhost - Sets the ghost points for an MPI ghost vector
747 
748    Collective on Vec
749 
750    Input Parameters:
751 +  vv - the MPI vector
752 .  nghost - number of local ghost points
753 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
754 
755 
756    Notes:
757    Use VecGhostGetLocalForm() to access the local, ghosted representation
758    of the vector.
759 
760    This also automatically sets the ISLocalToGlobalMapping() for this vector.
761 
762    You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).
763 
764    Level: advanced
765 
766    Concepts: vectors^ghosted
767 
768 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
769           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
770           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
771           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
772 
773 @*/
774 PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
775 {
776   PetscErrorCode ierr;
777   PetscBool      flg;
778 
779   PetscFunctionBegin;
780   ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
781   /* if this is already a fully instantiated VECMPI, destroy it and rebuild with ghosting */
782   if (flg) {
783     PetscInt               n,N;
784     Vec_MPI                *w;
785     PetscScalar            *larray;
786     IS                     from,to;
787     ISLocalToGlobalMapping ltog;
788     PetscInt               rstart,i,*indices;
789     MPI_Comm               comm;
790 
791     ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
792     n    = vv->map->n;
793     N    = vv->map->N;
794     ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
795     ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
796     ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
797     w    = (Vec_MPI*)(vv)->data;
798     /* Create local representation */
799     ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
800     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
801     ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
802     ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);
803 
804     /*
805      Create scatter context for scattering (updating) ghost values
806      */
807     ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
808     ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
809     ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
810     ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
811     ierr = ISDestroy(&to);CHKERRQ(ierr);
812     ierr = ISDestroy(&from);CHKERRQ(ierr);
813 
814     /* set local to global mapping for ghosted vector */
815     ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
816     ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);
817 
818     for (i=0; i<n; i++)      indices[i]   = rstart + i;
819     for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];
820 
821     ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
822     ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
823     ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
824   } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
825   else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
826   PetscFunctionReturn(0);
827 }
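/*
   Hedged usage sketch (editorial): per the note above, VecMPISetGhost() is called after the
   sizes and the type have been set.  The sizes and ghost indices are assumptions for
   illustration.

     PetscInt ghosts[2] = {0, 99};
     Vec      v;
     ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,10,PETSC_DECIDE);CHKERRQ(ierr);
     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
     ierr = VecMPISetGhost(v,2,ghosts);CHKERRQ(ierr);
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/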
828 
829 
830 /* ------------------------------------------------------------------------------------------*/
831 /*@C
832    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
833    the caller allocates the array space. Indices in the ghost region are based on blocks.
834 
835    Collective on MPI_Comm
836 
837    Input Parameters:
838 +  comm - the MPI communicator to use
839 .  bs - block size
840 .  n - local vector length
841 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
842 .  nghost - number of local ghost blocks
843 .  ghosts - global indices of ghost blocks (or NULL if not needed); counts are by block, not by individual index, and these do not need to be in increasing order (sorted)
844 -  array - the space to store the vector values (of length at least n + nghost*bs)
845 
846    Output Parameter:
847 .  vv - the global vector representation (without ghost points as part of vector)
848 
849    Notes:
850    Use VecGhostGetLocalForm() to access the local, ghosted representation
851    of the vector.
852 
853    n is the local vector size (the total local size, not the number of blocks) while nghost
854    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
855    portion is bs*nghost
856 
857    Level: advanced
858 
859    Concepts: vectors^creating ghosted
860    Concepts: vectors^creating with array
861 
862 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
863           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
864           VecCreateGhostWithArray(), VecCreateGhostBlock()
865 
866 @*/
867 PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
868 {
869   PetscErrorCode         ierr;
870   Vec_MPI                *w;
871   PetscScalar            *larray;
872   IS                     from,to;
873   ISLocalToGlobalMapping ltog;
874   PetscInt               rstart,i,nb,*indices;
875 
876   PetscFunctionBegin;
877   *vv = 0;
878 
879   if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
880   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
881   if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
882   if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
883   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
884   /* Create global representation */
885   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
886   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
887   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
888   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
889   w    = (Vec_MPI*)(*vv)->data;
890   /* Create local representation */
891   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
892   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
893   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
894   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
895 
896   /*
897        Create scatter context for scattering (updating) ghost values
898   */
899   ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
900   ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
901   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
902   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
903   ierr = ISDestroy(&to);CHKERRQ(ierr);
904   ierr = ISDestroy(&from);CHKERRQ(ierr);
905 
906   /* set local to global mapping for ghosted vector */
907   nb     = n/bs;
908   ierr   = PetscMalloc1(nb+nghost,&indices);CHKERRQ(ierr);
909   ierr   = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
910   rstart = rstart/bs;
911 
912   for (i=0; i<nb; i++)      indices[i]    = rstart + i;
913   for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];
914 
915   ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
916   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
917   ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
918   PetscFunctionReturn(0);
919 }
920 
921 /*@
922    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
923         The indexing of the ghost points is done by blocks.
924 
925    Collective on MPI_Comm
926 
927    Input Parameters:
928 +  comm - the MPI communicator to use
929 .  bs - the block size
930 .  n - local vector length
931 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
932 .  nghost - number of local ghost blocks
933 -  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)
934 
935    Output Parameter:
936 .  vv - the global vector representation (without ghost points as part of vector)
937 
938    Notes:
939    Use VecGhostGetLocalForm() to access the local, ghosted representation
940    of the vector.
941 
942    n is the local vector size (the total local size, not the number of blocks) while nghost
943    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
944    portion is bs*nghost
945 
946    Level: advanced
947 
948    Concepts: vectors^ghosted
949 
950 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
951           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
952           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
953 
954 @*/
955 PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
956 {
957   PetscErrorCode ierr;
958 
959   PetscFunctionBegin;
960   ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
961   PetscFunctionReturn(0);
962 }
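/*
   Hedged usage sketch (editorial): with VecCreateGhostBlock() the ghost indices refer to
   blocks, so the local form created above has length n + bs*nghost.  The block size, sizes,
   and ghost block indices below are assumptions for illustration.

     PetscInt bs = 2, ghostblocks[1] = {0};           ghost block 0 = global entries 0 and 1
     Vec      v;
     ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,bs,10,PETSC_DECIDE,1,ghostblocks,&v);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/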
963