/*
   This file contains routines for parallel vector operations.
 */
#include <petscsys.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}
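
/*
   Both kernels above follow the same pattern: compute the local contribution
   with the sequential kernel, then combine the per-rank partial results with
   a single MPIU_Allreduce so that every process receives the identical global
   value.  A minimal caller-side sketch (x and y are hypothetical, assumed
   already created and assembled on PETSC_COMM_WORLD):

     Vec            x,y;
     PetscScalar    dot;
     PetscErrorCode ierr;
     ierr = VecDot(x,y,&dot);CHKERRQ(ierr);   // collective; same result on every rank
*/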

extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

static PetscErrorCode VecSetOption_MPI(Vec V,VecOption op,PetscBool flag)
{
  Vec_MPI *v = (Vec_MPI*)V->data;

  PetscFunctionBegin;
  switch (op) {
  case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag;
    break;
  case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag;
    break;
  case VEC_SUBSET_OFF_PROC_ENTRIES: v->assembly_subset = flag;
    break;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI        *v = (Vec_MPI*)vin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = 0;
  if (v->localrep) {
    ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
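
/*
   VecPlaceArray_MPI() and VecResetArray_MPI() form a pair: place swaps in a
   caller-provided buffer (stashing the original in unplacedarray) and reset
   restores the original.  A sketch of the intended lifecycle from user code,
   assuming x is an assembled VECMPI with local length nlocal (names are
   illustrative):

     PetscScalar    *mybuf;
     PetscErrorCode ierr;
     ierr = PetscMalloc1(nlocal,&mybuf);CHKERRQ(ierr);
     ierr = VecPlaceArray(x,mybuf);CHKERRQ(ierr);   // x now reads/writes mybuf
     // ... operate on x ...
     ierr = VecResetArray(x);CHKERRQ(ierr);         // required before placing again
     ierr = PetscFree(mybuf);CHKERRQ(ierr);
*/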

static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
  PetscInt bs = X->map->bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* x->recvhdr only exists when we are reusing a communication network.  In that case, some messages can be empty, but
   * we have to send them this time if we sent them before because the receiver is expecting them. */
  if (hdr->count || (x->recvhdr && x->sendptrs[rankid].ints)) {
    ierr = MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
  }
  if (hdr->bcount || (x->recvhdr && x->sendptrs[rankid].intb)) {
    ierr = MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
  PetscErrorCode ierr;
  PetscInt bs = X->map->bs;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  ierr = PetscSegBufferGet(x->segrecvframe,1,&frame);CHKERRQ(ierr);

  if (hdr->count) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
    frame->pendings = 2;
  } else {
    frame->ints = NULL;
    frame->scalars = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
    frame->pendingb = 2;
  } else {
    frame->intb = NULL;
    frame->scalarb = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI        *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  PetscInt       i,j,jb,bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(0);

  ierr = PetscObjectGetComm((PetscObject)X,&comm);CHKERRQ(ierr);
  ierr = VecGetBlockSize(X,&bs);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    InsertMode addv;
    ierr = MPIU_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);CHKERRQ(ierr);
    if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
  }
#endif
  X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */

  ierr = VecStashSortCompress_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashSortCompress_Private(&X->bstash);CHKERRQ(ierr);

  if (!x->sendranks) {
    PetscMPIInt nowners,bnowners,*owners,*bowners;
    PetscInt ntmp;
    ierr = VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners);CHKERRQ(ierr);
    ierr = VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners);CHKERRQ(ierr);
    ierr = PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks);CHKERRQ(ierr);
    x->nsendranks = ntmp;
    ierr = PetscFree(owners);CHKERRQ(ierr);
    ierr = PetscFree(bowners);CHKERRQ(ierr);
    ierr = PetscMalloc1(x->nsendranks,&x->sendhdr);CHKERRQ(ierr);
    ierr = PetscCalloc1(x->nsendranks,&x->sendptrs);CHKERRQ(ierr);
  }
  for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
    PetscMPIInt rank = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for ( ; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
      for ( ; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) {ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint);CHKERRQ(ierr);}
  if (!x->segrecvscalar) {ierr = PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar);CHKERRQ(ierr);}
  if (!x->segrecvframe) {ierr = PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe);CHKERRQ(ierr);}
  if (x->recvhdr) {             /* VEC_SUBSET_OFF_PROC_ENTRIES and this is not the first assembly */
    PetscMPIInt tag[4];
    if (!x->assembly_subset) SETERRQ(comm,PETSC_ERR_PLIB,"Attempt to reuse rendezvous when not VEC_SUBSET_OFF_PROC_ENTRIES");
    for (i=0; i<4; i++) {ierr = PetscCommGetNewTag(comm,&tag[i]);CHKERRQ(ierr);}
    for (i=0; i<x->nsendranks; i++) {
      ierr = VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X);CHKERRQ(ierr);
    }
    for (i=0; i<x->nrecvranks; i++) {
      ierr = VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X);CHKERRQ(ierr);
    }
    x->use_status = PETSC_TRUE;
  } else {                      /* First time */
    ierr = PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X);CHKERRQ(ierr);
    x->use_status = PETSC_FALSE;
  }

  {
    PetscInt nstash,reallocs;
    ierr = VecStashGetInfo_Private(&X->stash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
    ierr = VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscInt bs = X->map->bs;
  PetscMPIInt npending,*some_indices,r;
  MPI_Status  *some_statuses;
  PetscScalar *xarray;
  PetscErrorCode ierr;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(0);
  }

  if (!x->segrecvframe) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
  ierr = VecGetArray(X,&xarray);CHKERRQ(ierr);
  ierr = PetscSegBufferExtractInPlace(x->segrecvframe,&frame);CHKERRQ(ierr);
  ierr = PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses);CHKERRQ(ierr);
  for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending>0) {
    PetscMPIInt ndone=0,ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    ierr = MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE);CHKERRQ(ierr);
    for (ii=0; ii<ndone; ii++) {
      PetscInt i = some_indices[ii]/4,j,k;
      InsertMode imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt *recvint;
      PetscScalar *recvscalar;
      PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
      PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
        } else count = x->recvhdr[i].count;
        for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          if (*recvint < X->map->rstart || X->map->rend <= *recvint) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %D out of local range [%D,%D)",*recvint,X->map->rstart,X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      } else {                  /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
          PetscInt loc = (*recvint)*bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      }
    }
  }
  ierr = VecRestoreArray(X,&xarray);CHKERRQ(ierr);
  ierr = MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
  ierr = PetscFree2(some_indices,some_statuses);CHKERRQ(ierr);
  if (x->assembly_subset) {
    void *dummy;                /* reset segbuffers */
    ierr = PetscSegBufferExtractInPlace(x->segrecvint,&dummy);CHKERRQ(ierr);
    ierr = PetscSegBufferExtractInPlace(x->segrecvscalar,&dummy);CHKERRQ(ierr);
  } else {
    ierr = VecAssemblyReset_MPI(X);CHKERRQ(ierr);
  }

  X->stash.insertmode = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  ierr = VecStashScatterEnd_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashScatterEnd_Private(&X->bstash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
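
/*
   The two routines above implement the "BTS" (build-two-sided) assembly: the
   first VecAssemblyBegin/End pair pays for a rendezvous to discover the
   communication pattern, and with VEC_SUBSET_OFF_PROC_ENTRIES set that
   pattern is kept and reused by later assemblies.  A sketch of the calling
   pattern this is designed for (loop bounds and names are illustrative):

     ierr = VecSetOption(x,VEC_SUBSET_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
     for (it=0; it<nits; it++) {
       // set a (sub)set of the off-process entries set on the first iteration
       ierr = VecSetValues(x,m,idx,vals,ADD_VALUES);CHKERRQ(ierr);
       ierr = VecAssemblyBegin(x);CHKERRQ(ierr);    // reuses ranks/requests after it 0
       ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
     }
*/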

PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(x->sendreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->recvreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->sendranks);CHKERRQ(ierr);
  ierr = PetscFree(x->recvranks);CHKERRQ(ierr);
  ierr = PetscFree(x->sendhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->recvhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->sendptrs);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvint);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvscalar);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvframe);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode VecSetFromOptions_MPI(PetscOptionItems *PetscOptionsObject,Vec X)
{
#if !defined(PETSC_HAVE_MPIUNI)
  PetscErrorCode ierr;
  PetscBool      flg = PETSC_FALSE,set;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"VecMPI Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-vec_assembly_legacy","Use MPI 1 version of assembly","",flg,&flg,&set);CHKERRQ(ierr);
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI   : VecAssemblyEnd_MPI_BTS;
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
#else
  X->ops->assemblybegin =  VecAssemblyBegin_MPI;
  X->ops->assemblyend   =  VecAssemblyEnd_MPI;
#endif
  PetscFunctionReturn(0);
}

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI, /* 20 */
                                VecAssemblyBegin_MPI_BTS,
                                VecAssemblyEnd_MPI_BTS,
                                0,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                0,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                VecSetFromOptions_MPI, /* set from options */
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideGather_Default,
                                VecStrideScatter_Default,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideSubSetGather_Default,
                                VecStrideSubSetScatter_Default,
                                0,
                                0
};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space; otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);

  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr               = PetscMalloc1(n,&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
M*/
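
/*
   A minimal creation sketch for this type (the global length 100 is
   illustrative):

     Vec            v;
     PetscErrorCode ierr;
     ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,PETSC_DECIDE,100);CHKERRQ(ierr);
     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);     // or VecSetFromOptions() with -vec_type mpi
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/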

PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user-provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
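
/*
   A usage sketch, assuming the caller keeps the buffer alive for the entire
   life of the vector (the buffer name and local length are illustrative):

     PetscScalar    storage[10];                    // at least n entries per rank
     Vec            v;
     PetscErrorCode ierr;
     ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,10,PETSC_DECIDE,storage,&v);CHKERRQ(ierr);
     // ... use v; VecDestroy() will NOT free storage ...
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/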

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing (sorted) order
-  array - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()
@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing (sorted) order

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()
@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
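
/*
   A sketch of the typical ghost-update cycle built on this routine, assuming
   a local length n and two ghost entries per rank (the ghost indices are
   illustrative):

     PetscInt       ghosts[2] = {0,42};             // global indices owned by other ranks
     Vec            v,vlocal;
     PetscErrorCode ierr;
     ierr = VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,2,ghosts,&v);CHKERRQ(ierr);
     // ... fill the owned part of v ...
     ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostGetLocalForm(v,&vlocal);CHKERRQ(ierr);      // sequential form of length n+2
     // ... read owned and ghost values through vlocal ...
     ierr = VecGhostRestoreLocalForm(v,&vlocal);CHKERRQ(ierr);
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/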

/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing (sorted) order

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if this is already a fully built VECMPI then basically destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
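
/*
   A sketch of the call order the error checks above enforce: the type and
   sizes must be set before the ghosts (sizes and indices are illustrative):

     Vec            v;
     PetscInt       ghosts[2] = {7,13};
     PetscErrorCode ierr;
     ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,n,PETSC_DECIDE);CHKERRQ(ierr);
     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);       // type and sizes first ...
     ierr = VecMPISetGhost(v,2,ghosts);CHKERRQ(ierr); // ... then the ghosts
*/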

/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed); counts are by block, not by index, and these do not need to be in increasing (sorted) order
-  array - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()
@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb     = n/bs;
  ierr   = PetscMalloc1(nb+nghost,&indices);CHKERRQ(ierr);
  ierr   = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  rstart = rstart/bs;

  for (i=0; i<nb; i++)      indices[i]    = rstart + i;
  for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];

  ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks; counts are by block, not by individual index, and these do not need to be in increasing (sorted) order

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
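
/*
   A sketch for the blocked variant: with bs = 2 each ghost index below names
   a block of two scalars, so the local form is n + 2*bs entries long (sizes
   and indices are illustrative):

     PetscInt       bghosts[2] = {1,4};             // block indices, not scalar indices
     Vec            v;
     PetscErrorCode ierr;
     ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,2,n,PETSC_DECIDE,2,bghosts,&v);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/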
964