
/*
   This file contains routines for parallel vector operations.
 */
#include <petscoptions.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

static PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

static PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}
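
/*
   Both dot products above follow the standard pattern for reductions on MPI
   vectors: compute the local contribution with the sequential kernel, then
   combine across the communicator with a single MPIU_Allreduce(). A minimal
   usage sketch (error handling abbreviated; x and y are assumed to be
   assembled vectors living on the same communicator):

     PetscScalar dot,tdot;
     ierr = VecDot(x,y,&dot);CHKERRQ(ierr);
     ierr = VecTDot(x,y,&tdot);CHKERRQ(ierr);

   Both calls are collective; every rank in the communicator must participate
   or the program deadlocks.
*/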

extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
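
/*
   VecPlaceArray()/VecResetArray() let a caller temporarily substitute their
   own storage for the vector's data. A hedged sketch of the intended pairing,
   where nlocal is the local size of v and the buffer is purely illustrative:

     PetscScalar *mine;
     ierr = PetscMalloc1(nlocal,&mine);CHKERRQ(ierr);
     ierr = VecPlaceArray(v,mine);CHKERRQ(ierr);
     ...   v now reads and writes mine
     ierr = VecResetArray(v);CHKERRQ(ierr);
     ierr = PetscFree(mine);CHKERRQ(ierr);

   The guard above exists because only one previous array is saved: placing a
   second array without an intervening VecResetArray() would leak the first.
*/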

static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}


static PetscErrorCode VecSetOption_MPI(Vec V,VecOption op,PetscBool flag)
{
  Vec_MPI        *v = (Vec_MPI*)V->data;
  PetscFunctionBegin;
  switch (op) {
  case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag;
    break;
  case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag;
    break;
  case VEC_SUBSET_OFF_PROC_ENTRIES: v->assembly_subset = flag;
    break;
  }
  PetscFunctionReturn(0);
}
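
/*
   These options tune stash-based assembly. For example, a code that only sets
   locally owned entries can skip the off-process stash entirely, and one that
   reuses the same communication pattern on every assembly can ask for the
   rendezvous to be cached (a sketch; X is an assumed MPI vector):

     ierr = VecSetOption(X,VEC_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
     ierr = VecSetOption(X,VEC_SUBSET_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
*/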


static PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI        *v = (Vec_MPI*)vin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = 0;
  if (v->localrep) {
    ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
  PetscInt bs = X->map->bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* x->recvhdr only exists when we are reusing a communication network.  In that case, some messages can be empty, but
   * we have to send them this time if we sent them before because the receiver is expecting them. */
  if (hdr->count || (x->recvhdr && x->sendptrs[rankid].ints)) {
    ierr = MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
  }
  if (hdr->bcount || (x->recvhdr && x->sendptrs[rankid].intb)) {
    ierr = MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
  PetscErrorCode ierr;
  PetscInt bs = X->map->bs;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  ierr = PetscSegBufferGet(x->segrecvframe,1,&frame);CHKERRQ(ierr);

  if (hdr->count) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
    frame->pendings = 2;
  } else {
    frame->ints = NULL;
    frame->scalars = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
    frame->pendingb = 2;
  } else {
    frame->intb = NULL;
    frame->scalarb = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI        *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  PetscInt       i,j,jb,bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(0);

  ierr = PetscObjectGetComm((PetscObject)X,&comm);CHKERRQ(ierr);
  ierr = VecGetBlockSize(X,&bs);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    InsertMode addv;
    ierr = MPIU_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);CHKERRQ(ierr);
    if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
  }
#endif
  X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */

  ierr = VecStashSortCompress_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashSortCompress_Private(&X->bstash);CHKERRQ(ierr);

  if (!x->sendranks) {
    PetscMPIInt nowners,bnowners,*owners,*bowners;
    PetscInt ntmp;
    ierr = VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners);CHKERRQ(ierr);
    ierr = VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners);CHKERRQ(ierr);
    ierr = PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks);CHKERRQ(ierr);
    x->nsendranks = ntmp;
    ierr = PetscFree(owners);CHKERRQ(ierr);
    ierr = PetscFree(bowners);CHKERRQ(ierr);
    ierr = PetscMalloc1(x->nsendranks,&x->sendhdr);CHKERRQ(ierr);
    ierr = PetscCalloc1(x->nsendranks,&x->sendptrs);CHKERRQ(ierr);
  }
  for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
    PetscMPIInt rank = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for ( ; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
      for ( ; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) {ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint);CHKERRQ(ierr);}
  if (!x->segrecvscalar) {ierr = PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar);CHKERRQ(ierr);}
  if (!x->segrecvframe) {ierr = PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe);CHKERRQ(ierr);}
  if (x->recvhdr) {             /* VEC_SUBSET_OFF_PROC_ENTRIES and this is not the first assembly */
    PetscMPIInt tag[4];
    if (!x->assembly_subset) SETERRQ(comm,PETSC_ERR_PLIB,"Attempt to reuse rendezvous when not VEC_SUBSET_OFF_PROC_ENTRIES");
    for (i=0; i<4; i++) {ierr = PetscCommGetNewTag(comm,&tag[i]);CHKERRQ(ierr);}
    for (i=0; i<x->nsendranks; i++) {
      ierr = VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X);CHKERRQ(ierr);
    }
    for (i=0; i<x->nrecvranks; i++) {
      ierr = VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X);CHKERRQ(ierr);
    }
    x->use_status = PETSC_TRUE;
  } else {                      /* First time */
    ierr = PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X);CHKERRQ(ierr);
    x->use_status = PETSC_FALSE;
  }

  {
    PetscInt nstash,reallocs;
    ierr = VecStashGetInfo_Private(&X->stash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
    ierr = VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscInt bs = X->map->bs;
  PetscMPIInt npending,*some_indices,r;
  MPI_Status  *some_statuses;
  PetscScalar *xarray;
  PetscErrorCode ierr;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(0);
  }

  ierr = VecGetArray(X,&xarray);CHKERRQ(ierr);
  ierr = PetscSegBufferExtractInPlace(x->segrecvframe,&frame);CHKERRQ(ierr);
  ierr = PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses);CHKERRQ(ierr);
  for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending>0) {
    PetscMPIInt ndone=0,ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    ierr = MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE);CHKERRQ(ierr);
    for (ii=0; ii<ndone; ii++) {
      PetscInt i = some_indices[ii]/4,j,k;
      InsertMode imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt *recvint;
      PetscScalar *recvscalar;
      PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
      PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
        } else count = x->recvhdr[i].count;
        for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          if (*recvint < X->map->rstart || X->map->rend <= *recvint) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %D out of local range [%D,%D)",*recvint,X->map->rstart,X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      } else {                  /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
          PetscInt loc = (*recvint)*bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      }
    }
  }
  ierr = VecRestoreArray(X,&xarray);CHKERRQ(ierr);
  ierr = MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
  ierr = PetscFree2(some_indices,some_statuses);CHKERRQ(ierr);
  if (x->assembly_subset) {
    void *dummy;                /* reset segbuffers */
    ierr = PetscSegBufferExtractInPlace(x->segrecvint,&dummy);CHKERRQ(ierr);
    ierr = PetscSegBufferExtractInPlace(x->segrecvscalar,&dummy);CHKERRQ(ierr);
  } else {
    ierr = VecAssemblyReset_MPI(X);CHKERRQ(ierr);
  }

  X->stash.insertmode = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  ierr = VecStashScatterEnd_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashScatterEnd_Private(&X->bstash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
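
/*
   Together, VecAssemblyBegin_MPI_BTS() and VecAssemblyEnd_MPI_BTS() implement
   the usual two-phase assembly: Begin posts the sends and receives of stashed
   off-process entries, End drains them into the local array. A hedged sketch
   of the calling pattern (ix[] and y[] are assumed application data; any rank
   may set any global entry):

     ierr = VecSetValues(X,m,ix,y,ADD_VALUES);CHKERRQ(ierr);
     ierr = VecAssemblyBegin(X);CHKERRQ(ierr);
     ...   independent local work may overlap the communication here
     ierr = VecAssemblyEnd(X);CHKERRQ(ierr);

   When VEC_SUBSET_OFF_PROC_ENTRIES is set, later assemblies reuse the
   rendezvous established the first time instead of rebuilding it.
*/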

PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(x->sendreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->recvreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->sendranks);CHKERRQ(ierr);
  ierr = PetscFree(x->recvranks);CHKERRQ(ierr);
  ierr = PetscFree(x->sendhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->recvhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->sendptrs);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvint);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvscalar);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvframe);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


static PetscErrorCode VecSetFromOptions_MPI(PetscOptionItems *PetscOptionsObject,Vec X)
{
  PetscErrorCode ierr;
  PetscBool      flg = PETSC_FALSE,set;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"VecMPI Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-vec_assembly_bts","Use BuildTwoSided version of assembly","",flg,&flg,&set);CHKERRQ(ierr);
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI_BTS : VecAssemblyBegin_MPI;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI_BTS   : VecAssemblyEnd_MPI;
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
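
/*
   The BTS assembly can thus be selected at runtime, e.g. by launching with
   -vec_assembly_bts on the command line, or programmatically before
   VecSetFromOptions() is called (a sketch using the global options database):

     ierr = PetscOptionsSetValue(NULL,"-vec_assembly_bts","true");CHKERRQ(ierr);
     ierr = VecSetFromOptions(X);CHKERRQ(ierr);
*/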


static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI, /* 20 */
                                VecAssemblyBegin_MPI,
                                VecAssemblyEnd_MPI,
                                0,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                0,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                VecSetFromOptions_MPI,/*set from options */
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideGather_Default,
                                VecStrideScatter_Default,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideSubSetGather_Default,
                                VecStrideSubSetScatter_Default,
                                0,
                                0
};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);

  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr               = PetscMalloc1(n,&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/

PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
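
/*
   A hedged sketch of creating a VECMPI through the generic interface rather
   than the VecCreateMPI() convenience routine (comm, n, and N are assumed;
   one of n or N may be PETSC_DECIDE, but not both):

     Vec v;
     ierr = VecCreate(comm,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,n,N);CHKERRQ(ierr);
     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
     ...
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/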

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
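
/*
   A hedged usage sketch for VecCreateMPIWithArray(). The buffer must hold at
   least nlocal entries (nlocal is an assumed local size), must outlive the
   vector, and is never freed by PETSc:

     PetscScalar *a;
     Vec         v;
     ierr = PetscMalloc1(nlocal,&a);CHKERRQ(ierr);
     ierr = VecCreateMPIWithArray(comm,1,nlocal,PETSC_DECIDE,a,&v);CHKERRQ(ierr);
     ...
     ierr = VecDestroy(&v);CHKERRQ(ierr);
     ierr = PetscFree(a);CHKERRQ(ierr);
*/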

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
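
/*
   A hedged sketch for VecCreateGhostWithArray(). The user buffer holds the
   owned entries first and the ghost entries last, so it must be at least
   n + nghost long (the ghost indices below are illustrative only):

     PetscInt    ghosts[2] = {0,42};
     PetscScalar *a;
     Vec         v;
     ierr = PetscMalloc1(n+2,&a);CHKERRQ(ierr);
     ierr = VecCreateGhostWithArray(comm,n,PETSC_DECIDE,2,ghosts,a,&v);CHKERRQ(ierr);

   As with VecCreateMPIWithArray(), PETSc never frees the buffer; the caller
   frees it after VecDestroy().
*/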

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
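
/*
   The typical ghost-update cycle for a vector created by VecCreateGhost(),
   sketched under the same assumptions as the man page above:

     Vec lv;
     ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostGetLocalForm(v,&lv);CHKERRQ(ierr);
     ...   lv has length n+nghost; entries n..n+nghost-1 are the ghost values
     ierr = VecGhostRestoreLocalForm(v,&lv);CHKERRQ(ierr);

   SCATTER_FORWARD moves owned values into the ghost slots; the reverse
   scatter with ADD_VALUES accumulates ghost contributions back to the owners.
*/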

/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if this is already a fully built VECMPI, destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
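
/*
   A hedged sketch of the call order VecMPISetGhost() requires: create, set
   sizes, set the type to VECMPI, then ghost (n, nghost, and ghosts[] are
   assumed application data):

     Vec v;
     ierr = VecCreate(comm,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,n,PETSC_DECIDE);CHKERRQ(ierr);
     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
     ierr = VecMPISetGhost(v,nghost,ghosts);CHKERRQ(ierr);
*/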


/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb   = n/bs;
  ierr = PetscMalloc1(nb+nghost,&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);

  for (i=0; i<nb; i++)      indices[i]    = rstart + i;
  for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];

  ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
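/*
   A hedged block-ghost sketch: with bs = 2, each entry of ghosts[] names a
   block of two scalars, so the local form has n + 2*2 entries (the indices
   below are illustrative only):

     PetscInt ghosts[2] = {0,3};
     Vec      v;
     ierr = VecCreateGhostBlock(comm,2,n,PETSC_DECIDE,2,ghosts,&v);CHKERRQ(ierr);
*/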
957