xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision e497415562a4a2c7db96bf30e226223f458e42c3)
#define PETSCVEC_DLL
/*
   This file contains routines for Parallel vector operations.
 */
#include "../src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array = (PetscScalar *)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);

  /* use the map that already exists in win */
  ierr = PetscLayoutDestroy((*v)->map);CHKERRQ(ierr);
  (*v)->map = win->map;
  win->map->refcnt++;

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArrayPrivate(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArrayPrivate(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  if (win->mapping) {
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map->bs    = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}

extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
extern PetscErrorCode VecResetArray_MPI(Vec);

#undef __FUNCT__
#define __FUNCT__ "VecPointwiseMax_Seq"
static PetscErrorCode VecPointwiseMax_Seq(Vec win,Vec xin,Vec yin)
{
  PetscErrorCode ierr;
  PetscInt       n = win->map->n,i;
  PetscScalar    *ww,*xx,*yy;

  PetscFunctionBegin;
  ierr = VecGetArrayPrivate3(win,&ww,xin,&xx,yin,&yy);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ww[i] = PetscMax(PetscRealPart(xx[i]),PetscRealPart(yy[i]));
  }
  ierr = PetscLogFlops(n);CHKERRQ(ierr);
  ierr = VecRestoreArrayPrivate3(win,&ww,xin,&xx,yin,&yy);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecPointwiseMin_Seq"
static PetscErrorCode VecPointwiseMin_Seq(Vec win,Vec xin,Vec yin)
{
  PetscErrorCode ierr;
  PetscInt       n = win->map->n,i;
  PetscScalar    *ww,*xx,*yy;

  PetscFunctionBegin;
  ierr = VecGetArrayPrivate3(win,&ww,xin,&xx,yin,&yy);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ww[i] = PetscMin(PetscRealPart(xx[i]),PetscRealPart(yy[i]));
  }
  ierr = PetscLogFlops(n);CHKERRQ(ierr);
  ierr = VecRestoreArrayPrivate3(win,&ww,xin,&xx,yin,&yy);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecPointwiseMaxAbs_Seq"
static PetscErrorCode VecPointwiseMaxAbs_Seq(Vec win,Vec xin,Vec yin)
{
  PetscErrorCode ierr;
  PetscInt       n = win->map->n,i;
  PetscScalar    *ww,*xx,*yy;

  PetscFunctionBegin;
  ierr = VecGetArrayPrivate3(win,&ww,xin,&xx,yin,&yy);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ww[i] = PetscMax(PetscAbsScalar(xx[i]),PetscAbsScalar(yy[i]));
  }
  ierr = PetscLogFlops(n);CHKERRQ(ierr);
  ierr = VecRestoreArrayPrivate3(win,&ww,xin,&xx,yin,&yy);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#include "../src/vec/vec/impls/seq/ftn-kernels/fxtimesy.h"
#undef __FUNCT__
#define __FUNCT__ "VecPointwiseMult_Seq"
static PetscErrorCode VecPointwiseMult_Seq(Vec win,Vec xin,Vec yin)
{
  PetscErrorCode ierr;
  PetscInt       n = win->map->n,i;
  PetscScalar    *ww,*xx,*yy;

  PetscFunctionBegin;
  ierr = VecGetArrayPrivate3(win,&ww,xin,&xx,yin,&yy);CHKERRQ(ierr);
  if (ww == xx) {
    for (i=0; i<n; i++) ww[i] *= yy[i];
  } else if (ww == yy) {
    for (i=0; i<n; i++) ww[i] *= xx[i];
  } else {
    /*  This was supposed to help on SGI but did not really seem to:
          PetscReal * PETSC_RESTRICT www = ww;
          PetscReal * PETSC_RESTRICT yyy = yy;
          PetscReal * PETSC_RESTRICT xxx = xx;
          for (i=0; i<n; i++) www[i] = xxx[i] * yyy[i];
    */
#if defined(PETSC_USE_FORTRAN_KERNEL_XTIMESY)
    fortranxtimesy_(xx,yy,ww,&n);
#else
    for (i=0; i<n; i++) ww[i] = xx[i] * yy[i];
#endif
  }
  ierr = VecRestoreArrayPrivate3(win,&ww,xin,&xx,yin,&yy);CHKERRQ(ierr);
  ierr = PetscLogFlops(n);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecSetRandom_Seq"
static PetscErrorCode VecSetRandom_Seq(Vec xin,PetscRandom r)
{
  PetscErrorCode ierr;
  PetscInt       n = xin->map->n,i;
  PetscScalar    *xx = *(PetscScalar**)xin->data;

  PetscFunctionBegin;
  for (i=0; i<n; i++) {ierr = PetscRandomGetValue(r,&xx[i]);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecPointwiseDivide_Seq"
static PetscErrorCode VecPointwiseDivide_Seq(Vec win,Vec xin,Vec yin)
{
  PetscErrorCode ierr;
  PetscInt       n = win->map->n,i;
  PetscScalar    *ww,*xx,*yy;

  PetscFunctionBegin;
  ierr = VecGetArrayPrivate3(win,&ww,xin,&xx,yin,&yy);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ww[i] = xx[i] / yy[i];
  }
  ierr = PetscLogFlops(n);CHKERRQ(ierr);
  ierr = VecRestoreArrayPrivate3(win,&ww,xin,&xx,yin,&yy);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGetSize_Seq"
static PetscErrorCode VecGetSize_Seq(Vec vin,PetscInt *size)
{
  PetscFunctionBegin;
  *size = vin->map->n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecConjugate_Seq"
static PetscErrorCode VecConjugate_Seq(Vec xin)
{
  PetscScalar    *x;
  PetscInt       n = xin->map->n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecGetArrayPrivate(xin,&x);CHKERRQ(ierr);
  while (n-->0) {
    *x = PetscConj(*x);
    x++;
  }
  ierr = VecRestoreArrayPrivate(xin,&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCopy_Seq"
static PetscErrorCode VecCopy_Seq(Vec xin,Vec yin)
{
  PetscScalar    *ya, *xa;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (xin != yin) {
    ierr = VecGetArrayPrivate2(xin,&xa,yin,&ya);CHKERRQ(ierr);
    ierr = PetscMemcpy(ya,xa,xin->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = VecRestoreArrayPrivate2(xin,&xa,yin,&ya);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#include "petscblaslapack.h"
#undef __FUNCT__
#define __FUNCT__ "VecSwap_Seq"
static PetscErrorCode VecSwap_Seq(Vec xin,Vec yin)
{
  PetscScalar    *ya, *xa;
  PetscErrorCode ierr;
  PetscBLASInt   one = 1,bn = PetscBLASIntCast(xin->map->n);

  PetscFunctionBegin;
  if (xin != yin) {
    ierr = VecGetArrayPrivate2(xin,&xa,yin,&ya);CHKERRQ(ierr);
    BLASswap_(&bn,xa,&one,ya,&one);
    ierr = VecRestoreArrayPrivate2(xin,&xa,yin,&ya);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecAXPBYPCZ_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoad_Default,
            VecReciprocal_Default,
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_MPI,
            0,
            VecMaxPointwiseDivide_Seq,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI,
            0,
            0,
            0,
            0,
            0,
            0,
            VecStrideGather_Default,
            VecStrideScatter_Default
};

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is PETSC_TRUE and array is PETSC_NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool  alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;

  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (v->map->bs == -1) v->map->bs = 1;
  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
  s->array           = (PetscScalar *)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n         = v->map->n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

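/*
   Summary of the call patterns used elsewhere in this file (illustration only, not an
   additional API):

     VecCreate_MPI_Private(v,PETSC_TRUE,0,0);            VecCreate_MPI(): allocate and zero the storage
     VecCreate_MPI_Private(v,PETSC_FALSE,0,array);       VecCreateMPIWithArray(): wrap a user-provided array
     VecCreate_MPI_Private(v,PETSC_TRUE,nghost,array);   VecCreateGhostWithArray(): ghosted; array may be PETSC_NULL
*/
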
/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

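/*
   Usage sketch (illustrative only, not compiled here): creating a VECMPI vector
   explicitly.  Error checking is abbreviated; real code wraps each call with
   ierr = ...;CHKERRQ(ierr);

     Vec x;
     VecCreate(PETSC_COMM_WORLD,&x);
     VecSetSizes(x,PETSC_DECIDE,100);    100 global entries, PETSc chooses the local split
     VecSetType(x,VECMPI);               or VecSetFromOptions(x) together with -vec_type mpi
     ...
     VecDestroy(x);                      this PETSc version passes the Vec itself, as elsewhere in this file
*/
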
/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_Standard"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)v)->comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

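/*
   Usage sketch (illustrative only): letting the type be chosen at run time.  With
   VECSTANDARD the vector becomes VECSEQ on one process and VECMPI on several, as
   VecCreate_Standard() above shows.  Error checking abbreviated.

     Vec x;
     VecCreate(PETSC_COMM_WORLD,&x);
     VecSetSizes(x,PETSC_DECIDE,100);
     VecSetFromOptions(x);               honors -vec_type standard, mpi, seq, ...
*/
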

#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user-provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

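/*
   Usage sketch (illustrative only, error checking abbreviated): wrapping storage the
   caller already owns.  The array must stay valid for the life of the vector and is
   not freed by VecDestroy().

     PetscScalar buf[10];                               or memory obtained from PetscMalloc()
     Vec         x;
     VecCreateMPIWithArray(PETSC_COMM_WORLD,10,PETSC_DECIDE,buf,&x);
     VecSet(x,1.0);                                     writes directly into buf
     VecDestroy(x);                                     buf is still owned (and freed) by the caller
*/
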
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array - the space to store the vector values (of length at least n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArrayPrivate(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArrayPrivate(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

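/*
   Usage sketch (illustrative only, error checking abbreviated): the caller supplies
   storage for the n owned entries followed by the nghost ghost entries; the index
   values here are hypothetical.

     PetscInt    ghosts[2] = {0,5};                     global indices owned by other processes
     PetscScalar buf[4+2];                              n = 4 owned entries plus 2 ghosts
     Vec         x;
     VecCreateGhostWithArray(PETSC_COMM_WORLD,4,PETSC_DECIDE,2,ghosts,buf,&x);
*/
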
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

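/*
   Usage sketch (illustrative only, error checking abbreviated): the typical ghost
   update cycle.  Owned values are set through the global vector, then scattered into
   the ghost slots of the local form.

     Vec x,xlocal;
     VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,&x);
     ... set owned entries with VecSetValues()/VecAssemblyBegin()/VecAssemblyEnd() ...
     VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);   fill ghost locations from their owners
     VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);
     VecGhostGetLocalForm(x,&xlocal);                        sequential vector of length n+nghost
     ... read owned and ghost entries from xlocal ...
     VecGhostRestoreLocalForm(x,&xlocal);
*/
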

/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed), counts are by block not by index
-  array - the space to store the vector values (of length at least n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt       rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArrayPrivate(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArrayPrivate(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb = n/bs;
  ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<nb; i++) {
    indices[i] = rstart + i*bs;
  }
  for (i=0; i<nghost; i++) {
    indices[nb+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done by blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
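
/*
   Usage sketch (illustrative only, error checking abbreviated): ghost indices are
   given per block, so with bs = 2 and nghost = 1 the vector carries bs*nghost = 2
   extra ghost entries at the end of its local form.  The index values are hypothetical.

     PetscInt ghostblocks[1] = {0};                     ghost the first block of the global vector
     Vec      x;
     VecCreateGhostBlock(PETSC_COMM_WORLD,2,6,PETSC_DECIDE,1,ghostblocks,&x);
     VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);
     VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);
*/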