xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision 51dcc8499171394a80a6d1776d3567ce6d419e32)

/*
   This file contains routines for parallel vector operations.
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "VecPointwiseMax_Seq"
static PetscErrorCode VecPointwiseMax_Seq(Vec win,Vec xin,Vec yin)
{
  PetscErrorCode ierr;
  PetscInt       n = win->map->n,i;
  PetscScalar    *ww,*xx,*yy; /* cannot make xx or yy const since either may alias ww */

  PetscFunctionBegin;
  ierr = VecGetArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecGetArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecGetArray(win,&ww);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ww[i] = PetscMax(PetscRealPart(xx[i]),PetscRealPart(yy[i]));
  }
  ierr = VecRestoreArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecRestoreArray(win,&ww);CHKERRQ(ierr);
  ierr = PetscLogFlops(n);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecPointwiseMin_Seq"
static PetscErrorCode VecPointwiseMin_Seq(Vec win,Vec xin,Vec yin)
{
  PetscErrorCode ierr;
  PetscInt       n = win->map->n,i;
  PetscScalar    *ww,*xx,*yy; /* cannot make xx or yy const since either may alias ww */

  PetscFunctionBegin;
  ierr = VecGetArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecGetArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecGetArray(win,&ww);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ww[i] = PetscMin(PetscRealPart(xx[i]),PetscRealPart(yy[i]));
  }
  ierr = VecRestoreArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecRestoreArray(win,&ww);CHKERRQ(ierr);
  ierr = PetscLogFlops(n);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecPointwiseMaxAbs_Seq"
static PetscErrorCode VecPointwiseMaxAbs_Seq(Vec win,Vec xin,Vec yin)
{
  PetscErrorCode ierr;
  PetscInt       n = win->map->n,i;
  PetscScalar    *ww,*xx,*yy; /* cannot make xx or yy const since either may alias ww */

  PetscFunctionBegin;
  ierr = VecGetArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecGetArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecGetArray(win,&ww);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ww[i] = PetscMax(PetscAbsScalar(xx[i]),PetscAbsScalar(yy[i]));
  }
  ierr = VecRestoreArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecRestoreArray(win,&ww);CHKERRQ(ierr);
  ierr = PetscLogFlops(n);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#include <../src/vec/vec/impls/seq/ftn-kernels/fxtimesy.h>
#undef __FUNCT__
#define __FUNCT__ "VecPointwiseMult_Seq"
static PetscErrorCode VecPointwiseMult_Seq(Vec win,Vec xin,Vec yin)
{
  PetscErrorCode ierr;
  PetscInt       n = win->map->n,i;
  PetscScalar    *ww,*xx,*yy; /* cannot make xx or yy const since either may alias ww */

  PetscFunctionBegin;
  ierr = VecGetArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecGetArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecGetArray(win,&ww);CHKERRQ(ierr);
  if (ww == xx) {
    for (i=0; i<n; i++) ww[i] *= yy[i];
  } else if (ww == yy) {
    for (i=0; i<n; i++) ww[i] *= xx[i];
  } else {
#if defined(PETSC_USE_FORTRAN_KERNEL_XTIMESY)
    fortranxtimesy_(xx,yy,ww,&n);
#else
    for (i=0; i<n; i++) ww[i] = xx[i] * yy[i];
#endif
  }
  ierr = VecRestoreArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecRestoreArray(win,&ww);CHKERRQ(ierr);
  ierr = PetscLogFlops(n);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecPointwiseDivide_Seq"
static PetscErrorCode VecPointwiseDivide_Seq(Vec win,Vec xin,Vec yin)
{
  PetscErrorCode ierr;
  PetscInt       n = win->map->n,i;
  PetscScalar    *ww,*xx,*yy; /* cannot make xx or yy const since either may alias ww */

  PetscFunctionBegin;
  ierr = VecGetArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecGetArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecGetArray(win,&ww);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ww[i] = xx[i] / yy[i];
  }
  ierr = VecRestoreArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecRestoreArray(win,&ww);CHKERRQ(ierr);
  ierr = PetscLogFlops(n);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar *)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  (*v)->map->bs   = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}

extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
extern PetscErrorCode VecResetArray_MPI(Vec);

#undef __FUNCT__
#define __FUNCT__ "VecSetRandom_Seq"
static PetscErrorCode VecSetRandom_Seq(Vec xin,PetscRandom r)
{
  PetscErrorCode ierr;
  PetscInt       n = xin->map->n,i;
  PetscScalar    *xx;

  PetscFunctionBegin;
  ierr = VecGetArray(xin,&xx);CHKERRQ(ierr);
  for (i=0; i<n; i++) {ierr = PetscRandomGetValue(r,&xx[i]);CHKERRQ(ierr);}
  ierr = VecRestoreArray(xin,&xx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGetSize_Seq"
static PetscErrorCode VecGetSize_Seq(Vec vin,PetscInt *size)
{
  PetscFunctionBegin;
  *size = vin->map->n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecConjugate_Seq"
static PetscErrorCode VecConjugate_Seq(Vec xin)
{
  PetscScalar    *x;
  PetscInt       n = xin->map->n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecGetArray(xin,&x);CHKERRQ(ierr);
  while (n-->0) {
    *x = PetscConj(*x);
    x++;
  }
  ierr = VecRestoreArray(xin,&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCopy_Seq"
static PetscErrorCode VecCopy_Seq(Vec xin,Vec yin)
{
  PetscScalar       *ya;
  const PetscScalar *xa;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  if (xin != yin) {
    ierr = VecGetArrayRead(xin,&xa);CHKERRQ(ierr);
    ierr = VecGetArray(yin,&ya);CHKERRQ(ierr);
    ierr = PetscMemcpy(ya,xa,xin->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = VecRestoreArrayRead(xin,&xa);CHKERRQ(ierr);
    ierr = VecRestoreArray(yin,&ya);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#include <petscblaslapack.h>
#undef __FUNCT__
#define __FUNCT__ "VecSwap_Seq"
static PetscErrorCode VecSwap_Seq(Vec xin,Vec yin)
{
  PetscScalar    *ya, *xa;
  PetscErrorCode ierr;
  PetscBLASInt   one = 1,bn = PetscBLASIntCast(xin->map->n);

  PetscFunctionBegin;
  if (xin != yin) {
    ierr = VecGetArray(xin,&xa);CHKERRQ(ierr);
    ierr = VecGetArray(yin,&ya);CHKERRQ(ierr);
    BLASswap_(&bn,xa,&one,ya,&one);
    ierr = VecRestoreArray(xin,&xa);CHKERRQ(ierr);
    ierr = VecRestoreArray(yin,&ya);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecAXPBYPCZ_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            0,
            VecGetSize_MPI,
            VecGetSize_Seq,
            0,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoad_Default,
            VecReciprocal_Default,
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_MPI,
            0,
            VecMaxPointwiseDivide_Seq,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI,
            0,
            0,
            0,
            0,
            0,
            0,
            VecStrideGather_Default,
            VecStrideScatter_Default
};

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool  alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  if (v->map->bs == -1) v->map->bs = 1;
  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
  s->array           = (PetscScalar *)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n         = v->map->n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have a local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* Create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

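   Example Usage (a minimal sketch; assumes comm and the local length n are defined, error checking elided):
.vb
   Vec v;
   VecCreate(comm,&v);
   VecSetSizes(v,n,PETSC_DECIDE);
   VecSetType(v,VECMPI);
.ve
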
  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PetscErrorCode  VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

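   Example Usage (a minimal sketch; assumes comm and the local length n are defined, error checking elided; the type can also be chosen on the command line with -vec_type standard):
.vb
   Vec v;
   VecCreate(comm,&v);
   VecSetSizes(v,n,PETSC_DECIDE);
   VecSetFromOptions(v);
.ve
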
  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_Standard"
PetscErrorCode  VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)v)->comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

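   Example Usage (a minimal sketch; assumes comm and the local length n are defined, error checking elided):
.vb
   PetscScalar *array;
   Vec         vv;
   PetscMalloc(n*sizeof(PetscScalar),&array);
   VecCreateMPIWithArray(comm,n,PETSC_DECIDE,array,&vv);
   /* ... use vv ... */
   VecDestroy(&vv);
   PetscFree(array);   /* the user frees the array, after the vector is destroyed */
.ve
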
   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array - the space to store the vector values (of length at least n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

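   Example Usage (a minimal sketch; assumes comm, n, nghost, and the global index array ghosts are defined, error checking elided):
.vb
   PetscScalar *array;
   Vec         vv;
   PetscMalloc((n+nghost)*sizeof(PetscScalar),&array);
   VecCreateGhostWithArray(comm,n,PETSC_DECIDE,nghost,ghosts,array,&vv);
   /* ... set and assemble values in vv, then update the ghost points ... */
   VecGhostUpdateBegin(vv,INSERT_VALUES,SCATTER_FORWARD);
   VecGhostUpdateEnd(vv,INSERT_VALUES,SCATTER_FORWARD);
.ve
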
   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

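   Example Usage (a minimal sketch; assumes comm, n, nghost, and the global index array ghosts are defined, error checking elided):
.vb
   Vec vv,lv;
   VecCreateGhost(comm,n,PETSC_DECIDE,nghost,ghosts,&vv);
   /* ... set and assemble values in vv, then update the ghost points ... */
   VecGhostUpdateBegin(vv,INSERT_VALUES,SCATTER_FORWARD);
   VecGhostUpdateEnd(vv,INSERT_VALUES,SCATTER_FORWARD);
   /* the local form has the n local entries followed by the nghost ghost entries */
   VecGhostGetLocalForm(vv,&lv);
   /* ... work with lv ... */
   VecGhostRestoreLocalForm(vv,&lv);
.ve
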
   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed), counts are by block, not by individual index
-  array - the space to store the vector values (of length at least n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

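   Example Usage (a minimal sketch; assumes comm, bs, n, nghost, and the block index array ghosts are defined, error checking elided):
.vb
   PetscScalar *array;
   Vec         vv;
   PetscMalloc((n+bs*nghost)*sizeof(PetscScalar),&array);
   VecCreateGhostBlockWithArray(comm,bs,n,PETSC_DECIDE,nghost,ghosts,array,&vv);
   /* ... set and assemble values in vv, then update the ghost blocks ... */
   VecGhostUpdateBegin(vv,INSERT_VALUES,SCATTER_FORWARD);
   VecGhostUpdateEnd(vv,INSERT_VALUES,SCATTER_FORWARD);
.ve
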
   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb = n/bs;
  ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<nb; i++) {
    indices[i] = rstart + i*bs;
  }
  for (i=0; i<nghost; i++) {
    indices[nb+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

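   Example Usage (a minimal sketch; assumes comm, bs, n, nghost, and the block index array ghosts are defined, error checking elided):
.vb
   Vec vv;
   VecCreateGhostBlock(comm,bs,n,PETSC_DECIDE,nghost,ghosts,&vv);
   /* ... set and assemble values in vv, then update the ghost blocks ... */
   VecGhostUpdateBegin(vv,INSERT_VALUES,SCATTER_FORWARD);
   VecGhostUpdateEnd(vv,INSERT_VALUES,SCATTER_FORWARD);
.ve
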
   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}