xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision ba337c4413f444c9b07b767e9700a1bd83f660a1)
1 #define PETSCVEC_DLL
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include "../src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/
6 
#if 0
/*
   VecPublish_MPI - Placeholder publish hook for MPI vectors.

   Currently compiled out (#if 0) and a no-op; kept so the hook can be
   re-enabled without reconstructing the boilerplate.
*/
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
#endif
16 
17 #undef __FUNCT__
18 #define __FUNCT__ "VecDot_MPI"
19 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
20 {
21   PetscScalar    sum,work;
22   PetscErrorCode ierr;
23 
24   PetscFunctionBegin;
25   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
26   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
27   *z = sum;
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "VecTDot_MPI"
33 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
34 {
35   PetscScalar    sum,work;
36   PetscErrorCode ierr;
37 
38   PetscFunctionBegin;
39   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
40   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
41   *z   = sum;
42   PetscFunctionReturn(0);
43 }
44 
45 #undef __FUNCT__
46 #define __FUNCT__ "VecSetOption_MPI"
47 PetscErrorCode VecSetOption_MPI(Vec v,VecOption op,PetscTruth flag)
48 {
49   PetscFunctionBegin;
50   if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
51     v->stash.donotstash = flag;
52   } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
53     v->stash.ignorenegidx = flag;
54   }
55   PetscFunctionReturn(0);
56 }
57 
58 EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
59 EXTERN_C_BEGIN
60 EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
61 EXTERN_C_END
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "VecPlaceArray_MPI"
65 PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
66 {
67   PetscErrorCode ierr;
68   Vec_MPI        *v = (Vec_MPI *)vin->data;
69 
70   PetscFunctionBegin;
71   if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
72   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
73   v->array = (PetscScalar *)a;
74   if (v->localrep) {
75     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
76   }
77   PetscFunctionReturn(0);
78 }
79 
80 #undef __FUNCT__
81 #define __FUNCT__ "VecResetArray_MPI"
82 PetscErrorCode VecResetArray_MPI(Vec vin)
83 {
84   Vec_MPI        *v = (Vec_MPI *)vin->data;
85   PetscErrorCode ierr;
86 
87   PetscFunctionBegin;
88   v->array         = v->unplacedarray;
89   v->unplacedarray = 0;
90   if (v->localrep) {
91     ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
92   }
93   PetscFunctionReturn(0);
94 }
95 
96 EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, const VecType, Vec*);
97 EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
98 
/*
   DvOps - Virtual function table for VECMPI vectors.

   Operations that act only on locally-owned entries reuse the sequential
   (_Seq) kernels; operations needing communication have MPI-specific
   implementations.  The entry order must match struct _VecOps exactly,
   so never reorder entries here (the numbered comments mark slot indices).
*/
static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecAXPBYPCZ_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,       /* getlocalsize slot */
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,           /* "local" variants used by split-phase norms/dots */
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoadIntoVector_Default,
            0, /* VecLoadIntoVectorNative */
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_MPI,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI};
155 
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscTruth alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;

  /* allocate the implementation struct and install the MPI function table */
  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  /* default block size is 1 when the caller never set one */
  if (v->map->bs == -1) v->map->bs = 1;
  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
  s->array           = (PetscScalar *)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    /* allocate owned + ghost storage; only the owned part is zeroed here,
       the nghost trailing slots are left uninitialized */
    PetscInt n         = v->map->n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
212 
213 /*MC
214    VECMPI - VECMPI = "mpi" - The basic parallel vector
215 
216    Options Database Keys:
217 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
218 
219   Level: beginner
220 
.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
222 M*/
223 
224 EXTERN_C_BEGIN
225 #undef __FUNCT__
226 #define __FUNCT__ "VecCreate_MPI"
227 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
228 {
229   PetscErrorCode ierr;
230 
231   PetscFunctionBegin;
232   ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
233   PetscFunctionReturn(0);
234 }
235 EXTERN_C_END
236 
237 #undef __FUNCT__
238 #define __FUNCT__ "VecCreateMPIWithArray"
239 /*@C
240    VecCreateMPIWithArray - Creates a parallel, array-style vector,
241    where the user provides the array space to store the vector values.
242 
243    Collective on MPI_Comm
244 
245    Input Parameters:
246 +  comm  - the MPI communicator to use
247 .  n     - local vector length, cannot be PETSC_DECIDE
248 .  N     - global vector length (or PETSC_DECIDE to have calculated)
249 -  array - the user provided array to store the vector values
250 
251    Output Parameter:
252 .  vv - the vector
253 
254    Notes:
255    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
256    same type as an existing vector.
257 
258    If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
259    at a later stage to SET the array for storing the vector values.
260 
261    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
262    The user should not free the array until the vector is destroyed.
263 
264    Level: intermediate
265 
266    Concepts: vectors^creating with array
267 
268 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
269           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
270 
271 @*/
272 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
273 {
274   PetscErrorCode ierr;
275 
276   PetscFunctionBegin;
277   if (n == PETSC_DECIDE) {
278     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
279   }
280   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
281   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
282   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
283   ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
284   PetscFunctionReturn(0);
285 }
286 
287 #undef __FUNCT__
288 #define __FUNCT__ "VecGhostStateSync_Private"
289 /*
290   This is used in VecGhostGetLocalForm and VecGhostRestoreLocalForm to ensure
291   that the state is updated if either vector has changed since the last time
292   one of these functions was called.  It could apply to any PetscObject, but
293   VecGhost is quite different from other objects in that two separate vectors
294   look at the same memory.
295 
296   In principle, we could only propagate state to the local vector on
297   GetLocalForm and to the global vector on RestoreLocalForm, but this version is
298   more conservative (i.e. robust against misuse) and simpler.
299 
300   Note that this function is correct and changes nothing if both arguments are the
301   same, which is the case in serial.
302 */
303 static PetscErrorCode VecGhostStateSync_Private(Vec g,Vec l)
304 {
305   PetscErrorCode ierr;
306   PetscInt       gstate,lstate;
307 
308   PetscFunctionBegin;
309   ierr = PetscObjectStateQuery((PetscObject)g,&gstate);CHKERRQ(ierr);
310   ierr = PetscObjectStateQuery((PetscObject)l,&lstate);CHKERRQ(ierr);
311   ierr = PetscObjectSetState((PetscObject)g,PetscMax(gstate,lstate));CHKERRQ(ierr);
312   ierr = PetscObjectSetState((PetscObject)l,PetscMax(gstate,lstate));CHKERRQ(ierr);
313   PetscFunctionReturn(0);
314 }
315 
316 #undef __FUNCT__
317 #define __FUNCT__ "VecGhostGetLocalForm"
318 /*@
319     VecGhostGetLocalForm - Obtains the local ghosted representation of
320     a parallel vector created with VecCreateGhost().
321 
322     Not Collective
323 
324     Input Parameter:
325 .   g - the global vector. Vector must be have been obtained with either
326         VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
327 
328     Output Parameter:
329 .   l - the local (ghosted) representation
330 
331     Notes:
332     This routine does not actually update the ghost values, but rather it
333     returns a sequential vector that includes the locations for the ghost
334     values and their current values. The returned vector and the original
335     vector passed in share the same array that contains the actual vector data.
336 
337     One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
338     finished using the object.
339 
340     Level: advanced
341 
342    Concepts: vectors^ghost point access
343 
344 .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
345 
346 @*/
347 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
348 {
349   PetscErrorCode ierr;
350   PetscTruth     isseq,ismpi;
351 
352   PetscFunctionBegin;
353   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
354   PetscValidPointer(l,2);
355 
356   ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
357   ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
358   if (ismpi) {
359     Vec_MPI *v  = (Vec_MPI*)g->data;
360     if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
361     *l = v->localrep;
362   } else if (isseq) {
363     *l = g;
364   } else {
365     SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",((PetscObject)g)->type_name);
366   }
367   ierr = VecGhostStateSync_Private(g,*l);CHKERRQ(ierr);
368   ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
369   PetscFunctionReturn(0);
370 }
371 
372 #undef __FUNCT__
373 #define __FUNCT__ "VecGhostRestoreLocalForm"
374 /*@
375     VecGhostRestoreLocalForm - Restores the local ghosted representation of
376     a parallel vector obtained with VecGhostGetLocalForm().
377 
378     Not Collective
379 
380     Input Parameter:
381 +   g - the global vector
382 -   l - the local (ghosted) representation
383 
384     Notes:
385     This routine does not actually update the ghost values, but rather it
386     returns a sequential vector that includes the locations for the ghost values
387     and their current values.
388 
389     Level: advanced
390 
391 .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
392 @*/
393 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
394 {
395   PetscErrorCode ierr;
396 
397   PetscFunctionBegin;
398   ierr = VecGhostStateSync_Private(g,*l);CHKERRQ(ierr);
399   ierr = PetscObjectDereference((PetscObject)*l);CHKERRQ(ierr);
400   PetscFunctionReturn(0);
401 }
402 
403 #undef __FUNCT__
404 #define __FUNCT__ "VecGhostUpdateBegin"
405 /*@
406    VecGhostUpdateBegin - Begins the vector scatter to update the vector from
407    local representation to global or global representation to local.
408 
409    Collective on Vec
410 
411    Input Parameters:
412 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
413 .  insertmode - one of ADD_VALUES or INSERT_VALUES
414 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
415 
416    Notes:
417    Use the following to update the ghost regions with correct values from the owning process
418 .vb
419        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
420        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
421 .ve
422 
423    Use the following to accumulate the ghost region values onto the owning processors
424 .vb
425        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
426        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
427 .ve
428 
429    To accumulate the ghost region values onto the owning processors and then update
430    the ghost regions correctly, call the later followed by the former, i.e.,
431 .vb
432        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
433        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
434        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
435        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
436 .ve
437 
438    Level: advanced
439 
440 .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
441           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
442 
443 @*/
444 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
445 {
446   Vec_MPI        *v;
447   PetscErrorCode ierr;
448 
449   PetscFunctionBegin;
450   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
451 
452   v  = (Vec_MPI*)g->data;
453   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
454   if (!v->localupdate) PetscFunctionReturn(0);
455 
456   if (scattermode == SCATTER_REVERSE) {
457     ierr = VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
458   } else {
459     ierr = VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
460   }
461   PetscFunctionReturn(0);
462 }
463 
464 #undef __FUNCT__
465 #define __FUNCT__ "VecGhostUpdateEnd"
466 /*@
467    VecGhostUpdateEnd - End the vector scatter to update the vector from
468    local representation to global or global representation to local.
469 
470    Collective on Vec
471 
472    Input Parameters:
473 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
474 .  insertmode - one of ADD_VALUES or INSERT_VALUES
475 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
476 
477    Notes:
478 
479    Use the following to update the ghost regions with correct values from the owning process
480 .vb
481        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
482        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
483 .ve
484 
485    Use the following to accumulate the ghost region values onto the owning processors
486 .vb
487        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
488        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
489 .ve
490 
491    To accumulate the ghost region values onto the owning processors and then update
492    the ghost regions correctly, call the later followed by the former, i.e.,
493 .vb
494        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
495        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
496        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
497        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
498 .ve
499 
500    Level: advanced
501 
502 .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
503           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
504 
505 @*/
506 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
507 {
508   Vec_MPI        *v;
509   PetscErrorCode ierr;
510 
511   PetscFunctionBegin;
512   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
513 
514   v  = (Vec_MPI*)g->data;
515   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
516   if (!v->localupdate) PetscFunctionReturn(0);
517 
518   if (scattermode == SCATTER_REVERSE) {
519     ierr = VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
520   } else {
521     ierr = VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
522   }
523   PetscFunctionReturn(0);
524 }
525 
526 #undef __FUNCT__
527 #define __FUNCT__ "VecCreateGhostWithArray"
528 /*@C
529    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
530    the caller allocates the array space.
531 
532    Collective on MPI_Comm
533 
534    Input Parameters:
535 +  comm - the MPI communicator to use
536 .  n - local vector length
537 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
538 .  nghost - number of local ghost points
539 .  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
540 -  array - the space to store the vector values (as long as n + nghost)
541 
542    Output Parameter:
543 .  vv - the global vector representation (without ghost points as part of vector)
544 
545    Notes:
546    Use VecGhostGetLocalForm() to access the local, ghosted representation
547    of the vector.
548 
549    This also automatically sets the ISLocalToGlobalMapping() for this vector.
550 
551    Level: advanced
552 
553    Concepts: vectors^creating with array
554    Concepts: vectors^ghosted
555 
556 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
557           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
558           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
559 
560 @*/
561 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
562 {
563   PetscErrorCode         ierr;
564   Vec_MPI                *w;
565   PetscScalar            *larray;
566   IS                     from,to;
567   ISLocalToGlobalMapping ltog;
568   PetscInt               rstart,i,*indices;
569 
570   PetscFunctionBegin;
571   *vv = 0;
572 
573   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
574   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
575   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
576   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
577   /* Create global representation */
578   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
579   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
580   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
581   w    = (Vec_MPI *)(*vv)->data;
582   /* Create local representation */
583   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
584   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
585   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
586   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
587 
588   /*
589        Create scatter context for scattering (updating) ghost values
590   */
591   ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
592   ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
593   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
594   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
595   ierr = ISDestroy(to);CHKERRQ(ierr);
596   ierr = ISDestroy(from);CHKERRQ(ierr);
597 
598   /* set local to global mapping for ghosted vector */
599   ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
600   ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
601   for (i=0; i<n; i++) {
602     indices[i] = rstart + i;
603   }
604   for (i=0; i<nghost; i++) {
605     indices[n+i] = ghosts[i];
606   }
607   ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);CHKERRQ(ierr);
608   ierr = PetscFree(indices);CHKERRQ(ierr);
609   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
610   ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
611   ierr = PetscFree(indices);CHKERRQ(ierr);
612   PetscFunctionReturn(0);
613 }
614 
615 #undef __FUNCT__
616 #define __FUNCT__ "VecCreateGhost"
617 /*@
618    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
619 
620    Collective on MPI_Comm
621 
622    Input Parameters:
623 +  comm - the MPI communicator to use
624 .  n - local vector length
625 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
626 .  nghost - number of local ghost points
627 -  ghosts - global indices of ghost points
628 
629    Output Parameter:
630 .  vv - the global vector representation (without ghost points as part of vector)
631 
632    Notes:
633    Use VecGhostGetLocalForm() to access the local, ghosted representation
634    of the vector.
635 
636    This also automatically sets the ISLocalToGlobalMapping() for this vector.
637 
638    Level: advanced
639 
640    Concepts: vectors^ghosted
641 
642 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
643           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
644           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
645           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
646 
647 @*/
648 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
649 {
650   PetscErrorCode ierr;
651 
652   PetscFunctionBegin;
653   ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
654   PetscFunctionReturn(0);
655 }
656 
#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
/*
   VecDuplicate_MPI - Creates a new vector with the same layout, ops table,
   stashing options, mappings and (if present) ghost structure as win; values
   are NOT copied.
*/
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);

  /* use the map that exists already in win; share it by bumping its refcount */
  ierr = PetscLayoutDestroy((*v)->map);CHKERRQ(ierr);
  (*v)->map = win->map;
  win->map->refcnt++;

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  /* copy win's ops table (it may have been customized since creation) */
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    /* the scatter context is shared (reference-counted), not duplicated */
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  /* copy composed objects and function lists so attached data survives duplication */
  ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  if (win->mapping) {
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map->bs    = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}
709 
710 /* ------------------------------------------------------------------------------------------*/
711 #undef __FUNCT__
712 #define __FUNCT__ "VecCreateGhostBlockWithArray"
713 /*@C
714    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
715    the caller allocates the array space. Indices in the ghost region are based on blocks.
716 
717    Collective on MPI_Comm
718 
719    Input Parameters:
720 +  comm - the MPI communicator to use
721 .  bs - block size
722 .  n - local vector length
723 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
724 .  nghost - number of local ghost blocks
725 .  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
726 -  array - the space to store the vector values (as long as n + nghost*bs)
727 
728    Output Parameter:
729 .  vv - the global vector representation (without ghost points as part of vector)
730 
731    Notes:
732    Use VecGhostGetLocalForm() to access the local, ghosted representation
733    of the vector.
734 
735    n is the local vector size (total local size not the number of blocks) while nghost
736    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
737    portion is bs*nghost
738 
739    Level: advanced
740 
741    Concepts: vectors^creating ghosted
742    Concepts: vectors^creating with array
743 
744 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
745           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
746           VecCreateGhostWithArray(), VecCreateGhostBlocked()
747 
748 @*/
749 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
750 {
751   PetscErrorCode ierr;
752   Vec_MPI        *w;
753   PetscScalar    *larray;
754   IS             from,to;
755   ISLocalToGlobalMapping ltog;
756   PetscInt       rstart,i,nb,*indices;
757 
758   PetscFunctionBegin;
759   *vv = 0;
760 
761   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
762   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
763   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
764   if (n % bs)                 SETERRQ(PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
765   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
766   /* Create global representation */
767   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
768   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
769   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
770   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
771   w    = (Vec_MPI *)(*vv)->data;
772   /* Create local representation */
773   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
774   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
775   ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
776   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
777   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
778 
779   /*
780        Create scatter context for scattering (updating) ghost values
781   */
782   ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
783   ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
784   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
785   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
786   ierr = ISDestroy(to);CHKERRQ(ierr);
787   ierr = ISDestroy(from);CHKERRQ(ierr);
788 
789   /* set local to global mapping for ghosted vector */
790   nb = n/bs;
791   ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
792   ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
793   for (i=0; i<nb; i++) {
794     indices[i] = rstart + i*bs;
795   }
796   for (i=0; i<nghost; i++) {
797     indices[nb+i] = ghosts[i];
798   }
799   ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,&ltog);CHKERRQ(ierr);
800   ierr = PetscFree(indices);CHKERRQ(ierr);
801   ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
802   ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
803   ierr = PetscFree(indices);CHKERRQ(ierr);
804 
805   PetscFunctionReturn(0);
806 }
807 
808 #undef __FUNCT__
809 #define __FUNCT__ "VecCreateGhostBlock"
810 /*@
811    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
812         The indicing of the ghost points is done with blocks.
813 
814    Collective on MPI_Comm
815 
816    Input Parameters:
817 +  comm - the MPI communicator to use
818 .  bs - the block size
819 .  n - local vector length
820 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
821 .  nghost - number of local ghost blocks
822 -  ghosts - global indices of ghost blocks
823 
824    Output Parameter:
825 .  vv - the global vector representation (without ghost points as part of vector)
826 
827    Notes:
828    Use VecGhostGetLocalForm() to access the local, ghosted representation
829    of the vector.
830 
831    n is the local vector size (total local size not the number of blocks) while nghost
832    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
833    portion is bs*nghost
834 
835    Level: advanced
836 
837    Concepts: vectors^ghosted
838 
839 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
840           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
841           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
842 
843 @*/
844 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
845 {
846   PetscErrorCode ierr;
847 
848   PetscFunctionBegin;
849   ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
850   PetscFunctionReturn(0);
851 }
852 
853 /*
854     These introduce a ghosted vector where the ghosting is determined by the call to
855   VecSetLocalToGlobalMapping()
856 */
857 
#undef __FUNCT__
#define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
/*
   VecSetLocalToGlobalMapping_FETI - Establishes the ghosting of a FETI vector
   from its local-to-global mapping: the ghost count is the mapping length
   minus the number of owned entries.

   NOTE(review): the array is freed and reallocated without copying, so any
   values already stored in the vector are discarded here — confirm callers
   only set the mapping on freshly created vectors.
*/
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  v->nghost = map->n - vv->map->n;

  /* we need to make longer the array space that was allocated when the vector was created */
  ierr     = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr     = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
878 
879 
880 #undef __FUNCT__
881 #define __FUNCT__ "VecSetValuesLocal_FETI"
882 PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
883 {
884   PetscErrorCode ierr;
885   Vec_MPI        *v = (Vec_MPI *)vv->data;
886 
887   PetscFunctionBegin;
888   ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
889   PetscFunctionReturn(0);
890 }
891 
892 EXTERN_C_BEGIN
893 #undef __FUNCT__
894 #define __FUNCT__ "VecCreate_FETI"
895 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
896 {
897   PetscErrorCode ierr;
898 
899   PetscFunctionBegin;
900   ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);
901 
902   /* overwrite the functions to handle setting values locally */
903   vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
904   vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
905   vv->ops->assemblybegin           = 0;
906   vv->ops->assemblyend             = 0;
907   vv->ops->setvaluesblocked        = 0;
908   vv->ops->setvaluesblocked        = 0;
909 
910   PetscFunctionReturn(0);
911 }
912 EXTERN_C_END
913 
914 
915 
916 
917 
918 
919 
920 
921 
922