xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision c457296d466dd3de681344323ebedaf97eb226ba)
1 #define PETSCVEC_DLL
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include "../src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/
6 
#if 0
/* Dead code: VecPublish_MPI is compiled out (remnant of the removed
   object-publishing framework); kept for reference only. */
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
#endif
16 
17 #undef __FUNCT__
18 #define __FUNCT__ "VecDot_MPI"
19 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
20 {
21   PetscScalar    sum,work;
22   PetscErrorCode ierr;
23 
24   PetscFunctionBegin;
25   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
26   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
27   *z = sum;
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "VecTDot_MPI"
33 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
34 {
35   PetscScalar    sum,work;
36   PetscErrorCode ierr;
37 
38   PetscFunctionBegin;
39   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
40   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
41   *z   = sum;
42   PetscFunctionReturn(0);
43 }
44 
45 #undef __FUNCT__
46 #define __FUNCT__ "VecSetOption_MPI"
47 PetscErrorCode VecSetOption_MPI(Vec v,VecOption op,PetscTruth flag)
48 {
49   PetscFunctionBegin;
50   if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
51     v->stash.donotstash = flag;
52   } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
53     v->stash.ignorenegidx = flag;
54   }
55   PetscFunctionReturn(0);
56 }
57 
58 EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
59 EXTERN_C_BEGIN
60 EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
61 EXTERN_C_END
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "VecPlaceArray_MPI"
65 PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
66 {
67   PetscErrorCode ierr;
68   Vec_MPI        *v = (Vec_MPI *)vin->data;
69 
70   PetscFunctionBegin;
71   if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
72   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
73   v->array = (PetscScalar *)a;
74   if (v->localrep) {
75     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
76   }
77   PetscFunctionReturn(0);
78 }
79 
80 #undef __FUNCT__
81 #define __FUNCT__ "VecResetArray_MPI"
82 PetscErrorCode VecResetArray_MPI(Vec vin)
83 {
84   Vec_MPI        *v = (Vec_MPI *)vin->data;
85   PetscErrorCode ierr;
86 
87   PetscFunctionBegin;
88   v->array         = v->unplacedarray;
89   v->unplacedarray = 0;
90   if (v->localrep) {
91     ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
92   }
93   PetscFunctionReturn(0);
94 }
95 
96 EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
97 
/*
   Virtual function table for the "mpi" vector type.  Entries are positional
   (they must match struct _VecOps exactly, so do not reorder).  Purely local
   operations reuse the sequential (VecXXX_Seq) kernels; operations requiring
   communication or global reductions use the MPI-specific versions.  A zero
   entry means the operation is not provided by this type.
*/
static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecAXPBYPCZ_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,   /* local size */
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,       /* "local" variants used by split-phase reductions */
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoad_Default,
            VecReciprocal_Default,
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_MPI,
            0,
            VecMaxPointwiseDivide_Seq,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI,
            0,
            0,
            0,
            0,
            0,
            0,
            VecStrideGather_Default,
            VecStrideScatter_Default
};
160 
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
    no space is allocated.

    Input Parameters:
+   v      - the vector whose type-specific data is being created
.   alloc  - whether to allocate local storage when array is not supplied
.   nghost - number of ghost slots appended after the n owned entries
-   array  - optional user-supplied storage (length at least n + nghost), or PETSC_NULL
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscTruth alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;

  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  /* install the MPI virtual function table */
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  /* default block size is 1; then finalize the parallel layout (sizes/ranges) */
  if (v->map->bs == -1) v->map->bs = 1;
  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
  s->array           = (PetscScalar *)array;
  s->array_allocated = 0;   /* nonzero only if we own the storage below */
  if (alloc && !array) {
    PetscInt n         = v->map->n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    /* only the owned part is zeroed; ghost slots are left uninitialized */
    ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
216 
217 /*MC
218    VECMPI - VECMPI = "mpi" - The basic parallel vector
219 
220    Options Database Keys:
221 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
222 
223   Level: beginner
224 
225 .seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
226 M*/
227 
228 EXTERN_C_BEGIN
229 #undef __FUNCT__
230 #define __FUNCT__ "VecCreate_MPI"
231 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
232 {
233   PetscErrorCode ierr;
234 
235   PetscFunctionBegin;
236   ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
237   PetscFunctionReturn(0);
238 }
239 EXTERN_C_END
240 
241 /*MC
242    VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process
243 
244    Options Database Keys:
245 . -vec_type standard - sets a vector type to standard on calls to VecSetFromOptions
246 
247   Level: beginner
248 
249 .seealso: VecCreateSeq(), VecCreateMPI()
250 M*/
251 
252 EXTERN_C_BEGIN
253 #undef __FUNCT__
254 #define __FUNCT__ "VecCreate_Standard"
255 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_Standard(Vec v)
256 {
257   PetscErrorCode ierr;
258   PetscMPIInt    size;
259 
260   PetscFunctionBegin;
261   ierr = MPI_Comm_size(((PetscObject)v)->comm,&size);CHKERRQ(ierr);
262   if (size == 1) {
263     ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
264   } else {
265     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
266   }
267   PetscFunctionReturn(0);
268 }
269 EXTERN_C_END
270 
271 
272 #undef __FUNCT__
273 #define __FUNCT__ "VecCreateMPIWithArray"
274 /*@C
275    VecCreateMPIWithArray - Creates a parallel, array-style vector,
276    where the user provides the array space to store the vector values.
277 
278    Collective on MPI_Comm
279 
280    Input Parameters:
281 +  comm  - the MPI communicator to use
282 .  n     - local vector length, cannot be PETSC_DECIDE
283 .  N     - global vector length (or PETSC_DECIDE to have calculated)
284 -  array - the user provided array to store the vector values
285 
286    Output Parameter:
287 .  vv - the vector
288 
289    Notes:
290    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
291    same type as an existing vector.
292 
293    If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
294    at a later stage to SET the array for storing the vector values.
295 
296    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
297    The user should not free the array until the vector is destroyed.
298 
299    Level: intermediate
300 
301    Concepts: vectors^creating with array
302 
303 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
304           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
305 
306 @*/
307 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
308 {
309   PetscErrorCode ierr;
310 
311   PetscFunctionBegin;
312   if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
313   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
314   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
315   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
316   ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
317   PetscFunctionReturn(0);
318 }
319 
320 #undef __FUNCT__
321 #define __FUNCT__ "VecGhostStateSync_Private"
322 /*
323   This is used in VecGhostGetLocalForm and VecGhostRestoreLocalForm to ensure
324   that the state is updated if either vector has changed since the last time
325   one of these functions was called.  It could apply to any PetscObject, but
326   VecGhost is quite different from other objects in that two separate vectors
327   look at the same memory.
328 
329   In principle, we could only propagate state to the local vector on
330   GetLocalForm and to the global vector on RestoreLocalForm, but this version is
331   more conservative (i.e. robust against misuse) and simpler.
332 
333   Note that this function is correct and changes nothing if both arguments are the
334   same, which is the case in serial.
335 */
336 static PetscErrorCode VecGhostStateSync_Private(Vec g,Vec l)
337 {
338   PetscErrorCode ierr;
339   PetscInt       gstate,lstate;
340 
341   PetscFunctionBegin;
342   ierr = PetscObjectStateQuery((PetscObject)g,&gstate);CHKERRQ(ierr);
343   ierr = PetscObjectStateQuery((PetscObject)l,&lstate);CHKERRQ(ierr);
344   ierr = PetscObjectSetState((PetscObject)g,PetscMax(gstate,lstate));CHKERRQ(ierr);
345   ierr = PetscObjectSetState((PetscObject)l,PetscMax(gstate,lstate));CHKERRQ(ierr);
346   PetscFunctionReturn(0);
347 }
348 
349 #undef __FUNCT__
350 #define __FUNCT__ "VecGhostGetLocalForm"
351 /*@
352     VecGhostGetLocalForm - Obtains the local ghosted representation of
353     a parallel vector created with VecCreateGhost().
354 
355     Not Collective
356 
357     Input Parameter:
358 .   g - the global vector. Vector must have been obtained with either
359         VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
360 
361     Output Parameter:
362 .   l - the local (ghosted) representation
363 
364     Notes:
365     This routine does not actually update the ghost values, but rather it
366     returns a sequential vector that includes the locations for the ghost
367     values and their current values. The returned vector and the original
368     vector passed in share the same array that contains the actual vector data.
369 
370     One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
371     finished using the object.
372 
373     Level: advanced
374 
375    Concepts: vectors^ghost point access
376 
377 .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
378 
379 @*/
380 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
381 {
382   PetscErrorCode ierr;
383   PetscTruth     isseq,ismpi;
384 
385   PetscFunctionBegin;
386   PetscValidHeaderSpecific(g,VEC_CLASSID,1);
387   PetscValidPointer(l,2);
388 
389   ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
390   ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
391   if (ismpi) {
392     Vec_MPI *v  = (Vec_MPI*)g->data;
393     if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
394     *l = v->localrep;
395   } else if (isseq) {
396     *l = g;
397   } else {
398     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",((PetscObject)g)->type_name);
399   }
400   ierr = VecGhostStateSync_Private(g,*l);CHKERRQ(ierr);
401   ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
402   PetscFunctionReturn(0);
403 }
404 
405 #undef __FUNCT__
406 #define __FUNCT__ "VecGhostRestoreLocalForm"
407 /*@
408     VecGhostRestoreLocalForm - Restores the local ghosted representation of
409     a parallel vector obtained with VecGhostGetLocalForm().
410 
411     Not Collective
412 
413     Input Parameter:
414 +   g - the global vector
415 -   l - the local (ghosted) representation
416 
417     Notes:
418     This routine does not actually update the ghost values, but rather it
419     returns a sequential vector that includes the locations for the ghost values
420     and their current values.
421 
422     Level: advanced
423 
424 .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
425 @*/
426 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
427 {
428   PetscErrorCode ierr;
429 
430   PetscFunctionBegin;
431   ierr = VecGhostStateSync_Private(g,*l);CHKERRQ(ierr);
432   ierr = PetscObjectDereference((PetscObject)*l);CHKERRQ(ierr);
433   PetscFunctionReturn(0);
434 }
435 
436 #undef __FUNCT__
437 #define __FUNCT__ "VecGhostUpdateBegin"
438 /*@
439    VecGhostUpdateBegin - Begins the vector scatter to update the vector from
440    local representation to global or global representation to local.
441 
442    Neighbor-wise Collective on Vec
443 
444    Input Parameters:
445 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
446 .  insertmode - one of ADD_VALUES or INSERT_VALUES
447 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
448 
449    Notes:
450    Use the following to update the ghost regions with correct values from the owning process
451 .vb
452        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
453        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
454 .ve
455 
456    Use the following to accumulate the ghost region values onto the owning processors
457 .vb
458        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
459        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
460 .ve
461 
462    To accumulate the ghost region values onto the owning processors and then update
463    the ghost regions correctly, call the latter followed by the former, i.e.,
464 .vb
465        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
466        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
467        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
468        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
469 .ve
470 
471    Level: advanced
472 
473 .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
474           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
475 
476 @*/
477 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
478 {
479   Vec_MPI        *v;
480   PetscErrorCode ierr;
481 
482   PetscFunctionBegin;
483   PetscValidHeaderSpecific(g,VEC_CLASSID,1);
484 
485   v  = (Vec_MPI*)g->data;
486   if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
487   if (!v->localupdate) PetscFunctionReturn(0);
488 
489   if (scattermode == SCATTER_REVERSE) {
490     ierr = VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
491   } else {
492     ierr = VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
493   }
494   PetscFunctionReturn(0);
495 }
496 
497 #undef __FUNCT__
498 #define __FUNCT__ "VecGhostUpdateEnd"
499 /*@
500    VecGhostUpdateEnd - End the vector scatter to update the vector from
501    local representation to global or global representation to local.
502 
503    Neighbor-wise Collective on Vec
504 
505    Input Parameters:
506 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
507 .  insertmode - one of ADD_VALUES or INSERT_VALUES
508 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
509 
510    Notes:
511 
512    Use the following to update the ghost regions with correct values from the owning process
513 .vb
514        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
515        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
516 .ve
517 
518    Use the following to accumulate the ghost region values onto the owning processors
519 .vb
520        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
521        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
522 .ve
523 
524    To accumulate the ghost region values onto the owning processors and then update
525    the ghost regions correctly, call the latter followed by the former, i.e.,
526 .vb
527        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
528        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
529        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
530        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
531 .ve
532 
533    Level: advanced
534 
535 .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
536           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
537 
538 @*/
539 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
540 {
541   Vec_MPI        *v;
542   PetscErrorCode ierr;
543 
544   PetscFunctionBegin;
545   PetscValidHeaderSpecific(g,VEC_CLASSID,1);
546 
547   v  = (Vec_MPI*)g->data;
548   if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
549   if (!v->localupdate) PetscFunctionReturn(0);
550 
551   if (scattermode == SCATTER_REVERSE) {
552     ierr = VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
553   } else {
554     ierr = VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
555   }
556   PetscFunctionReturn(0);
557 }
558 
559 #undef __FUNCT__
560 #define __FUNCT__ "VecCreateGhostWithArray"
561 /*@C
562    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
563    the caller allocates the array space.
564 
565    Collective on MPI_Comm
566 
567    Input Parameters:
568 +  comm - the MPI communicator to use
569 .  n - local vector length
570 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
571 .  nghost - number of local ghost points
572 .  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
573 -  array - the space to store the vector values (as long as n + nghost)
574 
575    Output Parameter:
576 .  vv - the global vector representation (without ghost points as part of vector)
577 
578    Notes:
579    Use VecGhostGetLocalForm() to access the local, ghosted representation
580    of the vector.
581 
582    This also automatically sets the ISLocalToGlobalMapping() for this vector.
583 
584    Level: advanced
585 
586    Concepts: vectors^creating with array
587    Concepts: vectors^ghosted
588 
589 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
590           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
591           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
592 
593 @*/
594 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
595 {
596   PetscErrorCode         ierr;
597   Vec_MPI                *w;
598   PetscScalar            *larray;
599   IS                     from,to;
600   ISLocalToGlobalMapping ltog;
601   PetscInt               rstart,i,*indices;
602 
603   PetscFunctionBegin;
604   *vv = 0;
605 
606   if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
607   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
608   if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
609   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
610   /* Create global representation */
611   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
612   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
613   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
614   w    = (Vec_MPI *)(*vv)->data;
615   /* Create local representation */
616   ierr = VecGetArrayPrivate(*vv,&larray);CHKERRQ(ierr);
617   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
618   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
619   ierr = VecRestoreArrayPrivate(*vv,&larray);CHKERRQ(ierr);
620 
621   /*
622        Create scatter context for scattering (updating) ghost values
623   */
624   ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
625   ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
626   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
627   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
628   ierr = ISDestroy(to);CHKERRQ(ierr);
629   ierr = ISDestroy(from);CHKERRQ(ierr);
630 
631   /* set local to global mapping for ghosted vector */
632   ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
633   ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
634   for (i=0; i<n; i++) {
635     indices[i] = rstart + i;
636   }
637   for (i=0; i<nghost; i++) {
638     indices[n+i] = ghosts[i];
639   }
640   ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);CHKERRQ(ierr);
641   ierr = PetscFree(indices);CHKERRQ(ierr);
642   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
643   ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
644   ierr = PetscFree(indices);CHKERRQ(ierr);
645   PetscFunctionReturn(0);
646 }
647 
648 #undef __FUNCT__
649 #define __FUNCT__ "VecCreateGhost"
650 /*@
651    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
652 
653    Collective on MPI_Comm
654 
655    Input Parameters:
656 +  comm - the MPI communicator to use
657 .  n - local vector length
658 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
659 .  nghost - number of local ghost points
660 -  ghosts - global indices of ghost points
661 
662    Output Parameter:
663 .  vv - the global vector representation (without ghost points as part of vector)
664 
665    Notes:
666    Use VecGhostGetLocalForm() to access the local, ghosted representation
667    of the vector.
668 
669    This also automatically sets the ISLocalToGlobalMapping() for this vector.
670 
671    Level: advanced
672 
673    Concepts: vectors^ghosted
674 
675 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
676           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
677           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
678           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
679 
680 @*/
681 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
682 {
683   PetscErrorCode ierr;
684 
685   PetscFunctionBegin;
686   ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
687   PetscFunctionReturn(0);
688 }
689 
#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
/*
   VecDuplicate_MPI - Creates a new parallel vector of the same type, layout
   and ghosting as win (values are NOT copied).  The layout map is shared by
   reference; the ops table, local (ghosted) representation, scatter context,
   stash options, mappings and block sizes are all replicated from win.
*/
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);

  /* use the map that exists already in win (shared by reference count) */
  ierr = PetscLayoutDestroy((*v)->map);CHKERRQ(ierr);
  (*v)->map = win->map;
  win->map->refcnt++;

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  /* copy win's possibly-customized ops table over the default one */
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArrayPrivate(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArrayPrivate(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    /* the scatter context is shared, not duplicated */
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  /* share the local-to-global mappings by reference when present */
  if (win->mapping) {
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map->bs    = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}
742 
743 /* ------------------------------------------------------------------------------------------*/
744 #undef __FUNCT__
745 #define __FUNCT__ "VecCreateGhostBlockWithArray"
746 /*@C
747    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
748    the caller allocates the array space. Indices in the ghost region are based on blocks.
749 
750    Collective on MPI_Comm
751 
752    Input Parameters:
753 +  comm - the MPI communicator to use
754 .  bs - block size
755 .  n - local vector length
756 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
757 .  nghost - number of local ghost blocks
758 .  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
759 -  array - the space to store the vector values (as long as n + nghost*bs)
760 
761    Output Parameter:
762 .  vv - the global vector representation (without ghost points as part of vector)
763 
764    Notes:
765    Use VecGhostGetLocalForm() to access the local, ghosted representation
766    of the vector.
767 
768    n is the local vector size (total local size not the number of blocks) while nghost
769    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
770    portion is bs*nghost
771 
772    Level: advanced
773 
774    Concepts: vectors^creating ghosted
775    Concepts: vectors^creating with array
776 
777 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
778           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
779           VecCreateGhostWithArray(), VecCreateGhostBlocked()
780 
781 @*/
782 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
783 {
784   PetscErrorCode ierr;
785   Vec_MPI        *w;
786   PetscScalar    *larray;
787   IS             from,to;
788   ISLocalToGlobalMapping ltog;
789   PetscInt       rstart,i,nb,*indices;
790 
791   PetscFunctionBegin;
792   *vv = 0;
793 
794   if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
795   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
796   if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
797   if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
798   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
799   /* Create global representation */
800   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
801   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
802   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
803   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
804   w    = (Vec_MPI *)(*vv)->data;
805   /* Create local representation */
806   ierr = VecGetArrayPrivate(*vv,&larray);CHKERRQ(ierr);
807   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
808   ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
809   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
810   ierr = VecRestoreArrayPrivate(*vv,&larray);CHKERRQ(ierr);
811 
812   /*
813        Create scatter context for scattering (updating) ghost values
814   */
815   ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
816   ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
817   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
818   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
819   ierr = ISDestroy(to);CHKERRQ(ierr);
820   ierr = ISDestroy(from);CHKERRQ(ierr);
821 
822   /* set local to global mapping for ghosted vector */
823   nb = n/bs;
824   ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
825   ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
826   for (i=0; i<nb; i++) {
827     indices[i] = rstart + i*bs;
828   }
829   for (i=0; i<nghost; i++) {
830     indices[nb+i] = ghosts[i];
831   }
832   ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,&ltog);CHKERRQ(ierr);
833   ierr = PetscFree(indices);CHKERRQ(ierr);
834   ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
835   ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
836   ierr = PetscFree(indices);CHKERRQ(ierr);
837 
838   PetscFunctionReturn(0);
839 }
840 
841 #undef __FUNCT__
842 #define __FUNCT__ "VecCreateGhostBlock"
843 /*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.
846 
847    Collective on MPI_Comm
848 
849    Input Parameters:
850 +  comm - the MPI communicator to use
851 .  bs - the block size
852 .  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
854 .  nghost - number of local ghost blocks
855 -  ghosts - global indices of ghost blocks
856 
857    Output Parameter:
858 .  vv - the global vector representation (without ghost points as part of vector)
859 
860    Notes:
861    Use VecGhostGetLocalForm() to access the local, ghosted representation
862    of the vector.
863 
864    n is the local vector size (total local size not the number of blocks) while nghost
865    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
866    portion is bs*nghost
867 
868    Level: advanced
869 
870    Concepts: vectors^ghosted
871 
872 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
873           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
874           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
875 
876 @*/
877 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
878 {
879   PetscErrorCode ierr;
880 
881   PetscFunctionBegin;
882   ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
883   PetscFunctionReturn(0);
884 }
885 
/*
    These routines introduce a ghosted vector in which the ghosting is determined
  by the call to VecSetLocalToGlobalMapping()
*/
890 
#undef __FUNCT__
#define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
/*
   Installs a local-to-global mapping on a FETI vector: the entries of the
   mapping beyond the locally owned length (map->n - vv->map->n) define the
   ghost region.  The vector's storage is reallocated to the full mapping
   length and a sequential "local representation" sharing that storage is
   created for local-index value insertion (see VecSetValuesLocal_FETI()).
*/
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  /* ghost count = mapping length minus locally owned length */
  v->nghost = map->n - vv->map->n;

  /* we need to make longer the array space that was allocated when the vector was created */
  /* NOTE(review): the old array is freed before the new one is allocated, so any
     values already stored in the vector are discarded -- presumably the mapping
     is set before any values are inserted; confirm with callers. */
  ierr     = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr     = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation */
  /* NOTE(review): an existing localrep (mapping set twice) is not destroyed
     here -- potential leak; verify this routine is only invoked once per vector. */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
911 
912 
913 #undef __FUNCT__
914 #define __FUNCT__ "VecSetValuesLocal_FETI"
915 PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
916 {
917   PetscErrorCode ierr;
918   Vec_MPI        *v = (Vec_MPI *)vv->data;
919 
920   PetscFunctionBegin;
921   ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
922   PetscFunctionReturn(0);
923 }
924 
925 EXTERN_C_BEGIN
926 #undef __FUNCT__
927 #define __FUNCT__ "VecCreate_FETI"
928 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
929 {
930   PetscErrorCode ierr;
931 
932   PetscFunctionBegin;
933   ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);
934 
935   /* overwrite the functions to handle setting values locally */
936   vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
937   vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
938   vv->ops->assemblybegin           = 0;
939   vv->ops->assemblyend             = 0;
940   vv->ops->setvaluesblocked        = 0;
941   vv->ops->setvaluesblocked        = 0;
942 
943   PetscFunctionReturn(0);
944 }
945 EXTERN_C_END
946 
947 
948 
949 
950 
951 
952 
953 
954 
955