xref: /petsc/include/petscsf.h (revision b5a8e515b429ceb8098d7b9fc29a8331d9ac04b7)
1936c5a86SJed Brown /*
2936c5a86SJed Brown    A star forest (SF) describes a communication pattern
3936c5a86SJed Brown */
4936c5a86SJed Brown /* Include guard: identifiers beginning with a double underscore are reserved
4936c5a86SJed Brown    for the implementation (C11 7.1.3), so avoid the __PETSCSF_H spelling. */
4936c5a86SJed Brown #if !defined(PETSCSF_H)
5936c5a86SJed Brown #define PETSCSF_H
62c8e378dSBarry Smith #include <petscsys.h>
70c312b8eSJed Brown #include <petscsftypes.h>
8936c5a86SJed Brown 
9014dd563SJed Brown PETSC_EXTERN PetscClassId PETSCSF_CLASSID;
10936c5a86SJed Brown 
115af141bcSJed Brown /*J
125af141bcSJed Brown     PetscSFType - String with the name of a PetscSF method or the creation function
135af141bcSJed Brown        with an optional dynamic library name, for example
145af141bcSJed Brown        http://www.mcs.anl.gov/petsc/lib.so:mysfcreate()
155af141bcSJed Brown 
165af141bcSJed Brown    Level: beginner
175af141bcSJed Brown 
18ed658588SBarry Smith    Notes: The two approaches provided are
19ed658588SBarry Smith $     PETSCSFBASIC which uses MPI-1 two-sided message passing to perform the communication and
20ed658588SBarry Smith $     PETSCSFWINDOW which uses MPI-2 one-sided operations to perform the communication; this may be more efficient,
21ed658588SBarry Smith $                   but may not be available for all MPI distributions. In particular, Open MPI has bugs in its one-sided
22ed658588SBarry Smith $                   operations that prevent its use.
23ed658588SBarry Smith 
245af141bcSJed Brown .seealso: PetscSFSetType(), PetscSF
255af141bcSJed Brown J*/
265af141bcSJed Brown typedef const char *PetscSFType;
27ac762476SJed Brown #define PETSCSFBASIC  "basic"
28ed658588SBarry Smith #define PETSCSFWINDOW "window"
295af141bcSJed Brown 
30936c5a86SJed Brown /*E
315af141bcSJed Brown     PetscSFWindowSyncType - Type of synchronization for PETSCSFWINDOW
32936c5a86SJed Brown 
335af141bcSJed Brown $  PETSCSF_WINDOW_SYNC_FENCE - simplest model, synchronizing across communicator
345af141bcSJed Brown $  PETSCSF_WINDOW_SYNC_LOCK - passive model, less synchronous, requires less setup than PETSCSF_WINDOW_SYNC_ACTIVE, but may require more handshakes
355af141bcSJed Brown $  PETSCSF_WINDOW_SYNC_ACTIVE - active model, provides most information to MPI implementation, needs to construct 2-way process groups (more setup than PETSCSF_WINDOW_SYNC_LOCK)
36936c5a86SJed Brown 
37e84a5f06SJed Brown    Level: advanced
38936c5a86SJed Brown 
39e84a5f06SJed Brown .seealso: PetscSFWindowSetSyncType(), PetscSFWindowGetSyncType()
40936c5a86SJed Brown E*/
415af141bcSJed Brown typedef enum {PETSCSF_WINDOW_SYNC_FENCE,PETSCSF_WINDOW_SYNC_LOCK,PETSCSF_WINDOW_SYNC_ACTIVE} PetscSFWindowSyncType;
425af141bcSJed Brown PETSC_EXTERN const char *const PetscSFWindowSyncTypes[];
43936c5a86SJed Brown 
44e84a5f06SJed Brown /*E
45e84a5f06SJed Brown     PetscSFDuplicateOption - Aspects to preserve when duplicating a PetscSF
46e84a5f06SJed Brown 
47e84a5f06SJed Brown $  PETSCSF_DUPLICATE_CONFONLY - configuration only, user must call PetscSFSetGraph()
48e84a5f06SJed Brown $  PETSCSF_DUPLICATE_RANKS - communication ranks preserved, but different graph (allows simpler setup after calling PetscSFSetGraph())
49e84a5f06SJed Brown $  PETSCSF_DUPLICATE_GRAPH - entire graph duplicated
50e84a5f06SJed Brown 
51e84a5f06SJed Brown    Level: beginner
52e84a5f06SJed Brown 
53e84a5f06SJed Brown .seealso: PetscSFDuplicate()
54e84a5f06SJed Brown E*/
55e84a5f06SJed Brown typedef enum {PETSCSF_DUPLICATE_CONFONLY,PETSCSF_DUPLICATE_RANKS,PETSCSF_DUPLICATE_GRAPH} PetscSFDuplicateOption;
56e84a5f06SJed Brown PETSC_EXTERN const char *const PetscSFDuplicateOptions[];
57090c6444SJed Brown 
58adc40e5bSBarry Smith PETSC_EXTERN PetscFunctionList PetscSFList;
59bdf89e91SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFRegister(const char[],PetscErrorCode (*)(PetscSF));
605af141bcSJed Brown 
61607a6623SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFInitializePackage(void);
62014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFFinalizePackage(void);
6379c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFCreate(MPI_Comm,PetscSF*);
64014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFDestroy(PetscSF*);
655af141bcSJed Brown PETSC_EXTERN PetscErrorCode PetscSFSetType(PetscSF,PetscSFType);
66014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFView(PetscSF,PetscViewer);
673a93e3b7SToby Isaac /* PetscSFViewFromOptions - view a star forest according to options in the database;
673a93e3b7SToby Isaac    thin inline wrapper forwarding to PetscObjectViewFromOptions() with the SF cast to PetscObject */
673a93e3b7SToby Isaac PETSC_STATIC_INLINE PetscErrorCode PetscSFViewFromOptions(PetscSF A,PetscObject obj,const char name[])
673a93e3b7SToby Isaac {
673a93e3b7SToby Isaac   return PetscObjectViewFromOptions((PetscObject)A,obj,name);
673a93e3b7SToby Isaac }
685af141bcSJed Brown PETSC_EXTERN PetscErrorCode PetscSFSetUp(PetscSF);
69014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFSetFromOptions(PetscSF);
70e84a5f06SJed Brown PETSC_EXTERN PetscErrorCode PetscSFDuplicate(PetscSF,PetscSFDuplicateOption,PetscSF*);
715af141bcSJed Brown PETSC_EXTERN PetscErrorCode PetscSFWindowSetSyncType(PetscSF,PetscSFWindowSyncType);
725af141bcSJed Brown PETSC_EXTERN PetscErrorCode PetscSFWindowGetSyncType(PetscSF,PetscSFWindowSyncType*);
73014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFSetRankOrder(PetscSF,PetscBool);
7463f4a732SJed Brown PETSC_EXTERN PetscErrorCode PetscSFSetGraph(PetscSF,PetscInt,PetscInt,const PetscInt*,PetscCopyMode,const PetscSFNode*,PetscCopyMode);
7579c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFGetGraph(PetscSF,PetscInt*,PetscInt*,const PetscInt**,const PetscSFNode**);
76f723732fSJed Brown PETSC_EXTERN PetscErrorCode PetscSFGetLeafRange(PetscSF,PetscInt*,PetscInt*);
7779c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFCreateEmbeddedSF(PetscSF,PetscInt,const PetscInt*,PetscSF*);
782f5fb4c2SMatthew G. Knepley PETSC_EXTERN PetscErrorCode PetscSFCreateEmbeddedLeafSF(PetscSF,PetscInt,const PetscInt *, PetscSF *);
79014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFReset(PetscSF);
80*b5a8e515SJed Brown PETSC_EXTERN PetscErrorCode PetscSFSetUpRanks(PetscSF,MPI_Group);
815b6cb184SJed Brown PETSC_EXTERN PetscErrorCode PetscSFGetRanks(PetscSF,PetscInt*,const PetscMPIInt**,const PetscInt**,const PetscInt**,const PetscInt**);
82014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFGetGroups(PetscSF,MPI_Group*,MPI_Group*);
83014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFGetMultiSF(PetscSF,PetscSF*);
84014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFCreateInverseSF(PetscSF,PetscSF*);
85936c5a86SJed Brown 
86936c5a86SJed Brown /* broadcasts rootdata to leafdata */
8779c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFBcastBegin(PetscSF,MPI_Datatype,const void*,void*)
88894dd566SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
8979c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFBcastEnd(PetscSF,MPI_Datatype,const void*,void*)
90894dd566SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
91936c5a86SJed Brown /* Reduce leafdata into rootdata using provided operation */
9279c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFReduceBegin(PetscSF,MPI_Datatype,const void*,void *,MPI_Op)
9319436ca2SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
9479c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFReduceEnd(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
9519436ca2SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
96936c5a86SJed Brown /* Atomically modifies (using provided operation) rootdata using leafdata from each leaf, value at root at time of modification is returned in leafupdate. */
9779c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpBegin(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op)
98894dd566SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(5,2);
9979c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpEnd(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op)
100894dd566SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(5,2);
101936c5a86SJed Brown /* Compute the degree of every root vertex (number of leaves in its star) */
10279c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeBegin(PetscSF,const PetscInt**);
10379c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeEnd(PetscSF,const PetscInt**);
104936c5a86SJed Brown /* Concatenate data from all leaves into roots */
10579c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFGatherBegin(PetscSF,MPI_Datatype,const void*,void*)
106894dd566SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
10779c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFGatherEnd(PetscSF,MPI_Datatype,const void*,void*)
108894dd566SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
109936c5a86SJed Brown /* Distribute distinct values to each leaf from roots */
11079c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFScatterBegin(PetscSF,MPI_Datatype,const void*,void*)
111894dd566SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
11279c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFScatterEnd(PetscSF,MPI_Datatype,const void*,void*)
113894dd566SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
114936c5a86SJed Brown 
115a7b3aa13SAta Mesgarnejad PETSC_EXTERN PetscErrorCode PetscSFCompose(PetscSF,PetscSF,PetscSF*);
116a7b3aa13SAta Mesgarnejad 
1178bfbc91cSJed Brown #if defined(MPI_REPLACE)
1188bfbc91cSJed Brown #  define MPIU_REPLACE MPI_REPLACE
1198bfbc91cSJed Brown #else
1208bfbc91cSJed Brown /* When using an old MPI such that MPI_REPLACE is not defined, we do not pass MPI_REPLACE to MPI at all.  Instead, we
1218bfbc91cSJed Brown  * use it as a flag for our own reducer in the PETSCSFBASIC implementation.  This could be any unique value unlikely to
1228bfbc91cSJed Brown  * collide with another MPI_Op so we'll just use the value that has been used by every version of MPICH since
1238bfbc91cSJed Brown  * MPICH2-1.0.6. */
1248bfbc91cSJed Brown #  define MPIU_REPLACE (MPI_Op)(0x5800000d)
1258bfbc91cSJed Brown #endif
1268bfbc91cSJed Brown 
127936c5a86SJed Brown #endif
128