/*
   A star forest (SF) describes a communication pattern
*/
#if !defined(PETSCSF_H)
#define PETSCSF_H
#include <petscsys.h>
#include <petscis.h>
#include <petscsftypes.h>

PETSC_EXTERN PetscClassId PETSCSF_CLASSID;

/*J
    PetscSFType - String with the name of a PetscSF type

   Level: beginner

.seealso: PetscSFSetType(), PetscSF
J*/
typedef const char *PetscSFType;
#define PETSCSFBASIC      "basic"
#define PETSCSFNEIGHBOR   "neighbor"
#define PETSCSFALLGATHERV "allgatherv"
#define PETSCSFALLGATHER  "allgather"
#define PETSCSFGATHERV    "gatherv"
#define PETSCSFGATHER     "gather"
#define PETSCSFALLTOALL   "alltoall"
#define PETSCSFWINDOW     "window"

/*E
   PetscSFPattern - Pattern of the PetscSF graph

$  PETSCSF_PATTERN_GENERAL   - A general graph. One sets the graph with PetscSFSetGraph() and usually does not use this enum directly.
$  PETSCSF_PATTERN_ALLGATHER - A graph in which every rank gathers all roots from all ranks (like MPI_Allgather/v). One sets the graph with PetscSFSetGraphWithPattern().
$  PETSCSF_PATTERN_GATHER    - A graph in which rank 0 gathers all roots from all ranks (like MPI_Gather/v with root=0). One sets the graph with PetscSFSetGraphWithPattern().
$  PETSCSF_PATTERN_ALLTOALL  - A graph in which every rank gathers different roots from all ranks (like MPI_Alltoall). One sets the graph with PetscSFSetGraphWithPattern().
                               In an ALLTOALL graph, each process is assumed to have <size> leaves and <size> roots, with each leaf connecting to a remote root, where <size> is
                               the size of the communicator. This does not mean one cannot communicate multiple data items between a pair of processes; one just needs to
                               create an MPI datatype for the multiple data items, e.g., with MPI_Type_contiguous().

   Level: beginner

.seealso: PetscSFSetGraph(), PetscSFSetGraphWithPattern()
E*/
typedef enum {PETSCSF_PATTERN_GENERAL=0,PETSCSF_PATTERN_ALLGATHER,PETSCSF_PATTERN_GATHER,PETSCSF_PATTERN_ALLTOALL} PetscSFPattern;
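
/*
   A minimal sketch (hypothetical usage, error checking omitted) of building an
   SF with PETSCSF_PATTERN_ALLGATHER from a PetscLayout describing how the
   roots are distributed; nlocal is an assumed per-rank root count:

     PetscSF     sf;
     PetscLayout layout;

     PetscSFCreate(PETSC_COMM_WORLD,&sf);
     PetscLayoutCreate(PETSC_COMM_WORLD,&layout);
     PetscLayoutSetLocalSize(layout,nlocal);  // roots owned by this rank
     PetscLayoutSetUp(layout);
     PetscSFSetGraphWithPattern(sf,layout,PETSCSF_PATTERN_ALLGATHER);
     PetscLayoutDestroy(&layout);             // drop our reference; layouts are reference-counted
*/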

/*E
    PetscSFWindowSyncType - Type of synchronization for PETSCSFWINDOW

$  PETSCSF_WINDOW_SYNC_FENCE - the simplest model, synchronizing across the communicator
$  PETSCSF_WINDOW_SYNC_LOCK - passive model, less synchronous, requires less setup than PETSCSF_WINDOW_SYNC_ACTIVE, but may require more handshakes
$  PETSCSF_WINDOW_SYNC_ACTIVE - active model, provides the most information to the MPI implementation, but needs to construct 2-way process groups (more setup than PETSCSF_WINDOW_SYNC_LOCK)

   Level: advanced

.seealso: PetscSFWindowSetSyncType(), PetscSFWindowGetSyncType()
E*/
typedef enum {PETSCSF_WINDOW_SYNC_FENCE,PETSCSF_WINDOW_SYNC_LOCK,PETSCSF_WINDOW_SYNC_ACTIVE} PetscSFWindowSyncType;
PETSC_EXTERN const char *const PetscSFWindowSyncTypes[];

/*E
    PetscSFWindowFlavorType - Flavor for the creation of MPI windows for PETSCSFWINDOW

$  PETSCSF_WINDOW_FLAVOR_CREATE - Use MPI_Win_create, no window reuse
$  PETSCSF_WINDOW_FLAVOR_DYNAMIC - Use MPI_Win_create_dynamic and dynamically attach pointers
$  PETSCSF_WINDOW_FLAVOR_ALLOCATE - Use MPI_Win_allocate
$  PETSCSF_WINDOW_FLAVOR_SHARED - Use MPI_Win_allocate_shared

   Level: advanced

.seealso: PetscSFWindowSetFlavorType(), PetscSFWindowGetFlavorType()
E*/
typedef enum {PETSCSF_WINDOW_FLAVOR_CREATE,PETSCSF_WINDOW_FLAVOR_DYNAMIC,PETSCSF_WINDOW_FLAVOR_ALLOCATE,PETSCSF_WINDOW_FLAVOR_SHARED} PetscSFWindowFlavorType;
PETSC_EXTERN const char *const PetscSFWindowFlavorTypes[];
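
/*
   A minimal sketch (hypothetical usage, error checking omitted) of selecting the
   one-sided window implementation and configuring its synchronization model and
   window flavor before PetscSFSetUp():

     PetscSFSetType(sf,PETSCSFWINDOW);
     PetscSFWindowSetSyncType(sf,PETSCSF_WINDOW_SYNC_FENCE);
     PetscSFWindowSetFlavorType(sf,PETSCSF_WINDOW_FLAVOR_CREATE);
*/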

/*E
    PetscSFDuplicateOption - Aspects to preserve when duplicating a PetscSF

$  PETSCSF_DUPLICATE_CONFONLY - configuration only, user must call PetscSFSetGraph()
$  PETSCSF_DUPLICATE_RANKS - communication ranks preserved, but different graph (allows simpler setup after calling PetscSFSetGraph())
$  PETSCSF_DUPLICATE_GRAPH - entire graph duplicated

   Level: beginner

.seealso: PetscSFDuplicate()
E*/
typedef enum {PETSCSF_DUPLICATE_CONFONLY,PETSCSF_DUPLICATE_RANKS,PETSCSF_DUPLICATE_GRAPH} PetscSFDuplicateOption;
PETSC_EXTERN const char *const PetscSFDuplicateOptions[];
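
/*
   A minimal sketch (hypothetical usage, error checking omitted): duplicate only
   the configuration of an existing SF, then attach a different graph to the
   copy; nroots, nleaves, ilocal, and iremote are assumed to be set up already:

     PetscSF newsf;

     PetscSFDuplicate(sf,PETSCSF_DUPLICATE_CONFONLY,&newsf);
     PetscSFSetGraph(newsf,nroots,nleaves,ilocal,PETSC_COPY_VALUES,iremote,PETSC_COPY_VALUES);
*/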

PETSC_EXTERN PetscFunctionList PetscSFList;
PETSC_EXTERN PetscErrorCode PetscSFRegister(const char[],PetscErrorCode (*)(PetscSF));

PETSC_EXTERN PetscErrorCode PetscSFInitializePackage(void);
PETSC_EXTERN PetscErrorCode PetscSFFinalizePackage(void);
PETSC_EXTERN PetscErrorCode PetscSFCreate(MPI_Comm,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFDestroy(PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFSetType(PetscSF,PetscSFType);
PETSC_EXTERN PetscErrorCode PetscSFGetType(PetscSF,PetscSFType*);
PETSC_EXTERN PetscErrorCode PetscSFView(PetscSF,PetscViewer);
PETSC_EXTERN PetscErrorCode PetscSFViewFromOptions(PetscSF,PetscObject,const char[]);
PETSC_EXTERN PetscErrorCode PetscSFSetUp(PetscSF);
PETSC_EXTERN PetscErrorCode PetscSFSetFromOptions(PetscSF);
PETSC_EXTERN PetscErrorCode PetscSFDuplicate(PetscSF,PetscSFDuplicateOption,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFWindowSetSyncType(PetscSF,PetscSFWindowSyncType);
PETSC_EXTERN PetscErrorCode PetscSFWindowGetSyncType(PetscSF,PetscSFWindowSyncType*);
PETSC_EXTERN PetscErrorCode PetscSFWindowSetFlavorType(PetscSF,PetscSFWindowFlavorType);
PETSC_EXTERN PetscErrorCode PetscSFWindowGetFlavorType(PetscSF,PetscSFWindowFlavorType*);
PETSC_EXTERN PetscErrorCode PetscSFWindowSetInfo(PetscSF,MPI_Info);
PETSC_EXTERN PetscErrorCode PetscSFWindowGetInfo(PetscSF,MPI_Info*);
PETSC_EXTERN PetscErrorCode PetscSFSetRankOrder(PetscSF,PetscBool);
PETSC_EXTERN PetscErrorCode PetscSFSetGraph(PetscSF,PetscInt,PetscInt,const PetscInt*,PetscCopyMode,const PetscSFNode*,PetscCopyMode);
PETSC_EXTERN PetscErrorCode PetscSFSetGraphWithPattern(PetscSF,PetscLayout,PetscSFPattern);
PETSC_EXTERN PetscErrorCode PetscSFGetGraph(PetscSF,PetscInt*,PetscInt*,const PetscInt**,const PetscSFNode**);
PETSC_EXTERN PetscErrorCode PetscSFGetLeafRange(PetscSF,PetscInt*,PetscInt*);
PETSC_EXTERN PetscErrorCode PetscSFCreateEmbeddedSF(PetscSF,PetscInt,const PetscInt*,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFCreateEmbeddedLeafSF(PetscSF,PetscInt,const PetscInt*,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFReset(PetscSF);
PETSC_EXTERN PetscErrorCode PetscSFSetUpRanks(PetscSF,MPI_Group);
PETSC_EXTERN PetscErrorCode PetscSFGetRootRanks(PetscSF,PetscInt*,const PetscMPIInt**,const PetscInt**,const PetscInt**,const PetscInt**);
PETSC_EXTERN PetscErrorCode PetscSFGetLeafRanks(PetscSF,PetscInt*,const PetscMPIInt**,const PetscInt**,const PetscInt**);
PETSC_EXTERN PetscErrorCode PetscSFGetGroups(PetscSF,MPI_Group*,MPI_Group*);
PETSC_EXTERN PetscErrorCode PetscSFGetMultiSF(PetscSF,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFCreateInverseSF(PetscSF,PetscSF*);
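
/*
   A minimal sketch (hypothetical usage, error checking omitted) of building a
   general graph with PetscSFSetGraph(): each rank owns two roots and has one
   leaf that references root 0 on the next rank:

     PetscSF     sf;
     PetscSFNode remote;
     PetscMPIInt rank,size;

     MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
     MPI_Comm_size(PETSC_COMM_WORLD,&size);
     remote.rank  = (rank+1)%size;  // rank that owns the root
     remote.index = 0;              // root's local index on that rank
     PetscSFCreate(PETSC_COMM_WORLD,&sf);
     PetscSFSetGraph(sf,2,1,NULL,PETSC_COPY_VALUES,&remote,PETSC_COPY_VALUES);  // NULL ilocal: leaves are contiguous from 0
     PetscSFSetUp(sf);
*/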

/* Broadcast rootdata to leafdata, combining with the existing leafdata using the provided operation */
PETSC_EXTERN PetscErrorCode PetscSFBcastAndOpBegin(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFBcastAndOpEnd(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFBcastAndOpWithMemTypeBegin(PetscSF,MPI_Datatype,PetscMemType,const void*,PetscMemType,void*,MPI_Op)
  PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(6,2);

/* Reduce leafdata into rootdata using the provided operation */
PETSC_EXTERN PetscErrorCode PetscSFReduceBegin(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFReduceEnd(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFReduceWithMemTypeBegin(PetscSF,MPI_Datatype,PetscMemType,const void*,PetscMemType,void*,MPI_Op)
  PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(6,2);
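
/*
   A minimal sketch (hypothetical usage, error checking omitted): sum each leaf's
   contribution into its root; the begin/end split allows overlapping local
   computation with communication, and both calls must receive the same arguments:

     PetscSFReduceBegin(sf,MPIU_INT,leafdata,rootdata,MPI_SUM);
     // ... unrelated local work can be overlapped here ...
     PetscSFReduceEnd(sf,MPIU_INT,leafdata,rootdata,MPI_SUM);
*/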
/* Atomically modify rootdata (using the provided operation) with leafdata from each leaf; the root value at the time of each modification is returned in leafupdate */
PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpBegin(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(5,2);
PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpEnd(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(5,2);
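
/*
   A minimal sketch (hypothetical usage, error checking omitted): fetch-and-add,
   e.g., to hand each leaf a unique slot at its root; leafupdate receives the
   root value as it was just before this leaf's update was applied:

     PetscSFFetchAndOpBegin(sf,MPIU_INT,rootdata,leafdata,leafupdate,MPI_SUM);
     PetscSFFetchAndOpEnd(sf,MPIU_INT,rootdata,leafdata,leafupdate,MPI_SUM);
*/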
/* Compute the degree of every root vertex (number of leaves in its star) */
PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeBegin(PetscSF,const PetscInt**);
PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeEnd(PetscSF,const PetscInt**);
PETSC_EXTERN PetscErrorCode PetscSFComputeMultiRootOriginalNumbering(PetscSF,const PetscInt[],PetscInt*,PetscInt*[]);
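
/*
   A minimal sketch (hypothetical usage, error checking omitted): after the pair
   of calls, degree[i] holds the number of leaves attached to local root i; the
   array is owned by the SF and must not be freed by the caller:

     const PetscInt *degree;

     PetscSFComputeDegreeBegin(sf,&degree);
     PetscSFComputeDegreeEnd(sf,&degree);
*/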
/* Concatenate data from all leaves into roots */
PETSC_EXTERN PetscErrorCode PetscSFGatherBegin(PetscSF,MPI_Datatype,const void*,void*)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFGatherEnd(PetscSF,MPI_Datatype,const void*,void*)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
/* Distribute distinct values to each leaf from roots */
PETSC_EXTERN PetscErrorCode PetscSFScatterBegin(PetscSF,MPI_Datatype,const void*,void*)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFScatterEnd(PetscSF,MPI_Datatype,const void*,void*)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);

PETSC_EXTERN PetscErrorCode PetscSFCompose(PetscSF,PetscSF,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFComposeInverse(PetscSF,PetscSF,PetscSF*);

#if defined(MPI_REPLACE)
#  define MPIU_REPLACE MPI_REPLACE
#else
/* When using an old MPI such that MPI_REPLACE is not defined, we do not pass MPI_REPLACE to MPI at all.  Instead, we
 * use it as a flag for our own reducer in the PETSCSFBASIC implementation.  This could be any unique value unlikely to
 * collide with another MPI_Op so we'll just use the value that has been used by every version of MPICH since
 * MPICH2-1.0.6. */
#  define MPIU_REPLACE (MPI_Op)(0x5800000d)
#endif

PETSC_DEPRECATED_FUNCTION("Use PetscSFGetRootRanks (since v3.12)")
PETSC_STATIC_INLINE PetscErrorCode PetscSFGetRanks(PetscSF sf,PetscInt *nranks,const PetscMPIInt **ranks,const PetscInt **roffset,const PetscInt **rmine,const PetscInt **rremote) {
  return PetscSFGetRootRanks(sf,nranks,ranks,roffset,rmine,rremote);
}

/*@C
   PetscSFBcastBegin - begin a pointwise broadcast, to be concluded with a call to PetscSFBcastEnd()

   Collective on PetscSF

   Input Arguments:
+  sf - star forest on which to communicate
.  unit - data type associated with each node
-  rootdata - buffer to broadcast

   Output Arguments:
.  leafdata - buffer to be updated with values from each leaf's respective root

   Level: intermediate

.seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFView(), PetscSFBcastEnd(), PetscSFReduceBegin(), PetscSFBcastAndOpBegin()
@*/
PETSC_STATIC_INLINE PetscErrorCode PetscSFBcastBegin(PetscSF sf,MPI_Datatype unit,const void* rootdata,void* leafdata) {
  return PetscSFBcastAndOpBegin(sf,unit,rootdata,leafdata,MPIU_REPLACE);
}

PETSC_STATIC_INLINE PetscErrorCode PetscSFBcastWithMemTypeBegin(PetscSF sf,MPI_Datatype unit,PetscMemType rootmtype,const void* rootdata,PetscMemType leafmtype,void* leafdata) {
  return PetscSFBcastAndOpWithMemTypeBegin(sf,unit,rootmtype,rootdata,leafmtype,leafdata,MPIU_REPLACE);
}

/*@C
   PetscSFBcastEnd - end a broadcast operation started with PetscSFBcastBegin()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
-  rootdata - buffer to broadcast

   Output Arguments:
.  leafdata - buffer to be updated with values from each leaf's respective root

   Level: intermediate

.seealso: PetscSFSetGraph(), PetscSFReduceEnd()
@*/
PETSC_STATIC_INLINE PetscErrorCode PetscSFBcastEnd(PetscSF sf,MPI_Datatype unit,const void* rootdata,void* leafdata) {
  return PetscSFBcastAndOpEnd(sf,unit,rootdata,leafdata,MPIU_REPLACE);
}
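
/*
   A minimal sketch (hypothetical usage, error checking omitted): broadcast root
   values to the leaves; both calls must receive the same arguments, and the
   leaf values are guaranteed only after PetscSFBcastEnd() returns:

     PetscSFBcastBegin(sf,MPIU_SCALAR,rootdata,leafdata);
     PetscSFBcastEnd(sf,MPIU_SCALAR,rootdata,leafdata);
*/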

#endif