1
2 #include <petscsys.h> /*I "petscsys.h" I*/
3 #include <petsc/private/petscimpl.h>
4 /*
5 Note that tag of 0 is ok because comm is a private communicator
6 generated below just for these routines.
7 */
8
PetscSequentialPhaseBegin_Private(MPI_Comm comm,int ng)9 PETSC_INTERN PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm,int ng)
10 {
11 PetscErrorCode ierr;
12 PetscMPIInt rank,size,tag = 0;
13 MPI_Status status;
14
15 PetscFunctionBegin;
16 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
17 if (size == 1) PetscFunctionReturn(0);
18 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
19 if (rank) {
20 ierr = MPI_Recv(NULL,0,MPI_INT,rank-1,tag,comm,&status);CHKERRQ(ierr);
21 }
22 /* Send to the next process in the group unless we are the last process */
23 if ((rank % ng) < ng - 1 && rank != size - 1) {
24 ierr = MPI_Send(NULL,0,MPI_INT,rank + 1,tag,comm);CHKERRQ(ierr);
25 }
26 PetscFunctionReturn(0);
27 }
28
PetscSequentialPhaseEnd_Private(MPI_Comm comm,int ng)29 PETSC_INTERN PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm,int ng)
30 {
31 PetscErrorCode ierr;
32 PetscMPIInt rank,size,tag = 0;
33 MPI_Status status;
34
35 PetscFunctionBegin;
36 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
37 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
38 if (size == 1) PetscFunctionReturn(0);
39
40 /* Send to the first process in the next group */
41 if ((rank % ng) == ng - 1 || rank == size - 1) {
42 ierr = MPI_Send(NULL,0,MPI_INT,(rank + 1) % size,tag,comm);CHKERRQ(ierr);
43 }
44 if (!rank) {
45 ierr = MPI_Recv(NULL,0,MPI_INT,size-1,tag,comm,&status);CHKERRQ(ierr);
46 }
47 PetscFunctionReturn(0);
48 }
49
/* ---------------------------------------------------------------------*/
/*
   The variable Petsc_Seq_keyval is used to indicate an MPI attribute that
   is attached to a communicator that manages the sequential phase code below.

   It is created lazily by PetscSequentialPhaseBegin(); the sentinel value
   MPI_KEYVAL_INVALID means the keyval has not yet been created.
*/
PetscMPIInt Petsc_Seq_keyval = MPI_KEYVAL_INVALID;
56
57 /*@
58 PetscSequentialPhaseBegin - Begins a sequential section of code.
59
60 Collective
61
62 Input Parameters:
63 + comm - Communicator to sequentialize.
64 - ng - Number in processor group. This many processes are allowed to execute
65 at the same time (usually 1)
66
67 Level: intermediate
68
69 Notes:
70 PetscSequentialPhaseBegin() and PetscSequentialPhaseEnd() provide a
71 way to force a section of code to be executed by the processes in
72 rank order. Typically, this is done with
73 .vb
74 PetscSequentialPhaseBegin(comm, 1);
75 <code to be executed sequentially>
76 PetscSequentialPhaseEnd(comm, 1);
77 .ve
78
79 Often, the sequential code contains output statements (e.g., printf) to
80 be executed. Note that you may need to flush the I/O buffers before
81 calling PetscSequentialPhaseEnd(). Also, note that some systems do
   not propagate I/O in any order to the controlling terminal (in other words,
83 even if you flush the output, you may not get the data in the order
84 that you want).
85
86 .seealso: PetscSequentialPhaseEnd()
87
88 @*/
PetscSequentialPhaseBegin(MPI_Comm comm,int ng)89 PetscErrorCode PetscSequentialPhaseBegin(MPI_Comm comm,int ng)
90 {
91 PetscErrorCode ierr;
92 PetscMPIInt size;
93 MPI_Comm local_comm,*addr_local_comm;
94
95 PetscFunctionBegin;
96 ierr = PetscSysInitializePackage();CHKERRQ(ierr);
97 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
98 if (size == 1) PetscFunctionReturn(0);
99
100 /* Get the private communicator for the sequential operations */
101 if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) {
102 ierr = MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Seq_keyval,NULL);CHKERRQ(ierr);
103 }
104
105 ierr = MPI_Comm_dup(comm,&local_comm);CHKERRQ(ierr);
106 ierr = PetscMalloc1(1,&addr_local_comm);CHKERRQ(ierr);
107
108 *addr_local_comm = local_comm;
109
110 ierr = MPI_Comm_set_attr(comm,Petsc_Seq_keyval,(void*)addr_local_comm);CHKERRQ(ierr);
111 ierr = PetscSequentialPhaseBegin_Private(local_comm,ng);CHKERRQ(ierr);
112 PetscFunctionReturn(0);
113 }
114
115 /*@
116 PetscSequentialPhaseEnd - Ends a sequential section of code.
117
118 Collective
119
120 Input Parameters:
121 + comm - Communicator to sequentialize.
122 - ng - Number in processor group. This many processes are allowed to execute
123 at the same time (usually 1)
124
125 Level: intermediate
126
127 Notes:
128 See PetscSequentialPhaseBegin() for more details.
129
130 .seealso: PetscSequentialPhaseBegin()
131
132 @*/
PetscSequentialPhaseEnd(MPI_Comm comm,int ng)133 PetscErrorCode PetscSequentialPhaseEnd(MPI_Comm comm,int ng)
134 {
135 PetscErrorCode ierr;
136 PetscMPIInt size,flag;
137 MPI_Comm local_comm,*addr_local_comm;
138
139 PetscFunctionBegin;
140 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
141 if (size == 1) PetscFunctionReturn(0);
142
143 ierr = MPI_Comm_get_attr(comm,Petsc_Seq_keyval,(void**)&addr_local_comm,&flag);CHKERRQ(ierr);
144 if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
145 local_comm = *addr_local_comm;
146
147 ierr = PetscSequentialPhaseEnd_Private(local_comm,ng);CHKERRQ(ierr);
148
149 ierr = PetscFree(addr_local_comm);CHKERRQ(ierr);
150 ierr = MPI_Comm_free(&local_comm);CHKERRQ(ierr);
151 ierr = MPI_Comm_delete_attr(comm,Petsc_Seq_keyval);CHKERRQ(ierr);
152 PetscFunctionReturn(0);
153 }
154
155 /*@C
156 PetscGlobalMinMaxInt - Get the global min/max from local min/max input
157
158 Collective
159
160 Input Parameter:
161 . minMaxVal - An array with the local min and max
162
163 Output Parameter:
164 . minMaxValGlobal - An array with the global min and max
165
166 Level: beginner
167
168 .seealso: PetscSplitOwnership()
169 @*/
PetscGlobalMinMaxInt(MPI_Comm comm,PetscInt minMaxVal[2],PetscInt minMaxValGlobal[2])170 PetscErrorCode PetscGlobalMinMaxInt(MPI_Comm comm, PetscInt minMaxVal[2], PetscInt minMaxValGlobal[2])
171 {
172 PetscErrorCode ierr;
173
174 PetscFunctionBegin;
175 minMaxVal[1] = -minMaxVal[1];
176 ierr = MPI_Allreduce(minMaxVal, minMaxValGlobal, 2, MPIU_INT, MPI_MIN, comm);CHKERRQ(ierr);
177 minMaxValGlobal[1] = -minMaxValGlobal[1];
178 PetscFunctionReturn(0);
179 }
180
181 /*@C
182 PetscGlobalMinMaxReal - Get the global min/max from local min/max input
183
184 Collective
185
186 Input Parameter:
187 . minMaxVal - An array with the local min and max
188
189 Output Parameter:
190 . minMaxValGlobal - An array with the global min and max
191
192 Level: beginner
193
194 .seealso: PetscSplitOwnership()
195 @*/
PetscGlobalMinMaxReal(MPI_Comm comm,PetscReal minMaxVal[2],PetscReal minMaxValGlobal[2])196 PetscErrorCode PetscGlobalMinMaxReal(MPI_Comm comm, PetscReal minMaxVal[2], PetscReal minMaxValGlobal[2])
197 {
198 PetscErrorCode ierr;
199
200 PetscFunctionBegin;
201 minMaxVal[1] = -minMaxVal[1];
202 ierr = MPI_Allreduce(minMaxVal, minMaxValGlobal, 2, MPIU_REAL, MPI_MIN, comm);CHKERRQ(ierr);
203 minMaxValGlobal[1] = -minMaxValGlobal[1];
204 PetscFunctionReturn(0);
205 }
206