1 #include <petsc/private/dmpleximpl.h> /*I "petscdmplex.h" I*/
2 #include <petsc/private/dmlabelimpl.h> /*I "petscdmlabel.h" I*/
3
4 /*@C
5 DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6
7 Input Parameters:
8 + dm - The DM object
9 . user - The user callback, may be NULL (to clear the callback)
10 - ctx - context for callback evaluation, may be NULL
11
12 Level: advanced
13
14 Notes:
15 The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16
17 Any setting here overrides other configuration of DMPlex adjacency determination.
18
19 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm, PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void *ctx)
22 {
23 DM_Plex *mesh = (DM_Plex *)dm->data;
24
25 PetscFunctionBegin;
26 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27 mesh->useradjacency = user;
28 mesh->useradjacencyctx = ctx;
29 PetscFunctionReturn(0);
30 }
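/* A minimal, hypothetical sketch of a user adjacency callback and its registration; the callback
   name MyAdjacency and its policy (a point plus its cone) are illustrative only, not part of PETSc. */
#if 0
static PetscErrorCode MyAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[], void *ctx)
{
  const PetscInt *cone;
  PetscInt        coneSize, c;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
  ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
  /* On entry, *adjSize holds the capacity of adj[]; on exit it holds the number of points written */
  if (coneSize+1 > *adjSize) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Adjacency array too small");
  adj[0] = p;                                         /* the point itself */
  for (c = 0; c < coneSize; ++c) adj[c+1] = cone[c];  /* plus its cone */
  *adjSize = coneSize + 1;
  PetscFunctionReturn(0);
}

/* Registration, typically done right after creating the DMPlex:
     ierr = DMPlexSetAdjacencyUser(dm, MyAdjacency, NULL);CHKERRQ(ierr); */
#endif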
31
32 /*@C
33 DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34
35 Input Parameter:
36 . dm - The DM object
37
38 Output Parameters:
39 + user - The user callback
40 - ctx - context for callback evaluation
41
42 Level: advanced
43
44 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47 {
48 DM_Plex *mesh = (DM_Plex *)dm->data;
49
50 PetscFunctionBegin;
51 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52 if (user) *user = mesh->useradjacency;
53 if (ctx) *ctx = mesh->useradjacencyctx;
54 PetscFunctionReturn(0);
55 }
56
57 /*@
58 DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59
60 Input Parameters:
61 + dm - The DM object
62 - useAnchors - Flag to use the constraints. If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63
64 Level: intermediate
65
66 .seealso: DMGetAdjacency(), DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70 DM_Plex *mesh = (DM_Plex *) dm->data;
71
72 PetscFunctionBegin;
73 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74 mesh->useAnchors = useAnchors;
75 PetscFunctionReturn(0);
76 }
77
78 /*@
79 DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80
81 Input Parameter:
82 . dm - The DM object
83
84 Output Parameter:
85 . useAnchors - Flag to use the constraints. If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86
87 Level: intermediate
88
89 .seealso: DMPlexSetAdjacencyUseAnchors(), DMSetAdjacency(), DMGetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93 DM_Plex *mesh = (DM_Plex *) dm->data;
94
95 PetscFunctionBegin;
96 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97 PetscValidIntPointer(useAnchors, 2);
98 *useAnchors = mesh->useAnchors;
99 PetscFunctionReturn(0);
100 }
101
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104 const PetscInt *cone = NULL;
105 PetscInt numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106 PetscErrorCode ierr;
107
108 PetscFunctionBeginHot;
109 ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
110 ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
111 for (c = 0; c <= coneSize; ++c) {
112 const PetscInt point = !c ? p : cone[c-1];
113 const PetscInt *support = NULL;
114 PetscInt supportSize, s, q;
115
116 ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
117 ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
118 for (s = 0; s < supportSize; ++s) {
119 for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
120 if (support[s] == adj[q]) break;
121 }
122 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
123 }
124 }
125 *adjSize = numAdj;
126 PetscFunctionReturn(0);
127 }
128
129 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
130 {
131 const PetscInt *support = NULL;
132 PetscInt numAdj = 0, maxAdjSize = *adjSize, supportSize, s;
133 PetscErrorCode ierr;
134
135 PetscFunctionBeginHot;
136 ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
137 ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
138 for (s = 0; s <= supportSize; ++s) {
139 const PetscInt point = !s ? p : support[s-1];
140 const PetscInt *cone = NULL;
141 PetscInt coneSize, c, q;
142
143 ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
144 ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
145 for (c = 0; c < coneSize; ++c) {
146 for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
147 if (cone[c] == adj[q]) break;
148 }
149 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
150 }
151 }
152 *adjSize = numAdj;
153 PetscFunctionReturn(0);
154 }
155
156 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
157 {
158 PetscInt *star = NULL;
159 PetscInt numAdj = 0, maxAdjSize = *adjSize, starSize, s;
160 PetscErrorCode ierr;
161
162 PetscFunctionBeginHot;
163 ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
164 for (s = 0; s < starSize*2; s += 2) {
165 const PetscInt *closure = NULL;
166 PetscInt closureSize, c, q;
167
168 ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
169 for (c = 0; c < closureSize*2; c += 2) {
170 for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
171 if (closure[c] == adj[q]) break;
172 }
173 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
174 }
175 ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
176 }
177 ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
178 *adjSize = numAdj;
179 PetscFunctionReturn(0);
180 }
181
182 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
183 {
184 static PetscInt asiz = 0;
185 PetscInt maxAnchors = 1;
186 PetscInt aStart = -1, aEnd = -1;
187 PetscInt maxAdjSize;
188 PetscSection aSec = NULL;
189 IS aIS = NULL;
190 const PetscInt *anchors;
191 DM_Plex *mesh = (DM_Plex *)dm->data;
192 PetscErrorCode ierr;
193
194 PetscFunctionBeginHot;
195 if (useAnchors) {
196 ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
197 if (aSec) {
198 ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
199 maxAnchors = PetscMax(1,maxAnchors);
200 ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
201 ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
202 }
203 }
204 if (!*adj) {
205 PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
206
207 ierr = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
208 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
209 depth = PetscMax(depth, -depth);
210 ierr = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
211 coneSeries = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
212 supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
213 asiz = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
214 asiz *= maxAnchors;
215 asiz = PetscMin(asiz,pEnd-pStart);
216 ierr = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
217 }
218 if (*adjSize < 0) *adjSize = asiz;
219 maxAdjSize = *adjSize;
220 if (mesh->useradjacency) {
221 ierr = (*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
222 } else if (useTransitiveClosure) {
223 ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
224 } else if (useCone) {
225 ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
226 } else {
227 ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
228 }
229 if (useAnchors && aSec) {
230 PetscInt origSize = *adjSize;
231 PetscInt numAdj = origSize;
232 PetscInt i = 0, j;
233 PetscInt *orig = *adj;
234
235 while (i < origSize) {
236 PetscInt p = orig[i];
237 PetscInt aDof = 0;
238
239 if (p >= aStart && p < aEnd) {
240 ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
241 }
242 if (aDof) {
243 PetscInt aOff;
244 PetscInt s, q;
245
246 for (j = i + 1; j < numAdj; j++) {
247 orig[j - 1] = orig[j];
248 }
249 origSize--;
250 numAdj--;
251 ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
252 for (s = 0; s < aDof; ++s) {
253 for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
254 if (anchors[aOff+s] == orig[q]) break;
255 }
256 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
257 }
258 }
259 else {
260 i++;
261 }
262 }
263 *adjSize = numAdj;
264 ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
265 }
266 PetscFunctionReturn(0);
267 }
268
269 /*@
270 DMPlexGetAdjacency - Return all points adjacent to the given point
271
272 Input Parameters:
273 + dm - The DM object
274 . p - The point
275 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
276 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
277
278 Output Parameters:
279 + adjSize - The number of adjacent points
280 - adj - The adjacent points
281
282 Level: advanced
283
284 Notes:
285 The user must PetscFree the adj array if it was not passed in.
286
287 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
288 @*/
289 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
290 {
291 PetscBool useCone, useClosure, useAnchors;
292 PetscErrorCode ierr;
293
294 PetscFunctionBeginHot;
295 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
296 PetscValidPointer(adjSize,3);
297 PetscValidPointer(adj,4);
298 ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
299 ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
300 ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
301 PetscFunctionReturn(0);
302 }
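/* A minimal usage sketch (the calling context is hypothetical): pass PETSC_DETERMINE and a NULL
   array so that the routine sizes and allocates the adjacency list, then free it afterwards. */
#if 0
{
  PetscInt      *adj = NULL, adjSize = PETSC_DETERMINE, a;
  PetscErrorCode ierr;

  ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
  for (a = 0; a < adjSize; ++a) {ierr = PetscPrintf(PETSC_COMM_SELF, "%D is adjacent to %D\n", p, adj[a]);CHKERRQ(ierr);}
  ierr = PetscFree(adj);CHKERRQ(ierr); /* required because the array was allocated for us */
}
#endif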
303
304 /*@
305 DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
306
307 Collective on dm
308
309 Input Parameters:
310 + dm - The DM
311 . sfPoint - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks - The rank of each process sharing a leaf point
312
313 Output Parameters:
314 + processRanks - A list of process neighbors, or NULL
315 - sfProcess - An SF encoding the two-sided process connectivity, or NULL
316
317 Level: developer
318
319 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
320 @*/
321 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
322 {
323 const PetscSFNode *remotePoints;
324 PetscInt *localPointsNew;
325 PetscSFNode *remotePointsNew;
326 const PetscInt *nranks;
327 PetscInt *ranksNew;
328 PetscBT neighbors;
329 PetscInt pStart, pEnd, p, numLeaves, l, numNeighbors, n;
330 PetscMPIInt size, proc, rank;
331 PetscErrorCode ierr;
332
333 PetscFunctionBegin;
334 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
335 PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
336 if (processRanks) {PetscValidPointer(processRanks, 3);}
337 if (sfProcess) {PetscValidPointer(sfProcess, 4);}
338 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
339 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
340 ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
341 ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
342 ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
343 /* Compute root-to-leaf process connectivity */
344 ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
345 ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
346 for (p = pStart; p < pEnd; ++p) {
347 PetscInt ndof, noff, n;
348
349 ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
350 ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
351 for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
352 }
353 ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
354 /* Compute leaf-to-neighbor process connectivity */
355 ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
356 ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
357 for (p = pStart; p < pEnd; ++p) {
358 PetscInt ndof, noff, n;
359
360 ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
361 ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
362 for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
363 }
364 ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
365 /* Compute leaf-to-root process connectivity */
366 for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
367 /* Calculate edges */
368 PetscBTClear(neighbors, rank);
369 for (proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
370 ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
371 ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
372 ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
373 for (proc = 0, n = 0; proc < size; ++proc) {
374 if (PetscBTLookup(neighbors, proc)) {
375 ranksNew[n] = proc;
376 localPointsNew[n] = proc;
377 remotePointsNew[n].index = rank;
378 remotePointsNew[n].rank = proc;
379 ++n;
380 }
381 }
382 ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
383 if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
384 else {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
385 if (sfProcess) {
386 ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
387 ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
388 ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
389 ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
390 }
391 PetscFunctionReturn(0);
392 }
393
394 /*@
395 DMPlexDistributeOwnership - Compute owner information for shared points, i.e. the two-sided (root and leaf) description of the one-sided point SF.
396
397 Collective on dm
398
399 Input Parameter:
400 . dm - The DM
401
402 Output Parameters:
403 + rootSection - The number of leaves for a given root point
404 . rootrank - The rank of each edge into the root point
405 . leafSection - The number of processes sharing a given leaf point
406 - leafrank - The rank of each process sharing a leaf point
407
408 Level: developer
409
410 .seealso: DMPlexCreateOverlapLabel(), DMPlexDistribute(), DMPlexDistributeOverlap()
411 @*/
412 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
413 {
414 MPI_Comm comm;
415 PetscSF sfPoint;
416 const PetscInt *rootdegree;
417 PetscInt *myrank, *remoterank;
418 PetscInt pStart, pEnd, p, nedges;
419 PetscMPIInt rank;
420 PetscErrorCode ierr;
421
422 PetscFunctionBegin;
423 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
424 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
425 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
426 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
427 /* Compute number of leaves for each root */
428 ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
429 ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
430 ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
431 ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
432 for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
433 ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
434 /* Gather rank of each leaf to root */
435 ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
436 ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
437 ierr = PetscMalloc1(nedges, &remoterank);CHKERRQ(ierr);
438 for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
439 ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
440 ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
441 ierr = PetscFree(myrank);CHKERRQ(ierr);
442 ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
443 /* Distribute remote ranks to leaves */
444 ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
445 ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
446 PetscFunctionReturn(0);
447 }
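/* A sketch (the caller code is hypothetical) of the usual pairing with DMPlexCreateOverlapLabel():
   the two-sided ownership data computed here is fed directly into the overlap computation. */
#if 0
{
  PetscSection   rootSection, leafSection;
  IS             rootrank, leafrank;
  DMLabel        ovLabel;
  PetscErrorCode ierr;

  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlapLabel(dm, 1, rootSection, rootrank, leafSection, leafrank, &ovLabel);CHKERRQ(ierr);
  /* ... use ovLabel to migrate overlap points ... */
  ierr = DMLabelDestroy(&ovLabel);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
}
#endif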
448
449 /*@C
450 DMPlexCreateOverlapLabel - Compute a DMLabel containing the remote overlap contributions, as point/rank pairings, needed to build the requested overlap.
451
452 Collective on dm
453
454 Input Parameters:
455 + dm - The DM
456 . levels - Number of overlap levels
457 . rootSection - The number of leaves for a given root point
458 . rootrank - The rank of each edge into the root point
459 . leafSection - The number of processes sharing a given leaf point
460 - leafrank - The rank of each process sharing a leaf point
461
462 Output Parameter:
463 . ovLabel - DMLabel containing remote overlap contributions as point/rank pairings
464
465 Level: developer
466
467 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
468 @*/
469 PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
470 {
471 MPI_Comm comm;
472 DMLabel ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
473 PetscSF sfPoint;
474 const PetscSFNode *remote;
475 const PetscInt *local;
476 const PetscInt *nrank, *rrank;
477 PetscInt *adj = NULL;
478 PetscInt pStart, pEnd, p, sStart, sEnd, nleaves, l;
479 PetscMPIInt rank, size;
480 PetscBool flg;
481 PetscErrorCode ierr;
482
483 PetscFunctionBegin;
484 *ovLabel = NULL;
485 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
486 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
487 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
488 if (size == 1) PetscFunctionReturn(0);
489 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
490 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
491 ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
492 ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
493 ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
494 /* Handle leaves: shared with the root point */
495 for (l = 0; l < nleaves; ++l) {
496 PetscInt adjSize = PETSC_DETERMINE, a;
497
498 ierr = DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj);CHKERRQ(ierr);
499 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
500 }
501 ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
502 ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
503 /* Handle roots */
504 for (p = pStart; p < pEnd; ++p) {
505 PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
506
507 if ((p >= sStart) && (p < sEnd)) {
508 /* Some leaves share a root with other leaves on different processes */
509 ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
510 if (neighbors) {
511 ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
512 ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
513 for (n = 0; n < neighbors; ++n) {
514 const PetscInt remoteRank = nrank[noff+n];
515
516 if (remoteRank == rank) continue;
517 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
518 }
519 }
520 }
521 /* Roots are shared with leaves */
522 ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
523 if (!neighbors) continue;
524 ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
525 ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
526 for (n = 0; n < neighbors; ++n) {
527 const PetscInt remoteRank = rrank[noff+n];
528
529 if (remoteRank == rank) continue;
530 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
531 }
532 }
533 ierr = PetscFree(adj);CHKERRQ(ierr);
534 ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
535 ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
536 /* Add additional overlap levels */
537 for (l = 1; l < levels; l++) {
538 /* Propagate point donations over SF to capture remote connections */
539 ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
540 /* Add next level of point donations to the label */
541 ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
542 }
543 /* We require the closure in the overlap */
544 ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
545 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
546 if (flg) {
547 PetscViewer viewer;
548 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer);CHKERRQ(ierr);
549 ierr = DMLabelView(ovAdjByRank, viewer);CHKERRQ(ierr);
550 }
551 /* Invert sender to receiver label */
552 ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel);CHKERRQ(ierr);
553 ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel);CHKERRQ(ierr);
554 /* Add owned points, except for shared local points */
555 for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
556 for (l = 0; l < nleaves; ++l) {
557 ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
558 ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
559 }
560 /* Clean up */
561 ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
562 PetscFunctionReturn(0);
563 }
564
565 /*@C
566 DMPlexCreateOverlapMigrationSF - Create an SF describing the new point distribution that incorporates the overlap described by the input SF
567
568 Collective on dm
569
570 Input Parameters:
571 + dm - The DM
572 - overlapSF - The SF mapping ghost points in overlap to owner points on other processes
573
574 Output Parameter:
575 . migrationSF - An SF that maps original points in old locations to points in new locations
576
577 Level: developer
578
579 .seealso: DMPlexCreateOverlapLabel(), DMPlexDistributeOverlap(), DMPlexDistribute()
580 @*/
581 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
582 {
583 MPI_Comm comm;
584 PetscMPIInt rank, size;
585 PetscInt d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
586 PetscInt *pointDepths, *remoteDepths, *ilocal;
587 PetscInt *depthRecv, *depthShift, *depthIdx;
588 PetscSFNode *iremote;
589 PetscSF pointSF;
590 const PetscInt *sharedLocal;
591 const PetscSFNode *overlapRemote, *sharedRemote;
592 PetscErrorCode ierr;
593
594 PetscFunctionBegin;
595 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
596 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
597 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
598 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
599 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
600
601 /* Before building the migration SF we need to know the new stratum offsets */
602 ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
603 ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
604 for (d=0; d<dim+1; d++) {
605 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
606 for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
607 }
608 for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
609 ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
610 ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
611
612 /* Count received points in each stratum and compute the internal strata shift */
613 ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
614 for (d=0; d<dim+1; d++) depthRecv[d]=0;
615 for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
616 depthShift[dim] = 0;
617 for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
618 for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
619 for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
620 for (d=0; d<dim+1; d++) {
621 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
622 depthIdx[d] = pStart + depthShift[d];
623 }
624
625 /* Build an SF that describes the full overlap migration */
626 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
627 newLeaves = pEnd - pStart + nleaves;
628 ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
629 ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
630 /* First map local points to themselves */
631 for (d=0; d<dim+1; d++) {
632 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
633 for (p=pStart; p<pEnd; p++) {
634 point = p + depthShift[d];
635 ilocal[point] = point;
636 iremote[point].index = p;
637 iremote[point].rank = rank;
638 depthIdx[d]++;
639 }
640 }
641
642 /* Add in the remote roots for currently shared points */
643 ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
644 ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
645 for (d=0; d<dim+1; d++) {
646 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
647 for (p=0; p<numSharedPoints; p++) {
648 if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
649 point = sharedLocal[p] + depthShift[d];
650 iremote[point].index = sharedRemote[p].index;
651 iremote[point].rank = sharedRemote[p].rank;
652 }
653 }
654 }
655
656 /* Now add the incoming overlap points */
657 for (p=0; p<nleaves; p++) {
658 point = depthIdx[remoteDepths[p]];
659 ilocal[point] = point;
660 iremote[point].index = overlapRemote[p].index;
661 iremote[point].rank = overlapRemote[p].rank;
662 depthIdx[remoteDepths[p]]++;
663 }
664 ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
665
666 ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
667 ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
668 ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
669 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
670 ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
671
672 ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
673 PetscFunctionReturn(0);
674 }
675
676 /*@
677 DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
678
679 Input Parameters:
680 + dm - The DM
681 - sf - A star forest with non-ordered leaves, usually defining a DM point migration
682
683 Output Parameter:
684 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
685
686 Note:
687 This lexicographically sorts by (depth, cellType)
688
689 Level: developer
690
691 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
692 @*/
693 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
694 {
695 MPI_Comm comm;
696 PetscMPIInt rank, size;
697 PetscInt d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
698 PetscSFNode *pointDepths, *remoteDepths;
699 PetscInt *ilocal;
700 PetscInt *depthRecv, *depthShift, *depthIdx;
701 PetscInt *ctRecv, *ctShift, *ctIdx;
702 const PetscSFNode *iremote;
703 PetscErrorCode ierr;
704
705 PetscFunctionBegin;
706 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
707 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
708 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
709 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
710 ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
711 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
712 ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
713 if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);
714 ierr = PetscLogEventBegin(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr);
715
716 /* Before building the migration SF we need to know the new stratum offsets */
717 ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
718 ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
719 for (d = 0; d < depth+1; ++d) {
720 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
721 for (p = pStart; p < pEnd; ++p) {
722 DMPolytopeType ct;
723
724 ierr = DMPlexGetCellType(dm, p, &ct);CHKERRQ(ierr);
725 pointDepths[p].index = d;
726 pointDepths[p].rank = ct;
727 }
728 }
729 for (p = 0; p < nleaves; ++p) {remoteDepths[p].index = -1; remoteDepths[p].rank = -1;}
730 ierr = PetscSFBcastBegin(sf, MPIU_2INT, pointDepths, remoteDepths);CHKERRQ(ierr);
731 ierr = PetscSFBcastEnd(sf, MPIU_2INT, pointDepths, remoteDepths);CHKERRQ(ierr);
732 /* Count received points in each stratum and compute the internal strata shift */
733 ierr = PetscCalloc6(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx);CHKERRQ(ierr);
734 for (p = 0; p < nleaves; ++p) {
735 if (remoteDepths[p].rank < 0) {
736 ++depthRecv[remoteDepths[p].index];
737 } else {
738 ++ctRecv[remoteDepths[p].rank];
739 }
740 }
741 {
742 PetscInt depths[4], dims[4], shift = 0, i, c;
743
744 /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1)
745 Consider DM_POLYTOPE_FV_GHOST and DM_POLYTOPE_INTERIOR_GHOST as cells
746 */
747 depths[0] = depth; depths[1] = 0; depths[2] = depth-1; depths[3] = 1;
748 dims[0] = dim; dims[1] = 0; dims[2] = dim-1; dims[3] = 1;
749 for (i = 0; i <= depth; ++i) {
750 const PetscInt dep = depths[i];
751 const PetscInt dim = dims[i];
752
753 for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
754 if (DMPolytopeTypeGetDim((DMPolytopeType) c) != dim && !(i == 0 && (c == DM_POLYTOPE_FV_GHOST || c == DM_POLYTOPE_INTERIOR_GHOST))) continue;
755 ctShift[c] = shift;
756 shift += ctRecv[c];
757 }
758 depthShift[dep] = shift;
759 shift += depthRecv[dep];
760 }
761 for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
762 const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType) c);
763
764 if ((ctDim < 0 || ctDim > dim) && (c != DM_POLYTOPE_FV_GHOST && c != DM_POLYTOPE_INTERIOR_GHOST)) {
765 ctShift[c] = shift;
766 shift += ctRecv[c];
767 }
768 }
769 }
770 /* Derive a new local permutation based on stratified indices */
771 ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
772 for (p = 0; p < nleaves; ++p) {
773 const PetscInt dep = remoteDepths[p].index;
774 const DMPolytopeType ct = (DMPolytopeType) remoteDepths[p].rank;
775
776 if ((PetscInt) ct < 0) {
777 ilocal[p] = depthShift[dep] + depthIdx[dep];
778 ++depthIdx[dep];
779 } else {
780 ilocal[p] = ctShift[ct] + ctIdx[ct];
781 ++ctIdx[ct];
782 }
783 }
784 ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
785 ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
786 ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
787 ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
788 ierr = PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx);CHKERRQ(ierr);
789 ierr = PetscLogEventEnd(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr);
790 PetscFunctionReturn(0);
791 }
792
793 /*@
794 DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
795
796 Collective on dm
797
798 Input Parameters:
799 + dm - The DMPlex object
800 . pointSF - The PetscSF describing the communication pattern
801 . originalSection - The PetscSection for existing data layout
802 - originalVec - The existing data in a local vector
803
804 Output Parameters:
805 + newSection - The PetscSection describing the new data layout
806 - newVec - The new data in a local vector
807
808 Level: developer
809
810 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
811 @*/
812 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
813 {
814 PetscSF fieldSF;
815 PetscInt *remoteOffsets, fieldSize;
816 PetscScalar *originalValues, *newValues;
817 PetscErrorCode ierr;
818
819 PetscFunctionBegin;
820 ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
821 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
822
823 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
824 ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
825 ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
826
827 ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
828 ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
829 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
830 ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
831 ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
832 ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
833 ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
834 ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
835 ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
836 ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
837 PetscFunctionReturn(0);
838 }
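/* A sketch of a typical call (origSection, origVec, and migrationSF are assumed to exist in the
   caller); newSection is created empty, and newVec's sizes are set by the routine itself. */
#if 0
{
  PetscSection   newSection;
  Vec            newVec;
  PetscErrorCode ierr;

  ierr = PetscSectionCreate(PETSC_COMM_SELF, &newSection);CHKERRQ(ierr);
  ierr = VecCreate(PETSC_COMM_SELF, &newVec);CHKERRQ(ierr);
  ierr = DMPlexDistributeField(dm, migrationSF, origSection, origVec, newSection, newVec);CHKERRQ(ierr);
  /* ... use the redistributed data ... */
  ierr = VecDestroy(&newVec);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&newSection);CHKERRQ(ierr);
}
#endif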
839
840 /*@
841 DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
842
843 Collective on dm
844
845 Input Parameters:
846 + dm - The DMPlex object
847 . pointSF - The PetscSF describing the communication pattern
848 . originalSection - The PetscSection for existing data layout
849 - originalIS - The existing data
850
851 Output Parameters:
852 + newSection - The PetscSection describing the new data layout
853 - newIS - The new data
854
855 Level: developer
856
857 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
858 @*/
859 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
860 {
861 PetscSF fieldSF;
862 PetscInt *newValues, *remoteOffsets, fieldSize;
863 const PetscInt *originalValues;
864 PetscErrorCode ierr;
865
866 PetscFunctionBegin;
867 ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
868 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
869
870 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
871 ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
872
873 ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
874 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
875 ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
876 ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
877 ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
878 ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
879 ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
880 ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
881 ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
882 PetscFunctionReturn(0);
883 }
884
885 /*@
886 DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
887
888 Collective on dm
889
890 Input Parameters:
891 + dm - The DMPlex object
892 . pointSF - The PetscSF describing the communication pattern
893 . originalSection - The PetscSection for existing data layout
894 . datatype - The type of data
895 - originalData - The existing data
896
897 Output Parameters:
898 + newSection - The PetscSection describing the new data layout
899 - newData - The new data
900
901 Level: developer
902
903 .seealso: DMPlexDistribute(), DMPlexDistributeField()
904 @*/
905 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
906 {
907 PetscSF fieldSF;
908 PetscInt *remoteOffsets, fieldSize;
909 PetscMPIInt dataSize;
910 PetscErrorCode ierr;
911
912 PetscFunctionBegin;
913 ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
914 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
915
916 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
917 ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
918 ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
919
920 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
921 ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
922 ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
923 ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
924 ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
925 ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
926 PetscFunctionReturn(0);
927 }
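/* A sketch (origSection, origData, and migrationSF are assumed) of moving raw PetscInt data with
   this routine; any MPI datatype can be used, and the output array must be freed by the caller. */
#if 0
{
  PetscSection   newSection;
  PetscInt      *newData = NULL;
  PetscErrorCode ierr;

  ierr = PetscSectionCreate(PETSC_COMM_SELF, &newSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, migrationSF, origSection, MPIU_INT, origData, newSection, (void **) &newData);CHKERRQ(ierr);
  /* ... consume newData, laid out according to newSection ... */
  ierr = PetscFree(newData);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&newSection);CHKERRQ(ierr);
}
#endif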
928
929 static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
930 {
931 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data;
932 MPI_Comm comm;
933 PetscSF coneSF;
934 PetscSection originalConeSection, newConeSection;
935 PetscInt *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
936 PetscBool flg;
937 PetscErrorCode ierr;
938
939 PetscFunctionBegin;
940 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
941 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
942 ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
943 /* Distribute cone section */
944 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
945 ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
946 ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
947 ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
948 ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
949 {
950 PetscInt pStart, pEnd, p;
951
952 ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
953 for (p = pStart; p < pEnd; ++p) {
954 PetscInt coneSize;
955 ierr = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
956 pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
957 }
958 }
959 /* Communicate and renumber cones */
960 ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
961 ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
962 ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
963 if (original) {
964 PetscInt numCones;
965
966 ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr);
967 ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
968 ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
969 } else {
970 globCones = cones;
971 }
972 ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
973 ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
974 ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
975 if (original) {
976 ierr = PetscFree(globCones);CHKERRQ(ierr);
977 }
978 ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
979 ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
980 if (PetscDefined(USE_DEBUG)) {
981 PetscInt p;
982 PetscBool valid = PETSC_TRUE;
983 for (p = 0; p < newConesSize; ++p) {
984 if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "[%d] Point %D not in overlap SF\n", PetscGlobalRank,p);CHKERRQ(ierr);}
985 }
986 if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
987 }
988 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
989 if (flg) {
990 ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
991 ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
992 ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
993 ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
994 ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
995 }
996 ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
997 ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
998 ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
999 ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1000 ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1001 ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1002 /* Create supports and stratify DMPlex */
1003 {
1004 PetscInt pStart, pEnd;
1005
1006 ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1007 ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1008 }
1009 ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1010 ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1011 {
1012 PetscBool useCone, useClosure, useAnchors;
1013
1014 ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
1015 ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
1016 ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
1017 ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
1018 }
1019 PetscFunctionReturn(0);
1020 }
1021
1022 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1023 {
1024 MPI_Comm comm;
1025 PetscSection originalCoordSection, newCoordSection;
1026 Vec originalCoordinates, newCoordinates;
1027 PetscInt bs;
1028 PetscBool isper;
1029 const char *name;
1030 const PetscReal *maxCell, *L;
1031 const DMBoundaryType *bd;
1032 PetscErrorCode ierr;
1033
1034 PetscFunctionBegin;
1035 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1036 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1037
1038 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1039 ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1040 ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1041 ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1042 if (originalCoordinates) {
1043 ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
1044 ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1045 ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1046
1047 ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1048 ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1049 ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1050 ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1051 ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1052 }
1053 ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
1054 ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
1055 PetscFunctionReturn(0);
1056 }
1057
1058 static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1059 {
1060 DM_Plex *mesh = (DM_Plex*) dm->data;
1061 MPI_Comm comm;
1062 DMLabel depthLabel;
1063 PetscMPIInt rank;
1064 PetscInt depth, d, numLabels, numLocalLabels, l;
1065 PetscBool hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1066 PetscObjectState depthState = -1;
1067 PetscErrorCode ierr;
1068
1069 PetscFunctionBegin;
1070 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1071 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1072
1073 ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1074 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1075 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1076
1077 /* If the user has changed the depth label, communicate it instead */
1078 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1079 ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
1080 if (depthLabel) {ierr = PetscObjectStateGet((PetscObject) depthLabel, &depthState);CHKERRQ(ierr);}
1081 lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1082 ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
1083 if (sendDepth) {
1084 ierr = DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel);CHKERRQ(ierr);
1085 ierr = DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE);CHKERRQ(ierr);
1086 }
1087 /* Everyone must have either the same number of labels, or none */
1088 ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1089 numLabels = numLocalLabels;
1090 ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1091 if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1092 for (l = 0; l < numLabels; ++l) {
1093 DMLabel label = NULL, labelNew = NULL;
1094 PetscBool isDepth, lisOutput = PETSC_TRUE, isOutput;
1095 const char *name = NULL;
1096
1097 if (hasLabels) {
1098 ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1099 /* Skip "depth" because it is recreated */
1100 ierr = PetscObjectGetName((PetscObject) label, &name);CHKERRQ(ierr);
1101 ierr = PetscStrcmp(name, "depth", &isDepth);CHKERRQ(ierr);
1102 }
1103 ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1104 if (isDepth && !sendDepth) continue;
1105 ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1106 if (isDepth) {
1107 /* Put in any missing strata which can occur if users are managing the depth label themselves */
1108 PetscInt gdepth;
1109
1110 ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
1111 if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
1112 for (d = 0; d <= gdepth; ++d) {
1113 PetscBool has;
1114
1115 ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
1116 if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
1117 }
1118 }
1119 ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1120 /* Put the output flag in the new label */
1121 if (hasLabels) {ierr = DMGetLabelOutput(dm, name, &lisOutput);CHKERRQ(ierr);}
1122 ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr);
1123 ierr = PetscObjectGetName((PetscObject) labelNew, &name);CHKERRQ(ierr);
1124 ierr = DMSetLabelOutput(dmParallel, name, isOutput);CHKERRQ(ierr);
1125 ierr = DMLabelDestroy(&labelNew);CHKERRQ(ierr);
1126 }
1127 ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1128 PetscFunctionReturn(0);
1129 }
1130
1131 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1132 {
1133 DM_Plex *mesh = (DM_Plex*) dm->data;
1134 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data;
1135 MPI_Comm comm;
1136 DM refTree;
1137 PetscSection origParentSection, newParentSection;
1138 PetscInt *origParents, *origChildIDs;
1139 PetscBool flg;
1140 PetscErrorCode ierr;
1141
1142 PetscFunctionBegin;
1143 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1144 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1145 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1146
1147 /* Set up tree */
1148 ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1149 ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1150 ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1151 if (origParentSection) {
1152 PetscInt pStart, pEnd;
1153 PetscInt *newParents, *newChildIDs, *globParents;
1154 PetscInt *remoteOffsetsParents, newParentSize;
1155 PetscSF parentSF;
1156
1157 ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1158 ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1159 ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1160 ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1161 ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1162 ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1163 ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1164 ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1165 if (original) {
1166 PetscInt numParents;
1167
1168 ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1169 ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1170 ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1171 }
1172 else {
1173 globParents = origParents;
1174 }
1175 ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1176 ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1177 if (original) {
1178 ierr = PetscFree(globParents);CHKERRQ(ierr);
1179 }
1180 ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1181 ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1182 ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1183 if (PetscDefined(USE_DEBUG)) {
1184 PetscInt p;
1185 PetscBool valid = PETSC_TRUE;
1186 for (p = 0; p < newParentSize; ++p) {
1187 if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
1188 }
1189 if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1190 }
1191 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1192 if (flg) {
1193 ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1194 ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1195 ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1196 ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1197 ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1198 }
1199 ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1200 ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1201 ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1202 ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1203 }
1204 pmesh->useAnchors = mesh->useAnchors;
1205 PetscFunctionReturn(0);
1206 }
1207
1208 PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1209 {
1210 PetscMPIInt rank, size;
1211 MPI_Comm comm;
1212 PetscErrorCode ierr;
1213
1214 PetscFunctionBegin;
1215 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1216 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1217
1218 /* Create point SF for parallel mesh */
1219 ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1220 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1221 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1222 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1223 {
1224 const PetscInt *leaves;
1225 PetscSFNode *remotePoints, *rowners, *lowners;
1226 PetscInt numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1227 PetscInt pStart, pEnd;
1228
1229 ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1230 ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1231 ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1232 for (p=0; p<numRoots; p++) {
1233 rowners[p].rank = -1;
1234 rowners[p].index = -1;
1235 }
1236 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1237 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1238 for (p = 0; p < numLeaves; ++p) {
1239 if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1240 lowners[p].rank = rank;
1241 lowners[p].index = leaves ? leaves[p] : p;
1242 } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC does not listen to us */
1243 lowners[p].rank = -2;
1244 lowners[p].index = -2;
1245 }
1246 }
1247 for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction; flag them so that MAXLOC ignores them */
1248 rowners[p].rank = -3;
1249 rowners[p].index = -3;
1250 }
1251 ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1252 ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1253 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1254 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1255 for (p = 0; p < numLeaves; ++p) {
1256 if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1257 if (lowners[p].rank != rank) ++numGhostPoints;
1258 }
1259 ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1260 ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1261 for (p = 0, gp = 0; p < numLeaves; ++p) {
1262 if (lowners[p].rank != rank) {
1263 ghostPoints[gp] = leaves ? leaves[p] : p;
1264 remotePoints[gp].rank = lowners[p].rank;
1265 remotePoints[gp].index = lowners[p].index;
1266 ++gp;
1267 }
1268 }
1269 ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1270 ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1271 ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1272 }
1273 {
1274 PetscBool useCone, useClosure, useAnchors;
1275
1276 ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
1277 ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
1278 ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
1279 ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
1280 }
1281 ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1282 PetscFunctionReturn(0);
1283 }
1284
1285 /*@
1286 DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1287
1288 Input Parameters:
1289 + dm - The DMPlex object
1290 - flg - Balance the partition?
1291
1292 Level: intermediate
1293
1294 .seealso: DMPlexDistribute(), DMPlexGetPartitionBalance()
1295 @*/
1296 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1297 {
1298 DM_Plex *mesh = (DM_Plex *)dm->data;
1299
1300 PetscFunctionBegin;
1301 mesh->partitionBalance = flg;
1302 PetscFunctionReturn(0);
1303 }
1304
1305 /*@
1306 DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1307
1308 Input Parameter:
1309 . dm - The DMPlex object
1310
1311 Output Parameter:
1312 . flg - Balance the partition?
1313
1314 Level: intermediate
1315
1316 .seealso: DMPlexDistribute(), DMPlexSetPartitionBalance()
1317 @*/
1318 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1319 {
1320 DM_Plex *mesh = (DM_Plex *)dm->data;
1321
1322 PetscFunctionBegin;
1323 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1324 PetscValidBoolPointer(flg, 2);
1325 *flg = mesh->partitionBalance;
1326 PetscFunctionReturn(0);
1327 }
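/* Usage sketch (illustrative only): enable balanced ownership of shared points
   before distributing.  dm, dmDist and sfDist are hypothetical names; ierr and
   CHKERRQ error checking follow the conventions used throughout this file.

     ierr = DMPlexSetPartitionBalance(dm, PETSC_TRUE);CHKERRQ(ierr);
     ierr = DMPlexDistribute(dm, 0, &sfDist, &dmDist);CHKERRQ(ierr);
*/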
1328
1329 typedef struct {
1330 PetscInt vote, rank, index;
1331 } Petsc3Int;
1332
1333 /* MaxLoc, but carry a third piece of information around */
1334 static void MPIAPI MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1335 {
1336 Petsc3Int *a = (Petsc3Int *)inout_;
1337 Petsc3Int *b = (Petsc3Int *)in_;
1338 PetscInt i, len = *len_;
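  /* Combine entries elementwise: keep the one with the larger vote; on a tie,
     keep the lower rank so the result does not depend on reduction order. */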
1339 for (i = 0; i < len; i++) {
1340 if (a[i].vote < b[i].vote) {
1341 a[i].vote = b[i].vote;
1342 a[i].rank = b[i].rank;
1343 a[i].index = b[i].index;
1344 } else if (a[i].vote <= b[i].vote) {
1345 if (a[i].rank >= b[i].rank) {
1346 a[i].rank = b[i].rank;
1347 a[i].index = b[i].index;
1348 }
1349 }
1350 }
1351 }
1352
1353 /*@C
1354 DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1355
1356 Input Parameters:
1357 + dm - The source DMPlex object
1358 . migrationSF - The star forest that describes the parallel point remapping
1359 - ownership - Flag causing a vote to determine point ownership
1360
1361 Output Parameter:
1362 . pointSF - The star forest describing the point overlap in the remapped DM
1363
1364 Notes:
1365 Output pointSF is guaranteed to have the array of local indices (leaves) sorted.
1366
1367 Level: developer
1368
1369 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1370 @*/
1371 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1372 {
1373 PetscMPIInt rank, size;
1374 PetscInt p, nroots, nleaves, idx, npointLeaves;
1375 PetscInt *pointLocal;
1376 const PetscInt *leaves;
1377 const PetscSFNode *roots;
1378 PetscSFNode *rootNodes, *leafNodes, *pointRemote;
1379 Vec shifts;
1380 const PetscInt numShifts = 13759;
1381 const PetscScalar *shift = NULL;
1382 const PetscBool shiftDebug = PETSC_FALSE;
1383 PetscBool balance;
1384 PetscErrorCode ierr;
1385
1386 PetscFunctionBegin;
1387 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1388 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1389 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1390 ierr = PetscLogEventBegin(DMPLEX_CreatePointSF,dm,0,0,0);CHKERRQ(ierr);
1391
1392 ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
1393 ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
1394 ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
1395 if (ownership) {
1396 MPI_Op op;
1397 MPI_Datatype datatype;
1398 Petsc3Int *rootVote = NULL, *leafVote = NULL;
1399 /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
1400 if (balance) {
1401 PetscRandom r;
1402
1403 ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1404 ierr = PetscRandomSetInterval(r, 0, 2467*size);CHKERRQ(ierr);
1405 ierr = VecCreate(PETSC_COMM_SELF, &shifts);CHKERRQ(ierr);
1406 ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr);
1407 ierr = VecSetType(shifts, VECSTANDARD);CHKERRQ(ierr);
1408 ierr = VecSetRandom(shifts, r);CHKERRQ(ierr);
1409 ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1410 ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr);
1411 }
1412
1413 ierr = PetscMalloc1(nroots, &rootVote);CHKERRQ(ierr);
1414 ierr = PetscMalloc1(nleaves, &leafVote);CHKERRQ(ierr);
1415     /* Point ownership vote: the process with the highest vote (its rank, optionally cyclically shifted for balance) owns shared points */
1416 for (p = 0; p < nleaves; ++p) {
1417 if (shiftDebug) {
1418 ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr);
1419 }
1420 /* Either put in a bid or we know we own it */
1421 leafVote[p].vote = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size;
1422 leafVote[p].rank = rank;
1423 leafVote[p].index = p;
1424 }
1425 for (p = 0; p < nroots; p++) {
1426 /* Root must not participate in the reduction, flag so that MAXLOC does not use */
1427 rootVote[p].vote = -3;
1428 rootVote[p].rank = -3;
1429 rootVote[p].index = -3;
1430 }
1431 ierr = MPI_Type_contiguous(3, MPIU_INT, &datatype);CHKERRQ(ierr);
1432 ierr = MPI_Type_commit(&datatype);CHKERRQ(ierr);
1433 ierr = MPI_Op_create(&MaxLocCarry, 1, &op);CHKERRQ(ierr);
1434 ierr = PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op);CHKERRQ(ierr);
1435 ierr = PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op);CHKERRQ(ierr);
1436 ierr = MPI_Op_free(&op);CHKERRQ(ierr);
1437 ierr = MPI_Type_free(&datatype);CHKERRQ(ierr);
1438 for (p = 0; p < nroots; p++) {
1439 rootNodes[p].rank = rootVote[p].rank;
1440 rootNodes[p].index = rootVote[p].index;
1441 }
1442 ierr = PetscFree(leafVote);CHKERRQ(ierr);
1443 ierr = PetscFree(rootVote);CHKERRQ(ierr);
1444 } else {
1445 for (p = 0; p < nroots; p++) {
1446 rootNodes[p].index = -1;
1447 rootNodes[p].rank = rank;
1448 }
1449 for (p = 0; p < nleaves; p++) {
1450 /* Write new local id into old location */
1451 if (roots[p].rank == rank) {
1452 rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1453 }
1454 }
1455 }
1456 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1457 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1458
1459 for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1460 if (leafNodes[p].rank != rank) npointLeaves++;
1461 }
1462 ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
1463 ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
1464 for (idx = 0, p = 0; p < nleaves; p++) {
1465 if (leafNodes[p].rank != rank) {
1466       /* Note that pointLocal is automatically sorted as it is a sublist of 0, ..., nleaves-1 */
1467 pointLocal[idx] = p;
1468 pointRemote[idx] = leafNodes[p];
1469 idx++;
1470 }
1471 }
1472 if (shift) {
1473 ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr);
1474 ierr = VecDestroy(&shifts);CHKERRQ(ierr);
1475 }
1476 if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr);}
1477 ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
1478 ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
1479 ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1480 ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
1481 ierr = PetscLogEventEnd(DMPLEX_CreatePointSF,dm,0,0,0);CHKERRQ(ierr);
1482 PetscFunctionReturn(0);
1483 }
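/* Usage sketch (developer-level, illustrative only): after migrating a mesh over
   a migration SF, build and attach the point SF of the new mesh.  dmNew and
   migrationSF are hypothetical names.

     PetscSF pointSF;

     ierr = DMPlexCreatePointSF(dmNew, migrationSF, PETSC_TRUE, &pointSF);CHKERRQ(ierr);
     ierr = DMSetPointSF(dmNew, pointSF);CHKERRQ(ierr);
     ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
*/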
1484
1485 /*@C
1486 DMPlexMigrate - Migrates internal DM data over the supplied star forest
1487
1488 Collective on dm
1489
1490 Input Parameters:
1491 + dm - The source DMPlex object
1492 - sf - The star forest communication context describing the migration pattern
1493
1494 Output Parameter:
1495 . targetDM - The target DMPlex object
1496
1497 Level: intermediate
1498
1499 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1500 @*/
1501 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1502 {
1503 MPI_Comm comm;
1504 PetscInt dim, cdim, nroots;
1505 PetscSF sfPoint;
1506 ISLocalToGlobalMapping ltogMigration;
1507 ISLocalToGlobalMapping ltogOriginal = NULL;
1508 PetscBool flg;
1509 PetscErrorCode ierr;
1510
1511 PetscFunctionBegin;
1512 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1513 ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1514 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1515 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1516 ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1517 ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
1518 ierr = DMSetCoordinateDim(targetDM, cdim);CHKERRQ(ierr);
1519
1520   /* Check whether the source mesh is already distributed (its point SF is set); otherwise this is a one-to-all distribution */
1521 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1522 ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1523 if (nroots >= 0) {
1524 IS isOriginal;
1525 PetscInt n, size, nleaves;
1526 PetscInt *numbering_orig, *numbering_new;
1527
1528 /* Get the original point numbering */
1529 ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1530     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1531 ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1532 ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1533 /* Convert to positive global numbers */
1534 for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1535 /* Derive the new local-to-global mapping from the old one */
1536 ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1537 ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1538 ierr = PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new);CHKERRQ(ierr);
1539 ierr = PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new);CHKERRQ(ierr);
1540     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1541     ierr = ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1542 ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1543 } else {
1544 /* One-to-all distribution pattern: We can derive LToG from SF */
1545     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1546 }
1547 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1548 if (flg) {
1549 ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1550 ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1551 }
1552 /* Migrate DM data to target DM */
1553 ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1554 ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1555 ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1556 ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1557   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1558   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1559 ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1560 PetscFunctionReturn(0);
1561 }
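/* Usage sketch (illustrative only): migrate a mesh over an existing migration SF
   into a freshly created DMPlex on the same communicator.  dm, sfMigration and
   dmTarget are hypothetical names.

     DM dmTarget;

     ierr = DMPlexCreate(PetscObjectComm((PetscObject) dm), &dmTarget);CHKERRQ(ierr);
     ierr = DMPlexMigrate(dm, sfMigration, dmTarget);CHKERRQ(ierr);
*/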
1562
1563 /*@C
1564 DMPlexDistribute - Distributes the mesh and any associated sections.
1565
1566 Collective on dm
1567
1568 Input Parameters:
1569 + dm - The original DMPlex object
1570 - overlap - The overlap of partitions, 0 is the default
1571
1572 Output Parameters:
1573 + sf - The PetscSF used for point distribution, or NULL if not needed
1574 - dmParallel - The distributed DMPlex object
1575
1576 Note: If the mesh was not distributed, the output dmParallel will be NULL.
1577
1578 The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1579 representation on the mesh.
1580
1581 Level: intermediate
1582
1583 .seealso: DMPlexCreate(), DMSetAdjacency(), DMPlexGetOverlap()
1584 @*/
1585 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1586 {
1587 MPI_Comm comm;
1588 PetscPartitioner partitioner;
1589 IS cellPart;
1590 PetscSection cellPartSection;
1591 DM dmCoord;
1592 DMLabel lblPartition, lblMigration;
1593 PetscSF sfMigration, sfStratified, sfPoint;
1594 PetscBool flg, balance;
1595 PetscMPIInt rank, size;
1596 PetscErrorCode ierr;
1597
1598 PetscFunctionBegin;
1599 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1600 PetscValidLogicalCollectiveInt(dm, overlap, 2);
1601 if (sf) PetscValidPointer(sf,3);
1602 PetscValidPointer(dmParallel,4);
1603
1604 if (sf) *sf = NULL;
1605 *dmParallel = NULL;
1606 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1607 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1608 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1609 if (size == 1) PetscFunctionReturn(0);
1610
1611 ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1612 /* Create cell partition */
1613 ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1614 ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1615 ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1616 ierr = PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart);CHKERRQ(ierr);
1617 ierr = PetscLogEventBegin(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr);
1618 {
1619 /* Convert partition to DMLabel */
1620 IS is;
1621 PetscHSetI ht;
1622 const PetscInt *points;
1623 PetscInt *iranks;
1624 PetscInt pStart, pEnd, proc, npoints, poff = 0, nranks;
1625
1626 ierr = DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition);CHKERRQ(ierr);
1627 /* Preallocate strata */
1628 ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
1629 ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1630 for (proc = pStart; proc < pEnd; proc++) {
1631 ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1632 if (npoints) {ierr = PetscHSetIAdd(ht, proc);CHKERRQ(ierr);}
1633 }
1634 ierr = PetscHSetIGetSize(ht, &nranks);CHKERRQ(ierr);
1635 ierr = PetscMalloc1(nranks, &iranks);CHKERRQ(ierr);
1636 ierr = PetscHSetIGetElems(ht, &poff, iranks);CHKERRQ(ierr);
1637 ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
1638 ierr = DMLabelAddStrata(lblPartition, nranks, iranks);CHKERRQ(ierr);
1639 ierr = PetscFree(iranks);CHKERRQ(ierr);
1640 /* Inline DMPlexPartitionLabelClosure() */
1641 ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1642 ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1643 for (proc = pStart; proc < pEnd; proc++) {
1644 ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1645 if (!npoints) continue;
1646 ierr = PetscSectionGetOffset(cellPartSection, proc, &poff);CHKERRQ(ierr);
1647 ierr = DMPlexClosurePoints_Private(dm, npoints, points+poff, &is);CHKERRQ(ierr);
1648 ierr = DMLabelSetStratumIS(lblPartition, proc, is);CHKERRQ(ierr);
1649 ierr = ISDestroy(&is);CHKERRQ(ierr);
1650 }
1651 ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1652 }
1653 ierr = PetscLogEventEnd(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr);
1654
1655 ierr = DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration);CHKERRQ(ierr);
1656 ierr = DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration);CHKERRQ(ierr);
1657 ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1658 ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1659 ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1660 sfMigration = sfStratified;
1661 ierr = PetscSFSetUp(sfMigration);CHKERRQ(ierr);
1662 ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1663 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1664 if (flg) {
1665 ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1666 ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1667 }
1668
1669 /* Create non-overlapping parallel DM and migrate internal data */
1670 ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1671 ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1672 ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1673
1674 /* Build the point SF without overlap */
1675 ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
1676 ierr = DMPlexSetPartitionBalance(*dmParallel, balance);CHKERRQ(ierr);
1677 ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1678 ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1679 ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1680 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1681 if (flg) {ierr = PetscSFView(sfPoint, NULL);CHKERRQ(ierr);}
1682
1683 if (overlap > 0) {
1684 DM dmOverlap;
1685 PetscInt nroots, nleaves, noldleaves, l;
1686 const PetscInt *oldLeaves;
1687 PetscSFNode *newRemote, *permRemote;
1688 const PetscSFNode *oldRemote;
1689 PetscSF sfOverlap, sfOverlapPoint;
1690
1691 /* Add the partition overlap to the distributed DM */
1692 ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1693 ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1694 *dmParallel = dmOverlap;
1695 if (flg) {
1696 ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1697 ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1698 }
1699
1700 /* Re-map the migration SF to establish the full migration pattern */
1701 ierr = PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote);CHKERRQ(ierr);
1702 ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1703 ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1704 /* oldRemote: original root point mapping to original leaf point
1705 newRemote: original leaf point mapping to overlapped leaf point */
1706 if (oldLeaves) {
1707 /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
1708 ierr = PetscMalloc1(noldleaves, &permRemote);CHKERRQ(ierr);
1709 for (l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
1710 oldRemote = permRemote;
1711 }
1712 ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1713 ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1714 if (oldLeaves) {ierr = PetscFree(oldRemote);CHKERRQ(ierr);}
1715 ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1716 ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1717 ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1718 ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1719 sfMigration = sfOverlapPoint;
1720 }
1721 /* Cleanup Partition */
1722 ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1723 ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1724 ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1725 ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1726 /* Copy BC */
1727 ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1728 /* Create sfNatural */
1729 if (dm->useNatural) {
1730 PetscSection section;
1731
1732     ierr = DMGetLocalSection(dm, &section);CHKERRQ(ierr);
1733 ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1734 ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
1735 }
1736 /* Cleanup */
1737 if (sf) {*sf = sfMigration;}
1738 else {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1739 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1740 ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1741 PetscFunctionReturn(0);
1742 }
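/* Usage sketch (illustrative only): the common redistribution idiom.  dm is an
   existing mesh; dmDist replaces it when distribution actually happened (it is
   NULL on a single-rank communicator).

     DM      dmDist = NULL;
     PetscSF sfDist = NULL;

     ierr = DMPlexDistribute(dm, 0, &sfDist, &dmDist);CHKERRQ(ierr);
     if (dmDist) {
       ierr = DMDestroy(&dm);CHKERRQ(ierr);
       dm   = dmDist;
     }
     ierr = PetscSFDestroy(&sfDist);CHKERRQ(ierr);
*/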
1743
1744 /*@C
1745 DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1746
1747 Collective on dm
1748
1749 Input Parameters:
1750 + dm - The non-overlapping distributed DMPlex object
1751 - overlap - The overlap of partitions (the same on all ranks)
1752
1753 Output Parameters:
1754 + sf - The PetscSF used for point distribution
1755 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1756
1757 Notes:
1758 If the mesh was not distributed, the return value is NULL.
1759
1760 The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1761 representation on the mesh.
1762
1763 Level: advanced
1764
1765 .seealso: DMPlexCreate(), DMSetAdjacency(), DMPlexDistribute(), DMPlexCreateOverlapLabel(), DMPlexGetOverlap()
1766 @*/
1767 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1768 {
1769 MPI_Comm comm;
1770 PetscMPIInt size, rank;
1771 PetscSection rootSection, leafSection;
1772 IS rootrank, leafrank;
1773 DM dmCoord;
1774 DMLabel lblOverlap;
1775 PetscSF sfOverlap, sfStratified, sfPoint;
1776 PetscErrorCode ierr;
1777
1778 PetscFunctionBegin;
1779 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1780 PetscValidLogicalCollectiveInt(dm, overlap, 2);
1781 if (sf) PetscValidPointer(sf, 3);
1782 PetscValidPointer(dmOverlap, 4);
1783
1784 if (sf) *sf = NULL;
1785 *dmOverlap = NULL;
1786 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1787 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1788 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1789 if (size == 1) PetscFunctionReturn(0);
1790
1791 ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1792 /* Compute point overlap with neighbouring processes on the distributed DM */
1793 ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1794 ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1795 ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1796 ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1797 ierr = DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
1798 /* Convert overlap label to stratified migration SF */
1799 ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
1800 ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
1801 ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1802 sfOverlap = sfStratified;
1803 ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
1804 ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
1805
1806 ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1807 ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1808 ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1809 ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1810 ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1811
1812 /* Build the overlapping DM */
1813 ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1814 ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1815 ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
1816 /* Store the overlap in the new DM */
1817 ((DM_Plex*)(*dmOverlap)->data)->overlap = overlap + ((DM_Plex*)dm->data)->overlap;
1818 /* Build the new point SF */
1819 ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1820 ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
1821 ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
1822 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1823 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1824 /* Cleanup overlap partition */
1825 ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
1826 if (sf) *sf = sfOverlap;
1827 else {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
1828 ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1829 PetscFunctionReturn(0);
1830 }
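/* Usage sketch (illustrative only): add one level of overlap to an already
   distributed, non-overlapping mesh.  dm and dmOver are hypothetical names;
   passing NULL for the SF discards the overlap migration SF.

     DM dmOver = NULL;

     ierr = DMPlexDistributeOverlap(dm, 1, NULL, &dmOver);CHKERRQ(ierr);
     if (dmOver) {
       ierr = DMDestroy(&dm);CHKERRQ(ierr);
       dm   = dmOver;
     }
*/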
1831
1832 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
1833 {
1834 DM_Plex *mesh = (DM_Plex*) dm->data;
1835
1836 PetscFunctionBegin;
1837 *overlap = mesh->overlap;
1838 PetscFunctionReturn(0);
1839 }
1840
1841 /*@
1842 DMPlexGetOverlap - Get the DMPlex partition overlap.
1843
1844 Not collective
1845
1846 Input Parameter:
1847 . dm - The DM
1848
1849 Output Parameter:
1850 . overlap - The overlap of this DM
1851
1852 Level: intermediate
1853
1854 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap(), DMPlexCreateOverlapLabel()
1855 @*/
1856 PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
1857 {
1858 PetscErrorCode ierr;
1859
1860 PetscFunctionBegin;
1861 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1862 ierr = PetscUseMethod(dm,"DMPlexGetOverlap_C",(DM,PetscInt*),(dm,overlap));CHKERRQ(ierr);
1863 PetscFunctionReturn(0);
1864 }
1865
1866
1867 /*@C
1868 DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1869 root process of the original's communicator.
1870
1871 Collective on dm
1872
1873 Input Parameter:
1874 . dm - the original DMPlex object
1875
1876 Output Parameters:
1877 + sf - the PetscSF used for point distribution (optional)
1878 - gatherMesh - the gathered DM object, or NULL
1879
1880 Level: intermediate
1881
1882 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1883 @*/
1884 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
1885 {
1886 MPI_Comm comm;
1887 PetscMPIInt size;
1888 PetscPartitioner oldPart, gatherPart;
1889 PetscErrorCode ierr;
1890
1891 PetscFunctionBegin;
1892 PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1893   PetscValidPointer(gatherMesh,3);
1894 *gatherMesh = NULL;
1895 if (sf) *sf = NULL;
1896 comm = PetscObjectComm((PetscObject)dm);
1897 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1898 if (size == 1) PetscFunctionReturn(0);
1899 ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1900 ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1901 ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1902 ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1903 ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1904 ierr = DMPlexDistribute(dm,0,sf,gatherMesh);CHKERRQ(ierr);
1905
1906 ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1907 ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1908 ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1909 PetscFunctionReturn(0);
1910 }
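/* Usage sketch (illustrative only): gather the whole mesh onto the root rank,
   e.g. for serial post-processing.  dm and dmGather are hypothetical names;
   dmGather stays NULL on a single-rank communicator.

     DM dmGather = NULL;

     ierr = DMPlexGetGatherDM(dm, NULL, &dmGather);CHKERRQ(ierr);
     if (dmGather) {ierr = DMDestroy(&dmGather);CHKERRQ(ierr);}
*/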
1911
1912 /*@C
1913 DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1914
1915 Collective on dm
1916
1917 Input Parameter:
1918 . dm - the original DMPlex object
1919
1920 Output Parameters:
1921 + sf - the PetscSF used for point distribution (optional)
1922 - redundantMesh - the redundant DM object, or NULL
1923
1924 Level: intermediate
1925
1926 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1927 @*/
1928 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
1929 {
1930 MPI_Comm comm;
1931 PetscMPIInt size, rank;
1932 PetscInt pStart, pEnd, p;
1933 PetscInt numPoints = -1;
1934 PetscSF migrationSF, sfPoint, gatherSF;
1935 DM gatherDM, dmCoord;
1936 PetscSFNode *points;
1937 PetscErrorCode ierr;
1938
1939 PetscFunctionBegin;
1940 PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1941   PetscValidPointer(redundantMesh,3);
1942 *redundantMesh = NULL;
1943 comm = PetscObjectComm((PetscObject)dm);
1944 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1945 if (size == 1) {
1946 ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1947 *redundantMesh = dm;
1948 if (sf) *sf = NULL;
1949 PetscFunctionReturn(0);
1950 }
1951 ierr = DMPlexGetGatherDM(dm,&gatherSF,&gatherDM);CHKERRQ(ierr);
1952 if (!gatherDM) PetscFunctionReturn(0);
1953 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1954 ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1955 numPoints = pEnd - pStart;
1956 ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
1957 ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1958 ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
1959 for (p = 0; p < numPoints; p++) {
1960 points[p].index = p;
1961 points[p].rank = 0;
1962 }
1963 ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1964 ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1965 ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1966 ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1967 ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1968 ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1969 ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1970 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1971 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1972 if (sf) {
1973 PetscSF tsf;
1974
1975 ierr = PetscSFCompose(gatherSF,migrationSF,&tsf);CHKERRQ(ierr);
1976 ierr = DMPlexStratifyMigrationSF(dm, tsf, sf);CHKERRQ(ierr);
1977 ierr = PetscSFDestroy(&tsf);CHKERRQ(ierr);
1978 }
1979 ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1980 ierr = PetscSFDestroy(&gatherSF);CHKERRQ(ierr);
1981 ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1982 PetscFunctionReturn(0);
1983 }
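/* Usage sketch (illustrative only): obtain a complete copy of the mesh on every
   rank.  dm and dmRedundant are hypothetical names; on one rank the original dm
   is returned with an extra reference.

     DM dmRedundant = NULL;

     ierr = DMPlexGetRedundantDM(dm, NULL, &dmRedundant);CHKERRQ(ierr);
     if (dmRedundant) {ierr = DMDestroy(&dmRedundant);CHKERRQ(ierr);}
*/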
1984
1985 /*@
1986 DMPlexIsDistributed - Find out whether this DM is distributed, i.e. more than one rank owns some points.
1987
1988 Collective
1989
1990 Input Parameter:
1991 . dm - The DM object
1992
1993 Output Parameter:
1994 . distributed - Flag whether the DM is distributed
1995
1996 Level: intermediate
1997
1998 Notes:
1999 This currently finds out whether at least two ranks have any DAG points.
2000 This involves MPI_Allreduce() with one integer.
2001 The result is currently not stashed so every call to this routine involves this global communication.
2002
2003 .seealso: DMPlexDistribute(), DMPlexGetOverlap(), DMPlexIsInterpolated()
2004 @*/
2005 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
2006 {
2007 PetscInt pStart, pEnd, count;
2008 MPI_Comm comm;
2009 PetscErrorCode ierr;
2010
2011 PetscFunctionBegin;
2012 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2013 PetscValidPointer(distributed,2);
2014 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
2015 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2016 count = !!(pEnd - pStart);
2017 ierr = MPI_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
2018 *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2019 PetscFunctionReturn(0);
2020 }
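/* Usage sketch (illustrative only): distribute only if the mesh is not already
   distributed.  dm and dmDist are hypothetical names.

     PetscBool distributed;

     ierr = DMPlexIsDistributed(dm, &distributed);CHKERRQ(ierr);
     if (!distributed) {
       DM dmDist = NULL;

       ierr = DMPlexDistribute(dm, 0, NULL, &dmDist);CHKERRQ(ierr);
       if (dmDist) {
         ierr = DMDestroy(&dm);CHKERRQ(ierr);
         dm   = dmDist;
       }
     }
*/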
2021