/*
   This file contains routines for basic map object implementation.
*/

#include <petscis.h> /*I "petscis.h" I*/
#include <petscsf.h>
#include <petsc/private/isimpl.h>

/*@
   PetscLayoutCreate - Allocates PetscLayout space and sets the PetscLayout contents to the default.

   Collective

   Input Parameter:
.  comm - the MPI communicator

   Output Parameter:
.  map - the new PetscLayout

   Level: advanced

   Notes:
   Typical calling sequence
.vb
   PetscLayoutCreate(MPI_Comm,PetscLayout *);
   PetscLayoutSetBlockSize(PetscLayout,bs);
   PetscLayoutSetSize(PetscLayout,N); // or PetscLayoutSetLocalSize(PetscLayout,n);
   PetscLayoutSetUp(PetscLayout);
.ve
   Alternatively,
$  PetscLayoutCreateFromSizes(comm,n,N,bs,&layout);

   Optionally use any of the following:

+  PetscLayoutGetSize(PetscLayout,PetscInt *);
.  PetscLayoutGetLocalSize(PetscLayout,PetscInt *);
.  PetscLayoutGetRange(PetscLayout,PetscInt *rstart,PetscInt *rend);
.  PetscLayoutGetRanges(PetscLayout,const PetscInt *range[]);
-  PetscLayoutDestroy(PetscLayout*);

   The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementations; they are
   often not needed in user code unless the user gains something concrete from their use.

.seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp(),
          PetscLayoutCreateFromSizes()

@*/
PetscErrorCode PetscLayoutCreate(MPI_Comm comm,PetscLayout *map)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscNew(map);CHKERRQ(ierr);

  (*map)->comm        = comm;
  (*map)->bs          = -1;
  (*map)->n           = -1;
  (*map)->N           = -1;
  (*map)->range       = NULL;
  (*map)->range_alloc = PETSC_TRUE;
  (*map)->rstart      = 0;
  (*map)->rend        = 0;
  (*map)->setupcalled = PETSC_FALSE;
  (*map)->oldn        = -1;
  (*map)->oldN        = -1;
  (*map)->oldbs       = -1;
  PetscFunctionReturn(0);
}

/*@
   PetscLayoutCreateFromSizes - Allocates PetscLayout space, sets the layout sizes, and sets the layout up.

   Collective

   Input Parameters:
+  comm - the MPI communicator
.  n - the local size (or PETSC_DECIDE)
.  N - the global size (or PETSC_DECIDE)
-  bs - the block size (or PETSC_DECIDE)

   Output Parameter:
.  map - the new PetscLayout

   Level: advanced

   Notes:
$  PetscLayoutCreateFromSizes(comm,n,N,bs,&layout);
   is a shorthand for
.vb
   PetscLayoutCreate(comm,&layout);
   PetscLayoutSetLocalSize(layout,n);
   PetscLayoutSetSize(layout,N);
   PetscLayoutSetBlockSize(layout,bs);
   PetscLayoutSetUp(layout);
.ve

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp(), PetscLayoutCreateFromRanges()

@*/
PetscErrorCode PetscLayoutCreateFromSizes(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt bs,PetscLayout *map)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLayoutCreate(comm, map);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(*map, n);CHKERRQ(ierr);
  ierr = PetscLayoutSetSize(*map, N);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(*map, bs);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(*map);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PetscLayoutDestroy - Frees a map object and frees its range if it exists.

   Collective

   Input Parameter:
.  map - the PetscLayout

   Level: developer

   Note:
   The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementations; it is
   recommended they not be used in user code unless the user gains something concrete from their use.

.seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutCreate(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp()

@*/
PetscErrorCode PetscLayoutDestroy(PetscLayout *map)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*map) PetscFunctionReturn(0);
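  /* refcnt is 0 for an unshared layout, so the storage is freed only once no references remain */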
  if (!(*map)->refcnt--) {
    if ((*map)->range_alloc) {ierr = PetscFree((*map)->range);CHKERRQ(ierr);}
    ierr = ISLocalToGlobalMappingDestroy(&(*map)->mapping);CHKERRQ(ierr);
    ierr = PetscFree((*map));CHKERRQ(ierr);
  }
  *map = NULL;
  PetscFunctionReturn(0);
}

/*@
   PetscLayoutCreateFromRanges - Creates a new PetscLayout with the given ownership ranges and sets it up.

   Collective

   Input Parameters:
+  comm - the MPI communicator
.  range - the array of ownership ranges for each rank, of length size+1 where size is the size of comm
.  mode - the copy mode for range
-  bs - the block size (or PETSC_DECIDE)

   Output Parameter:
.  newmap - the new PetscLayout

   Level: developer

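   Notes:
   A minimal usage sketch, assuming a communicator of four ranks that each own ten entries:
.vb
   PetscInt    range[5] = {0,10,20,30,40};
   PetscLayout map;
   PetscLayoutCreateFromRanges(comm,range,PETSC_COPY_VALUES,1,&map);
.ve
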
.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp(), PetscLayoutCreateFromSizes()

@*/
PetscErrorCode PetscLayoutCreateFromRanges(MPI_Comm comm,const PetscInt range[],PetscCopyMode mode,PetscInt bs,PetscLayout *newmap)
{
  PetscLayout    map;
  PetscMPIInt    rank,size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscLayoutCreate(comm, &map);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(map, bs);CHKERRQ(ierr);
  switch (mode) {
  case PETSC_COPY_VALUES:
    ierr = PetscMalloc1(size+1, &map->range);CHKERRQ(ierr);
    ierr = PetscArraycpy(map->range, range, size+1);CHKERRQ(ierr);
    break;
  case PETSC_USE_POINTER:
    map->range_alloc = PETSC_FALSE;
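    /* fall through: PETSC_USE_POINTER and PETSC_OWN_POINTER both use the caller's array in place */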
  default:
    map->range = (PetscInt*) range;
    break;
  }
  map->rstart = map->range[rank];
  map->rend   = map->range[rank+1];
  map->n      = map->rend - map->rstart;
  map->N      = map->range[size];
  if (PetscDefined(USE_DEBUG)) { /* just check that n, N and bs are consistent */
    PetscInt tmp;
    ierr = MPIU_Allreduce(&map->n,&tmp,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    if (tmp != map->N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D.\nThe provided PetscLayout is wrong.",tmp,map->N,map->n);
    if (map->bs > 1) {
      if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
      if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
    }
  }
  /* lock the layout */
  map->setupcalled = PETSC_TRUE;
  map->oldn        = map->n;
  map->oldN        = map->N;
  map->oldbs       = map->bs;
  *newmap          = map;
  PetscFunctionReturn(0);
}

/*@
   PetscLayoutSetUp - given a map where you have set either the global or local
                      size, sets up the map so that it may be used.

   Collective

   Input Parameter:
.  map - pointer to the map

   Level: developer

   Notes:
   Typical calling sequence
$  PetscLayoutCreate(MPI_Comm,PetscLayout *);
$  PetscLayoutSetBlockSize(PetscLayout,1);
$  PetscLayoutSetSize(PetscLayout,N); or PetscLayoutSetLocalSize(PetscLayout,n); or both
$  PetscLayoutSetUp(PetscLayout);
$  PetscLayoutGetSize(PetscLayout,PetscInt *);

   If the range array exists and the local size is not set, everything is computed from the range.

   If the local and global sizes are already set and the range array exists, then this does nothing.

.seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutCreate()
@*/
PetscErrorCode PetscLayoutSetUp(PetscLayout map)
{
  PetscMPIInt    rank,size;
  PetscInt       p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (map->setupcalled && (map->n != map->oldn || map->N != map->oldN)) SETERRQ4(map->comm,PETSC_ERR_ARG_WRONGSTATE,"Layout is already setup with (local=%D,global=%D), cannot call setup again with (local=%D,global=%D)", map->oldn, map->oldN, map->n, map->N);
  if (map->setupcalled) PetscFunctionReturn(0);

  if (map->n > 0 && map->bs > 1) {
    if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
  }
  if (map->N > 0 && map->bs > 1) {
    if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
  }

  ierr = MPI_Comm_size(map->comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(map->comm, &rank);CHKERRQ(ierr);
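  /* split ownership in units of blocks, then convert back to entries; bs is -1 when unset, hence PetscAbs() */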
  if (map->n > 0) map->n = map->n/PetscAbs(map->bs);
  if (map->N > 0) map->N = map->N/PetscAbs(map->bs);
  ierr = PetscSplitOwnership(map->comm,&map->n,&map->N);CHKERRQ(ierr);
  map->n = map->n*PetscAbs(map->bs);
  map->N = map->N*PetscAbs(map->bs);
  if (!map->range) {
    ierr = PetscMalloc1(size+1, &map->range);CHKERRQ(ierr);
  }
  ierr = MPI_Allgather(&map->n, 1, MPIU_INT, map->range+1, 1, MPIU_INT, map->comm);CHKERRQ(ierr);

  map->range[0] = 0;
  for (p = 2; p <= size; p++) map->range[p] += map->range[p-1];

  map->rstart = map->range[rank];
  map->rend   = map->range[rank+1];

  /* lock the layout */
  map->setupcalled = PETSC_TRUE;
  map->oldn        = map->n;
  map->oldN        = map->N;
  map->oldbs       = map->bs;
  PetscFunctionReturn(0);
}

/*@
   PetscLayoutDuplicate - creates a new PetscLayout with the same information as a given one. If *out* already points to a valid PetscLayout it is destroyed first.

   Collective on PetscLayout

   Input Parameter:
.  in - input PetscLayout to be duplicated

   Output Parameter:
.  out - the copy

   Level: developer

   Notes:
   PetscLayoutSetUp() does not need to be called on the resulting PetscLayout

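   A minimal usage sketch (out must be NULL or a valid PetscLayout on entry):
.vb
   PetscLayout copy = NULL;
   PetscLayoutDuplicate(in,&copy);
   /* ... use copy independently of in ... */
   PetscLayoutDestroy(&copy);
.ve
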
.seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutReference()
@*/
PetscErrorCode PetscLayoutDuplicate(PetscLayout in,PetscLayout *out)
{
  PetscMPIInt    size;
  PetscErrorCode ierr;
  MPI_Comm       comm = in->comm;

  PetscFunctionBegin;
  ierr = PetscLayoutDestroy(out);CHKERRQ(ierr);
  ierr = PetscLayoutCreate(comm,out);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscMemcpy(*out,in,sizeof(struct _n_PetscLayout));CHKERRQ(ierr);
  if (in->range) {
    ierr = PetscMalloc1(size+1,&(*out)->range);CHKERRQ(ierr);
    ierr = PetscArraycpy((*out)->range,in->range,size+1);CHKERRQ(ierr);
  }

  (*out)->refcnt = 0;
  PetscFunctionReturn(0);
}

/*@
   PetscLayoutReference - Causes a PETSc Vec or Mat to share a PetscLayout with one that already exists. Used by Vec/MatDuplicate_XXX()

   Collective on PetscLayout

   Input Parameter:
.  in - input PetscLayout to be copied

   Output Parameter:
.  out - the reference location

   Level: developer

   Notes:
   PetscLayoutSetUp() does not need to be called on the resulting PetscLayout

   If the out location already contains a PetscLayout it is destroyed

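   A minimal usage sketch (out must be NULL or a valid PetscLayout on entry):
.vb
   PetscLayout shared = NULL;
   PetscLayoutReference(in,&shared);   /* shared now points to in; the reference count of in is incremented */
   PetscLayoutDestroy(&shared);        /* decrements the count; in itself survives */
.ve
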
.seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
@*/
PetscErrorCode PetscLayoutReference(PetscLayout in,PetscLayout *out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  in->refcnt++;
  ierr = PetscLayoutDestroy(out);CHKERRQ(ierr);
  *out = in;
  PetscFunctionReturn(0);
}

/*@
   PetscLayoutSetISLocalToGlobalMapping - sets an ISLocalToGlobalMapping into a PetscLayout

   Collective on PetscLayout

   Input Parameters:
+  in - input PetscLayout
-  ltog - the local to global mapping

   Level: developer

   Notes:
   PetscLayoutSetUp() does not need to be called on the resulting PetscLayout

   If the layout already contains an ISLocalToGlobalMapping it is destroyed first

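   A minimal usage sketch, assuming globalIndices[] holds the global index of each of the n local points:
.vb
   ISLocalToGlobalMapping ltog;
   ISLocalToGlobalMappingCreate(comm,1,n,globalIndices,PETSC_COPY_VALUES,&ltog);
   PetscLayoutSetISLocalToGlobalMapping(map,ltog);
   ISLocalToGlobalMappingDestroy(&ltog);   /* the layout keeps its own reference */
.ve
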
.seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
@*/
PetscErrorCode PetscLayoutSetISLocalToGlobalMapping(PetscLayout in,ISLocalToGlobalMapping ltog)
{
  PetscErrorCode ierr;
  PetscInt       bs;

  PetscFunctionBegin;
  ierr = ISLocalToGlobalMappingGetBlockSize(ltog,&bs);CHKERRQ(ierr);
  if (in->bs > 0 && (bs != 1) && in->bs != bs) SETERRQ2(in->comm,PETSC_ERR_PLIB,"Blocksize of layout %D must match that of mapping %D (or the latter must be 1)",in->bs,bs);
  ierr = PetscObjectReference((PetscObject)ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&in->mapping);CHKERRQ(ierr);
  in->mapping = ltog;
  PetscFunctionReturn(0);
}

/*@
   PetscLayoutSetLocalSize - Sets the local size for a PetscLayout object.

   Collective on PetscLayout

   Input Parameters:
+  map - pointer to the map
-  n - the local size

   Level: developer

   Notes:
   Call this after the call to PetscLayoutCreate()

.seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
@*/
PetscErrorCode PetscLayoutSetLocalSize(PetscLayout map,PetscInt n)
{
  PetscFunctionBegin;
  if (map->bs > 1 && n % map->bs) SETERRQ2(map->comm,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",n,map->bs);
  map->n = n;
  PetscFunctionReturn(0);
}

/*@C
   PetscLayoutGetLocalSize - Gets the local size for a PetscLayout object.

   Not Collective

   Input Parameter:
.  map - pointer to the map

   Output Parameter:
.  n - the local size

   Level: developer

   Notes:
   Call this after the call to PetscLayoutSetUp()

   Fortran Notes:
   Not available from Fortran

.seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetLocalSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()

@*/
PetscErrorCode PetscLayoutGetLocalSize(PetscLayout map,PetscInt *n)
{
  PetscFunctionBegin;
  *n = map->n;
  PetscFunctionReturn(0);
}

/*@
   PetscLayoutSetSize - Sets the global size for a PetscLayout object.

   Logically Collective on PetscLayout

   Input Parameters:
+  map - pointer to the map
-  n - the global size

   Level: developer

   Notes:
   Call this after the call to PetscLayoutCreate()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
@*/
PetscErrorCode PetscLayoutSetSize(PetscLayout map,PetscInt n)
{
  PetscFunctionBegin;
  map->N = n;
  PetscFunctionReturn(0);
}

/*@
   PetscLayoutGetSize - Gets the global size for a PetscLayout object.

   Not Collective

   Input Parameter:
.  map - pointer to the map

   Output Parameter:
.  n - the global size

   Level: developer

   Notes:
   Call this after the call to PetscLayoutSetUp()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
@*/
PetscErrorCode PetscLayoutGetSize(PetscLayout map,PetscInt *n)
{
  PetscFunctionBegin;
  *n = map->N;
  PetscFunctionReturn(0);
}

/*@
   PetscLayoutSetBlockSize - Sets the block size for a PetscLayout object.

   Logically Collective on PetscLayout

   Input Parameters:
+  map - pointer to the map
-  bs - the block size

   Level: developer

   Notes:
   Call this after the call to PetscLayoutCreate()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
@*/
PetscErrorCode PetscLayoutSetBlockSize(PetscLayout map,PetscInt bs)
{
  PetscFunctionBegin;
  if (bs < 0) PetscFunctionReturn(0);
  if (map->n > 0 && map->n % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",map->n,bs);
  if (map->mapping) {
    PetscInt       obs;
    PetscErrorCode ierr;

    ierr = ISLocalToGlobalMappingGetBlockSize(map->mapping,&obs);CHKERRQ(ierr);
    if (obs > 1) {
      ierr = ISLocalToGlobalMappingSetBlockSize(map->mapping,bs);CHKERRQ(ierr);
    }
  }
  map->bs = bs;
  PetscFunctionReturn(0);
}

/*@
   PetscLayoutGetBlockSize - Gets the block size for a PetscLayout object.

   Not Collective

   Input Parameter:
.  map - pointer to the map

   Output Parameter:
.  bs - the block size

   Level: developer

   Notes:
   Call this after the call to PetscLayoutSetUp()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetSize()
@*/
PetscErrorCode PetscLayoutGetBlockSize(PetscLayout map,PetscInt *bs)
{
  PetscFunctionBegin;
  *bs = PetscAbs(map->bs);
  PetscFunctionReturn(0);
}

/*@
   PetscLayoutGetRange - gets the range of values owned by this process

   Not Collective

   Input Parameter:
.  map - pointer to the map

   Output Parameters:
+  rstart - first index owned by this process
-  rend - one more than the last index owned by this process

   Level: developer

   Notes:
   Call this after the call to PetscLayoutSetUp()

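   For example, a sketch of looping over the locally owned part of the global index space:
.vb
   PetscInt rstart,rend,i;
   PetscLayoutGetRange(map,&rstart,&rend);
   for (i = rstart; i < rend; i++) {
     /* this process owns global index i */
   }
.ve
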
.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
          PetscLayoutGetSize(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutSetUp()
@*/
PetscErrorCode PetscLayoutGetRange(PetscLayout map,PetscInt *rstart,PetscInt *rend)
{
  PetscFunctionBegin;
  if (rstart) *rstart = map->rstart;
  if (rend)   *rend   = map->rend;
  PetscFunctionReturn(0);
}

/*@C
   PetscLayoutGetRanges - gets the ranges of values owned by all processes

   Not Collective

   Input Parameter:
.  map - pointer to the map

   Output Parameter:
.  range - the start of each process's range of indices (the final entry is one more than the
           last index on the last process)

   Level: developer

   Notes:
   Call this after the call to PetscLayoutSetUp()

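   For example, a sketch of locating the owner of a global index, assuming comm is the layout's communicator and i is the index sought:
.vb
   const PetscInt *ranges;
   PetscMPIInt    size,p;
   MPI_Comm_size(comm,&size);
   PetscLayoutGetRanges(map,&ranges);
   for (p = 0; p < size; p++) {
     if (i >= ranges[p] && i < ranges[p+1]) break;  /* rank p owns global index i */
   }
.ve
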
   Fortran Notes:
   Not available from Fortran

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
          PetscLayoutGetSize(), PetscLayoutGetRange(), PetscLayoutSetBlockSize(), PetscLayoutSetUp()

@*/
PetscErrorCode PetscLayoutGetRanges(PetscLayout map,const PetscInt *range[])
{
  PetscFunctionBegin;
  *range = map->range;
  PetscFunctionReturn(0);
}

/*@C
   PetscLayoutsCreateSF - Creates a parallel star forest mapping two PetscLayout objects

   Collective

   Input Arguments:
+  rmap - PetscLayout defining the global root space
-  lmap - PetscLayout defining the global leaf space

   Output Argument:
.  sf - The parallel star forest

   Level: intermediate

.seealso: PetscSFCreate(), PetscLayoutCreate(), PetscSFSetGraphLayout()
@*/
PetscErrorCode PetscLayoutsCreateSF(PetscLayout rmap,PetscLayout lmap,PetscSF *sf)
{
  PetscErrorCode ierr;
  PetscInt       i,nroots,nleaves = 0;
  PetscInt       rN, lst, len;
  PetscMPIInt    owner = -1;
  PetscSFNode    *remote;
  MPI_Comm       rcomm = rmap->comm;
  MPI_Comm       lcomm = lmap->comm;
  PetscMPIInt    flg;

  PetscFunctionBegin;
  PetscValidPointer(sf,3);
  if (!rmap->setupcalled) SETERRQ(rcomm,PETSC_ERR_ARG_WRONGSTATE,"Root layout not setup");
  if (!lmap->setupcalled) SETERRQ(lcomm,PETSC_ERR_ARG_WRONGSTATE,"Leaf layout not setup");
  ierr = MPI_Comm_compare(rcomm,lcomm,&flg);CHKERRQ(ierr);
  if (flg != MPI_CONGRUENT && flg != MPI_IDENT) SETERRQ(rcomm,PETSC_ERR_SUP,"cannot map two layouts with non-matching communicators");
  ierr = PetscSFCreate(rcomm,sf);CHKERRQ(ierr);
  ierr = PetscLayoutGetLocalSize(rmap,&nroots);CHKERRQ(ierr);
  ierr = PetscLayoutGetSize(rmap,&rN);CHKERRQ(ierr);
  ierr = PetscLayoutGetRange(lmap,&lst,&len);CHKERRQ(ierr);
  ierr = PetscMalloc1(len-lst,&remote);CHKERRQ(ierr);
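  /* each global index owned by lmap on this process becomes a leaf pointing at the rank of rmap that owns the same index */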
  for (i = lst; i < len && i < rN; i++) {
    if (owner < -1 || i >= rmap->range[owner+1]) {
      ierr = PetscLayoutFindOwner(rmap,i,&owner);CHKERRQ(ierr);
    }
    remote[nleaves].rank  = owner;
    remote[nleaves].index = i - rmap->range[owner];
    nleaves++;
  }
  ierr = PetscSFSetGraph(*sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,remote,PETSC_COPY_VALUES);CHKERRQ(ierr);
  ierr = PetscFree(remote);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PetscSFSetGraphLayout - Set a parallel star forest via global indices and a PetscLayout

   Collective

   Input Arguments:
+  sf - star forest
.  layout - PetscLayout defining the global space
.  nleaves - number of leaf vertices on the current process, each of these references a root on any process
.  ilocal - locations of leaves in leafdata buffers, pass NULL for contiguous storage
.  localmode - copy mode for ilocal
-  iremote - remote locations of root vertices for each leaf on the current process

   Level: intermediate

   Developers Note: Local indices which are the identity permutation in the range [0,nleaves) are discarded as they
   encode contiguous storage. In that case, if localmode is PETSC_OWN_POINTER, the memory is deallocated as it is not
   needed

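   A minimal usage sketch, assuming gremote[] holds the global index of the root referenced by each of the nleaves leaves:
.vb
   PetscSFSetGraphLayout(sf,layout,nleaves,NULL,PETSC_COPY_VALUES,gremote);
.ve
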
.seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFSetGraphLayout(PetscSF sf,PetscLayout layout,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscInt *iremote)
{
  PetscErrorCode ierr;
  PetscInt       i,nroots;
  PetscSFNode    *remote;

  PetscFunctionBegin;
  ierr = PetscLayoutGetLocalSize(layout,&nroots);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves,&remote);CHKERRQ(ierr);
  for (i=0; i<nleaves; i++) {
    PetscMPIInt owner = -1;
    ierr = PetscLayoutFindOwner(layout,iremote[i],&owner);CHKERRQ(ierr);
    remote[i].rank  = owner;
    remote[i].index = iremote[i] - layout->range[owner];
  }
  ierr = PetscSFSetGraph(sf,nroots,nleaves,ilocal,localmode,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PetscLayoutCompare - Compares two layouts

   Not Collective

   Input Parameters:
+  mapa - pointer to the first map
-  mapb - pointer to the second map

   Output Parameter:
.  congruent - PETSC_TRUE if the two layouts are congruent, PETSC_FALSE otherwise

   Level: beginner

   Notes:
   Two layouts are congruent when they have the same global size and identical ownership ranges.

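   A minimal usage sketch:
.vb
   PetscBool congruent;
   PetscLayoutCompare(mapa,mapb,&congruent);
   if (congruent) { /* mapa and mapb describe the same parallel distribution */ }
.ve
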
.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
@*/
PetscErrorCode PetscLayoutCompare(PetscLayout mapa,PetscLayout mapb,PetscBool *congruent)
{
  PetscErrorCode ierr;
  PetscMPIInt    sizea,sizeb;

  PetscFunctionBegin;
  *congruent = PETSC_FALSE;
  ierr = MPI_Comm_size(mapa->comm,&sizea);CHKERRQ(ierr);
  ierr = MPI_Comm_size(mapb->comm,&sizeb);CHKERRQ(ierr);
  if (mapa->N == mapb->N && mapa->range && mapb->range && sizea == sizeb) {
    ierr = PetscArraycmp(mapa->range,mapb->range,sizea+1,congruent);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* TODO: handle nooffprocentries like MatZeroRowsMapLocal_Private, since this code is the same */
PetscErrorCode PetscLayoutMapLocal(PetscLayout map,PetscInt N,const PetscInt idxs[],PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
{
  PetscInt       *owners = map->range;
  PetscInt       n       = map->n;
  PetscSF        sf;
  PetscInt       *lidxs,*work = NULL;
  PetscSFNode    *ridxs;
  PetscMPIInt    rank, p = 0;
  PetscInt       r, len = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (on) *on = 0;              /* squelch -Wmaybe-uninitialized */
  /* Create SF where leaves are input idxs and roots are owned idxs */
  ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr);
  ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
  for (r = 0; r < n; ++r) lidxs[r] = -1;
  ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
  for (r = 0; r < N; ++r) {
    const PetscInt idx = idxs[r];
    if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
    if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
      ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
    }
    ridxs[r].rank = p;
    ridxs[r].index = idxs[r] - owners[p];
  }
  ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
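  /* after the reduction, lidxs[r] >= 0 exactly for the locally owned indices referenced by some process; untouched roots remain -1 */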
  if (ogidxs) { /* communicate global idxs */
    PetscInt cum = 0,start,*work2;

    ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
    ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
    for (r = 0; r < N; ++r) if (idxs[r] >= 0) cum++;
    ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    start -= cum;
    cum    = 0;
    for (r = 0; r < N; ++r) if (idxs[r] >= 0) work2[r] = start+cum++;
    ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscFree(work2);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* Compress and put in indices */
  for (r = 0; r < n; ++r)
    if (lidxs[r] >= 0) {
      if (work) work[len] = work[r];
      lidxs[len++] = r;
    }
  if (on)     *on     = len;
  if (oidxs)  *oidxs  = lidxs;
  if (ogidxs) *ogidxs = work;
  PetscFunctionReturn(0);
}