#include <petscsys.h>           /*I    "petscsys.h" I*/

/*@
    PetscSplitOwnershipBlock - Given a global (or local) length, determines a local
        (or global) length via a simple formula. Splits so that each processor's local size
        is divisible by the block size.

   Collective (if N is PETSC_DECIDE)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    bs - block size
.    n - local length (or PETSC_DECIDE to have it set)
-    N - global length (or PETSC_DECIDE)

  Level: developer

   Notes:
     n and N cannot both be PETSC_DECIDE

     If one processor calls this with N of PETSC_DECIDE then all processors
     must, otherwise the program will hang.

.seealso: PetscSplitOwnership()

@*/
PetscErrorCode  PetscSplitOwnershipBlock(MPI_Comm comm,PetscInt bs,PetscInt *n,PetscInt *N)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE");

  if (*N == PETSC_DECIDE) {
    if (*n % bs != 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"local size %D not divisible by block size %D",*n,bs);
    ierr = MPIU_Allreduce(n,N,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
  } else if (*n == PETSC_DECIDE) {
    PetscInt Nbs = *N/bs;
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    *n   = bs*(Nbs/size + ((Nbs % size) > rank)); /* each rank gets Nbs/size blocks; the first Nbs % size ranks get one extra block */
  }
  PetscFunctionReturn(0);
}
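/*
   A minimal usage sketch for PetscSplitOwnershipBlock() (illustration only, not part
   of this source file; the values are made up): with bs = 2, a global length N = 10,
   and 3 MPI ranks, every rank passes n = PETSC_DECIDE and receives a block-aligned
   local size, here 4, 4 and 2 respectively.

     PetscInt       n = PETSC_DECIDE,N = 10;
     PetscErrorCode ierr;

     ierr = PetscSplitOwnershipBlock(PETSC_COMM_WORLD,2,&n,&N);CHKERRQ(ierr);
     .. n is now divisible by bs on every rank and the local sizes sum to N ..
*/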
/*@
    PetscSplitOwnership - Given a global (or local) length, determines a local
        (or global) length via a simple formula

   Collective (if n or N is PETSC_DECIDE)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    n - local length (or PETSC_DECIDE to have it set)
-    N - global length (or PETSC_DECIDE)

  Level: developer

   Notes:
     n and N cannot both be PETSC_DECIDE

     If one processor calls this with n or N of PETSC_DECIDE then all processors
     must. Otherwise, an error is thrown in debug mode, while the program will hang
     in optimized (i.e. configured with --with-debugging=0) mode.

.seealso: PetscSplitOwnershipBlock()

@*/
PetscErrorCode  PetscSplitOwnership(MPI_Comm comm,PetscInt *n,PetscInt *N)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE\n  likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee https://www.mcs.anl.gov/petsc/documentation/faq.html#split");
  if (PetscDefined(USE_DEBUG)) {
    PetscMPIInt l[2],g[2];
    l[0] = (*n == PETSC_DECIDE) ? 1 : 0;
    l[1] = (*N == PETSC_DECIDE) ? 1 : 0;
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPIU_Allreduce(l,g,2,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
    if (g[0] && g[0] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for local size");
    if (g[1] && g[1] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for global size");
  }

  if (*N == PETSC_DECIDE) {
    PetscInt64 m = *n, M;
    ierr = MPIU_Allreduce(&m,&M,1,MPIU_INT64,MPI_SUM,comm);CHKERRQ(ierr);
    if (M > PETSC_MAX_INT) SETERRQ1(comm,PETSC_ERR_INT_OVERFLOW,"Global size overflow %" PetscInt64_FMT ". You may consider ./configure PETSc with --with-64-bit-indices for the case you are running", M);
    else *N = (PetscInt)M;
  } else if (*n == PETSC_DECIDE) {
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    *n   = *N/size + ((*N % size) > rank); /* each rank gets N/size entries; the first N % size ranks get one extra */
  } else if (PetscDefined(USE_DEBUG)) {
    PetscInt tmp;
    ierr = MPIU_Allreduce(n,&tmp,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    if (tmp != *N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D\n  likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee https://www.mcs.anl.gov/petsc/documentation/faq.html#split",tmp,*N,*n);
  }
  PetscFunctionReturn(0);
}
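/*
   A minimal usage sketch for PetscSplitOwnership() (illustration only, not part of
   this source file; the values are made up): compute a global length from the
   rank-local lengths. Each rank passes its own n together with N = PETSC_DECIDE;
   on return N holds the sum of the local lengths across the communicator. As the
   error messages above suggest, this is the splitting applied when VecSetSizes()
   or MatSetSizes() receives PETSC_DECIDE for one of its sizes.

     PetscInt       n = 5,N = PETSC_DECIDE;
     PetscErrorCode ierr;

     ierr = PetscSplitOwnership(PETSC_COMM_WORLD,&n,&N);CHKERRQ(ierr);
     .. on 4 ranks that each pass n = 5, N is now 20 ..
*/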
/*@
    PetscSplitOwnershipEqual - Given a global (or local) length, determines a local
        (or global) length via a simple formula, trying to have all local lengths equal

   Collective (if n or N is PETSC_DECIDE)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    n - local length (or PETSC_DECIDE to have it set)
-    N - global length (or PETSC_DECIDE)

   Level: developer

   Notes:
     This is intended to be used with MATSCALAPACK, where the local size must
     be equal in all processes (except possibly the last one). For instance,
     the local sizes when splitting N=50 with 6 processes are 9,9,9,9,9,5

     n and N cannot both be PETSC_DECIDE

     If one processor calls this with n or N of PETSC_DECIDE then all processors
     must. Otherwise, an error is thrown in debug mode, while the program will hang
     in optimized (i.e. configured with --with-debugging=0) mode.

.seealso: PetscSplitOwnership(), PetscSplitOwnershipBlock()

@*/
PetscErrorCode  PetscSplitOwnershipEqual(MPI_Comm comm,PetscInt *n,PetscInt *N)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE");
  if (PetscDefined(USE_DEBUG)) {
    PetscMPIInt l[2],g[2];
    l[0] = (*n == PETSC_DECIDE) ? 1 : 0;
    l[1] = (*N == PETSC_DECIDE) ? 1 : 0;
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPIU_Allreduce(l,g,2,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
    if (g[0] && g[0] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for local size");
    if (g[1] && g[1] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for global size");
  }

  if (*N == PETSC_DECIDE) {
    PetscInt64 m = *n, M;
    ierr = MPIU_Allreduce(&m,&M,1,MPIU_INT64,MPI_SUM,comm);CHKERRQ(ierr);
    if (M > PETSC_MAX_INT) SETERRQ1(comm,PETSC_ERR_INT_OVERFLOW,"Global size overflow %" PetscInt64_FMT ". You may consider ./configure PETSc with --with-64-bit-indices for the case you are running", M);
    else *N = (PetscInt)M;
  } else if (*n == PETSC_DECIDE) {
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    *n = *N/size;
    if (*N % size) { /* uneven split: give ceil(N/size) = *n+1 entries per rank until N is exhausted */
      if ((rank+1)*(*n+1)<=*N)  *n = *n+1;           /* this rank fits entirely within the first full ranks */
      else if (rank*(*n+1)<=*N) *n = *N-rank*(*n+1); /* last nonempty rank picks up the remainder */
      else *n = 0;                                   /* ranks past the end get nothing */
    }
  } else if (PetscDefined(USE_DEBUG)) {
    PetscInt tmp;
    ierr = MPIU_Allreduce(n,&tmp,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    if (tmp != *N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D",tmp,*N,*n);
  }
  PetscFunctionReturn(0);
}
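/*
   A worked sketch of the equal-split branch above (illustration only, not part of
   this source file), reproducing the N=50, 6-rank example from the Notes: *n starts
   at 50/6 = 8 and 50 % 6 is nonzero, so *n+1 = 9. Ranks 0..4 satisfy
   (rank+1)*9 <= 50 and get 9 entries; rank 5 fails that test but satisfies
   5*9 = 45 <= 50 and gets 50 - 45 = 5, yielding the distribution 9,9,9,9,9,5.

     PetscInt       n = PETSC_DECIDE,N = 50;
     PetscErrorCode ierr;

     ierr = PetscSplitOwnershipEqual(PETSC_COMM_WORLD,&n,&N);CHKERRQ(ierr);
     .. n is now 9 on ranks 0-4 and 5 on rank 5 ..
*/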