/*
   This file implements a preconditioner that redistributes the linear system: rows that
   contain only a diagonal entry are eliminated, and the remaining rows are load balanced
   across all processes and solved with an inner KSP.
*/
#include <petsc/private/pcimpl.h> /*I "petscksp.h" I*/
#include <petscksp.h>
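
/*
   PC_Redistribute is the private context for PCREDISTRIBUTE. The inner KSP solves the
   load-balanced subproblem on the rows selected by the index set "is"; "x" and "b" hold
   the redistributed solution and right-hand side, "scatter" moves values between the
   original and redistributed layouts, and "dcnt"/"drows"/"diag" record the locally owned
   rows that contain only a diagonal entry together with the inverses of those entries.
*/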
typedef struct {
  KSP         ksp;
  Vec         x,b;
  VecScatter  scatter;
  IS          is;
  PetscInt    dcnt,*drows;    /* these are the local rows that have only diagonal entry */
  PetscScalar *diag;
  Vec         work;
} PC_Redistribute;

static PetscErrorCode PCView_Redistribute(PC pc,PetscViewer viewer)
{
  PC_Redistribute *red = (PC_Redistribute*)pc->data;
  PetscErrorCode  ierr;
  PetscBool       iascii,isstring;
  PetscInt        ncnt,N;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    ierr = MPIU_Allreduce(&red->dcnt,&ncnt,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    ierr = MatGetSize(pc->pmat,&N,NULL);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer," Number rows eliminated %D Percentage rows eliminated %g\n",ncnt,100.0*((PetscReal)ncnt)/((PetscReal)N));CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer," Redistribute preconditioner: \n");CHKERRQ(ierr);
    ierr = KSPView(red->ksp,viewer);CHKERRQ(ierr);
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redistribute preconditioner");CHKERRQ(ierr);
    ierr = KSPView(red->ksp,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUp_Redistribute(PC pc)
{
  PC_Redistribute   *red = (PC_Redistribute*)pc->data;
  PetscErrorCode    ierr;
  MPI_Comm          comm;
  PetscInt          rstart,rend,i,nz,cnt,*rows,ncnt,dcnt,*drows;
  PetscLayout       map,nmap;
  PetscMPIInt       size,imdex,tag,n;
  PetscInt          *source = NULL;
  PetscMPIInt       *sizes = NULL,nrecvs;
  PetscInt          j,nsends;
  PetscInt          *owner = NULL,*starts = NULL,count,slen;
  PetscInt          *rvalues,*svalues,recvtotal;
  PetscMPIInt       *onodes1,*olengths1;
  MPI_Request       *send_waits = NULL,*recv_waits = NULL;
  MPI_Status        recv_status,*send_status;
  Vec               tvec,diag;
  Mat               tmat;
  const PetscScalar *d;

  PetscFunctionBegin;
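  /*
     First-time setup: build the list of locally owned rows that have off-diagonal entries,
     load balance those rows across the processes, create the scatter between the original
     and redistributed layouts, and extract the redistributed submatrix for the inner KSP.
     On later setups only the submatrix and the stored inverse diagonal are refreshed.
  */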
  if (pc->setupcalled) {
    ierr = KSPGetOperators(red->ksp,NULL,&tmat);CHKERRQ(ierr);
    ierr = MatCreateSubMatrix(pc->pmat,red->is,red->is,MAT_REUSE_MATRIX,&tmat);CHKERRQ(ierr);
    ierr = KSPSetOperators(red->ksp,tmat,tmat);CHKERRQ(ierr);
  } else {
    PetscInt NN;

    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = PetscObjectGetNewTag((PetscObject)pc,&tag);CHKERRQ(ierr);

    /* count non-diagonal rows on this process */
    ierr = MatGetOwnershipRange(pc->mat,&rstart,&rend);CHKERRQ(ierr);
    cnt  = 0;
    for (i=rstart; i<rend; i++) {
      ierr = MatGetRow(pc->mat,i,&nz,NULL,NULL);CHKERRQ(ierr);
      if (nz > 1) cnt++;
      ierr = MatRestoreRow(pc->mat,i,&nz,NULL,NULL);CHKERRQ(ierr);
    }
    ierr = PetscMalloc1(cnt,&rows);CHKERRQ(ierr);
    ierr = PetscMalloc1(rend - rstart - cnt,&drows);CHKERRQ(ierr);

    /* list non-diagonal rows on this process */
    cnt = 0; dcnt = 0;
    for (i=rstart; i<rend; i++) {
      ierr = MatGetRow(pc->mat,i,&nz,NULL,NULL);CHKERRQ(ierr);
      if (nz > 1) rows[cnt++] = i;
      else drows[dcnt++] = i - rstart;
      ierr = MatRestoreRow(pc->mat,i,&nz,NULL,NULL);CHKERRQ(ierr);
    }

    /* create PetscLayout for non-diagonal rows on each process */
    ierr   = PetscLayoutCreate(comm,&map);CHKERRQ(ierr);
    ierr   = PetscLayoutSetLocalSize(map,cnt);CHKERRQ(ierr);
    ierr   = PetscLayoutSetBlockSize(map,1);CHKERRQ(ierr);
    ierr   = PetscLayoutSetUp(map);CHKERRQ(ierr);
    rstart = map->rstart;
    rend   = map->rend;

    /* create PetscLayout for load-balanced non-diagonal rows on each process */
    ierr = PetscLayoutCreate(comm,&nmap);CHKERRQ(ierr);
    ierr = MPIU_Allreduce(&cnt,&ncnt,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    ierr = PetscLayoutSetSize(nmap,ncnt);CHKERRQ(ierr);
    ierr = PetscLayoutSetBlockSize(nmap,1);CHKERRQ(ierr);
    ierr = PetscLayoutSetUp(nmap);CHKERRQ(ierr);

    ierr = MatGetSize(pc->pmat,&NN,NULL);CHKERRQ(ierr);
    ierr = PetscInfo2(pc,"Number of diagonal rows eliminated %D, percentage eliminated %g\n",NN-ncnt,100.0*((PetscReal)(NN-ncnt))/((PetscReal)(NN)));CHKERRQ(ierr);
    /*
       This code is taken from VecScatterCreate_PtoS().
       It determines what rows need to be moved where in order to
       load balance the non-diagonal rows.
    */
    /* count number of contributors to each processor */
    ierr   = PetscMalloc2(size,&sizes,cnt,&owner);CHKERRQ(ierr);
    ierr   = PetscArrayzero(sizes,size);CHKERRQ(ierr);
    j      = 0;
    nsends = 0;
    for (i=rstart; i<rend; i++) {
      if (i < nmap->range[j]) j = 0;
      for (; j<size; j++) {
        if (i < nmap->range[j+1]) {
          if (!sizes[j]++) nsends++;
          owner[i-rstart] = j;
          break;
        }
      }
    }
    /* inform other processors of number of messages and max length */
    ierr = PetscGatherNumberOfMessages(comm,NULL,sizes,&nrecvs);CHKERRQ(ierr);
    ierr = PetscGatherMessageLengths(comm,nsends,nrecvs,sizes,&onodes1,&olengths1);CHKERRQ(ierr);
    ierr = PetscSortMPIIntWithArray(nrecvs,onodes1,olengths1);CHKERRQ(ierr);
    recvtotal = 0; for (i=0; i<nrecvs; i++) recvtotal += olengths1[i];

    /* post receives: rvalues holds the rows this process will own after redistribution */
    ierr  = PetscMalloc3(recvtotal,&rvalues,nrecvs,&source,nrecvs,&recv_waits);CHKERRQ(ierr);
    count = 0;
    for (i=0; i<nrecvs; i++) {
      ierr   = MPI_Irecv((rvalues+count),olengths1[i],MPIU_INT,onodes1[i],tag,comm,recv_waits+i);CHKERRQ(ierr);
      count += olengths1[i];
    }

    /* do sends:
       1) starts[i] gives the starting index in svalues of the rows going to
          the ith processor
    */
    ierr = PetscMalloc3(cnt,&svalues,nsends,&send_waits,size,&starts);CHKERRQ(ierr);
    starts[0] = 0;
    for (i=1; i<size; i++) starts[i] = starts[i-1] + sizes[i-1];
    for (i=0; i<cnt; i++) svalues[starts[owner[i]]++] = rows[i];
    for (i=0; i<cnt; i++) rows[i] = rows[i] - rstart;
    red->drows = drows;
    red->dcnt  = dcnt;
    ierr = PetscFree(rows);CHKERRQ(ierr);

    starts[0] = 0;
    for (i=1; i<size; i++) starts[i] = starts[i-1] + sizes[i-1];
    count = 0;
    for (i=0; i<size; i++) {
      if (sizes[i]) {
        ierr = MPI_Isend(svalues+starts[i],sizes[i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
      }
    }

    /* wait on receives */
    count = nrecvs;
    slen  = 0;
    while (count) {
      ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
      /* unpack receives into our local space */
      ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr);
      slen += n;
      count--;
    }
    if (slen != recvtotal) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Total message lengths %D not expected %D",slen,recvtotal);

    ierr = ISCreateGeneral(comm,slen,rvalues,PETSC_COPY_VALUES,&red->is);CHKERRQ(ierr);

    /* free up all work space */
    ierr = PetscFree(olengths1);CHKERRQ(ierr);
    ierr = PetscFree(onodes1);CHKERRQ(ierr);
    ierr = PetscFree3(rvalues,source,recv_waits);CHKERRQ(ierr);
    ierr = PetscFree2(sizes,owner);CHKERRQ(ierr);
    if (nsends) {   /* wait on sends */
      ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
      ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
      ierr = PetscFree(send_status);CHKERRQ(ierr);
    }
    ierr = PetscFree3(svalues,send_waits,starts);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(&map);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(&nmap);CHKERRQ(ierr);

    ierr = VecCreateMPI(comm,slen,PETSC_DETERMINE,&red->b);CHKERRQ(ierr);
    ierr = VecDuplicate(red->b,&red->x);CHKERRQ(ierr);
    ierr = MatCreateVecs(pc->pmat,&tvec,NULL);CHKERRQ(ierr);
    ierr = VecScatterCreate(tvec,red->is,red->b,NULL,&red->scatter);CHKERRQ(ierr);
    ierr = VecDestroy(&tvec);CHKERRQ(ierr);
    ierr = MatCreateSubMatrix(pc->pmat,red->is,red->is,MAT_INITIAL_MATRIX,&tmat);CHKERRQ(ierr);
    ierr = KSPSetOperators(red->ksp,tmat,tmat);CHKERRQ(ierr);
    ierr = MatDestroy(&tmat);CHKERRQ(ierr);
  }

  /* get the diagonal portion of the matrix: store the inverse of the diagonal entry of each purely diagonal row */
  ierr = PetscFree(red->diag);CHKERRQ(ierr);
  ierr = PetscMalloc1(red->dcnt,&red->diag);CHKERRQ(ierr);
  ierr = MatCreateVecs(pc->pmat,&diag,NULL);CHKERRQ(ierr);
  ierr = MatGetDiagonal(pc->pmat,diag);CHKERRQ(ierr);
  ierr = VecGetArrayRead(diag,&d);CHKERRQ(ierr);
  for (i=0; i<red->dcnt; i++) red->diag[i] = 1.0/d[red->drows[i]];
  ierr = VecRestoreArrayRead(diag,&d);CHKERRQ(ierr);
  ierr = VecDestroy(&diag);CHKERRQ(ierr);
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_Redistribute(PC pc,Vec b,Vec x)
{
  PC_Redistribute   *red = (PC_Redistribute*)pc->data;
  PetscErrorCode    ierr;
  PetscInt          dcnt = red->dcnt,i;
  const PetscInt    *drows = red->drows;
  PetscScalar       *xwork;
  const PetscScalar *bwork,*diag = red->diag;

  PetscFunctionBegin;
  if (!red->work) {
    ierr = VecDuplicate(b,&red->work);CHKERRQ(ierr);
  }
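  /*
     Apply the preconditioner in two pieces: rows that have only a diagonal entry are
     solved directly with the stored inverse diagonal, then the residual b - A*x is
     scattered into the load-balanced layout, solved with the inner KSP, and the result
     is scattered back into the corresponding entries of x.
  */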
  /* compute the rows of the solution that have only a diagonal entry */
  ierr = VecSet(x,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(x,&xwork);CHKERRQ(ierr);
  ierr = VecGetArrayRead(b,&bwork);CHKERRQ(ierr);
  for (i=0; i<dcnt; i++) xwork[drows[i]] = diag[i]*bwork[drows[i]];   /* x = diag(A)^{-1} b on those rows */
  ierr = PetscLogFlops(dcnt);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&xwork);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(b,&bwork);CHKERRQ(ierr);
  /* update the right-hand side for the reduced system with the diagonal rows (and corresponding columns) removed */
  ierr = MatMult(pc->pmat,x,red->work);CHKERRQ(ierr);
  ierr = VecAYPX(red->work,-1.0,b);CHKERRQ(ierr);   /* red->work = b - A x */

  ierr = VecScatterBegin(red->scatter,red->work,red->b,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatter,red->work,red->b,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = KSPSolve(red->ksp,red->b,red->x);CHKERRQ(ierr);
  ierr = KSPCheckSolve(red->ksp,pc,red->x);CHKERRQ(ierr);
  ierr = VecScatterBegin(red->scatter,red->x,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatter,red->x,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_Redistribute(PC pc)
{
  PC_Redistribute *red = (PC_Redistribute*)pc->data;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = VecScatterDestroy(&red->scatter);CHKERRQ(ierr);
  ierr = ISDestroy(&red->is);CHKERRQ(ierr);
  ierr = VecDestroy(&red->b);CHKERRQ(ierr);
  ierr = VecDestroy(&red->x);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = VecDestroy(&red->work);CHKERRQ(ierr);
  ierr = PetscFree(red->drows);CHKERRQ(ierr);
  ierr = PetscFree(red->diag);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_Redistribute(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscErrorCode  ierr;
  PC_Redistribute *red = (PC_Redistribute*)pc->data;

  PetscFunctionBegin;
  ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCRedistributeGetKSP - Gets the KSP created by the PCREDISTRIBUTE preconditioner

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the inner KSP

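   Example usage (a minimal sketch; error checking is omitted and the solver choice is only illustrative):
.vb
   KSP inner;
   PCRedistributeGetKSP(pc,&inner);
   KSPSetType(inner,KSPCG);
.ve
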
   Level: advanced

@*/
PetscErrorCode PCRedistributeGetKSP(PC pc,KSP *innerksp)
{
  PC_Redistribute *red = (PC_Redistribute*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
   PCREDISTRIBUTE - Redistributes a matrix for load balancing, removing the rows that have only a diagonal entry, and then applies a KSP to the reduced matrix

   Options for the inner KSP and its preconditioner can be set with -redistribute_ksp_xxx <values> and -redistribute_pc_xxx <values>

   Notes:
   Usually run this with -ksp_type preonly

   If you have used MatZeroRows() to eliminate (for example, Dirichlet) boundary conditions for a symmetric problem then you can use, for example, -ksp_type preonly
   -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc to take advantage of the symmetry.

   This does NOT call a partitioner to reorder rows to lower communication; the ordering of the rows in the original matrix and redistributed matrix is the same.
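
   Example usage (a sketch; the enclosing KSP and matrix setup are assumed to exist elsewhere):
.vb
   KSPGetPC(ksp,&pc);
   KSPSetType(ksp,KSPPREONLY);
   PCSetType(pc,PCREDISTRIBUTE);
.ve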

   Developer Notes:
   Should add an option to this preconditioner to use a partitioner to redistribute the rows to lower communication.

   Level: intermediate

.seealso: PCCreate(), PCSetType(), PCType (for list of available types), PCRedistributeGetKSP()
M*/

PETSC_EXTERN PetscErrorCode PCCreate_Redistribute(PC pc)
{
  PetscErrorCode  ierr;
  PC_Redistribute *red;
  const char      *prefix;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&red);CHKERRQ(ierr);
  pc->data = (void*)red;

  pc->ops->apply          = PCApply_Redistribute;
  pc->ops->applytranspose = NULL;
  pc->ops->setup          = PCSetUp_Redistribute;
  pc->ops->destroy        = PCDestroy_Redistribute;
  pc->ops->setfromoptions = PCSetFromOptions_Redistribute;
  pc->ops->view           = PCView_Redistribute;

  ierr = KSPCreate(PetscObjectComm((PetscObject)pc),&red->ksp);CHKERRQ(ierr);
  ierr = KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);CHKERRQ(ierr);
  ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
  ierr = KSPAppendOptionsPrefix(red->ksp,"redistribute_");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}