
#include <petsc/private/petscimpl.h>
#include <petsc/private/matimpl.h>
#include <petsc/private/pcimpl.h>
#include <petscksp.h> /*I "petscksp.h" I*/
#include <petscdm.h> /*I "petscdm.h" I*/
#include "../src/ksp/pc/impls/telescope/telescope.h"

static PetscBool  cited = PETSC_FALSE;
static const char citation[] =
"@inproceedings{MaySananRuppKnepleySmith2016,\n"
"  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
"  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
"  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
"  series    = {PASC '16},\n"
"  isbn      = {978-1-4503-4126-4},\n"
"  location  = {Lausanne, Switzerland},\n"
"  pages     = {5:1--5:12},\n"
"  articleno = {5},\n"
"  numpages  = {12},\n"
"  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
"  doi       = {10.1145/2929908.2929913},\n"
"  acmid     = {2929913},\n"
"  publisher = {ACM},\n"
"  address   = {New York, NY, USA},\n"
"  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
"  year      = {2016}\n"
"}\n";

/*
 default setup mode

 [1a] scatter to (FORWARD)
 x(comm) -> xtmp(comm)
 [1b] local copy (to) ranks with color = 0
 xred(subcomm) <- xtmp

 [2] solve on sub KSP to obtain yred(subcomm)

 [3a] local copy (from) ranks with color = 0
 yred(subcomm) --> xtmp
 [3b] scatter from (REVERSE)
 xtmp(comm) -> y(comm)
*/

/*
  Collective[comm_f]
  Notes
   * Using comm_f = MPI_COMM_NULL will result in an error
   * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL the subcomm is not valid.
   * If any non NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
*/
PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f,MPI_Comm comm_c,PetscBool *isvalid)
{
  PetscInt       valid = 1;
  MPI_Group      group_f,group_c;
  PetscErrorCode ierr;
  PetscMPIInt    count,k,size_f = 0,size_c = 0,size_c_sum = 0;
  PetscMPIInt    *ranks_f,*ranks_c;

  PetscFunctionBegin;
  if (comm_f == MPI_COMM_NULL) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"comm_f cannot be MPI_COMM_NULL");

  ierr = MPI_Comm_group(comm_f,&group_f);CHKERRQ(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Comm_group(comm_c,&group_c);CHKERRQ(ierr);
  }

  ierr = MPI_Comm_size(comm_f,&size_f);CHKERRQ(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Comm_size(comm_c,&size_c);CHKERRQ(ierr);
  }

  /* check not all comm_c's are NULL */
  size_c_sum = size_c;
  ierr = MPI_Allreduce(MPI_IN_PLACE,&size_c_sum,1,MPI_INT,MPI_SUM,comm_f);CHKERRQ(ierr);
  if (size_c_sum == 0) valid = 0;

  /* check we can map at least 1 rank in comm_c to comm_f */
  ierr = PetscMalloc1(size_f,&ranks_f);CHKERRQ(ierr);
  ierr = PetscMalloc1(size_c,&ranks_c);CHKERRQ(ierr);
  for (k=0; k<size_f; k++) ranks_f[k] = MPI_UNDEFINED;
  for (k=0; k<size_c; k++) ranks_c[k] = k;

  /*
   MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
   I do not want the code to terminate immediately if this occurs, rather I want to throw
   the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
   that comm_c is not a valid sub-communicator.
   Hence I purposefully do not call CHKERRQ() after MPI_Group_translate_ranks().
  */
  count = 0;
  if (comm_c != MPI_COMM_NULL) {
    (void)MPI_Group_translate_ranks(group_c,size_c,ranks_c,group_f,ranks_f);
    for (k=0; k<size_f; k++) {
      if (ranks_f[k] == MPI_UNDEFINED) {
        count++;
      }
    }
  }
  if (count == size_f) valid = 0;

  ierr = MPI_Allreduce(MPI_IN_PLACE,&valid,1,MPIU_INT,MPI_MIN,comm_f);CHKERRQ(ierr);
  if (valid == 1) *isvalid = PETSC_TRUE;
  else *isvalid = PETSC_FALSE;

  ierr = PetscFree(ranks_f);CHKERRQ(ierr);
  ierr = PetscFree(ranks_c);CHKERRQ(ierr);
  ierr = MPI_Group_free(&group_f);CHKERRQ(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Group_free(&group_c);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

DM private_PCTelescopeGetSubDM(PC_Telescope sred)
{
  DM subdm = NULL;

  if (!PCTelescope_isActiveRank(sred)) { subdm = NULL; }
  else {
    switch (sred->sr_type) {
    case TELESCOPE_DEFAULT: subdm = NULL;
      break;
    case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
      break;
    case TELESCOPE_DMPLEX:  subdm = NULL;
      break;
    case TELESCOPE_COARSEDM: if (sred->ksp) { KSPGetDM(sred->ksp,&subdm); }
      break;
    }
  }
  return(subdm);
}

PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
{
  PetscErrorCode ierr;
  PetscInt       m,M,bs,st,ed;
  Vec            x,xred,yred,xtmp;
  Mat            B;
  MPI_Comm       comm,subcomm;
  VecScatter     scatter;
  IS             isin;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: setup (default)\n");CHKERRQ(ierr);
  comm = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&M,NULL);CHKERRQ(ierr);
  ierr = MatGetBlockSize(B,&bs);CHKERRQ(ierr);
  ierr = MatCreateVecs(B,&x,NULL);CHKERRQ(ierr);

  xred = NULL;
  m    = 0;
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecCreate(subcomm,&xred);CHKERRQ(ierr);
    ierr = VecSetSizes(xred,PETSC_DECIDE,M);CHKERRQ(ierr);
    ierr = VecSetBlockSize(xred,bs);CHKERRQ(ierr);
    ierr = VecSetFromOptions(xred);CHKERRQ(ierr);
    ierr = VecGetLocalSize(xred,&m);CHKERRQ(ierr);
  }

  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecDuplicate(xred,&yred);CHKERRQ(ierr);
  }

  ierr = VecCreate(comm,&xtmp);CHKERRQ(ierr);
  ierr = VecSetSizes(xtmp,m,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetBlockSize(xtmp,bs);CHKERRQ(ierr);
  ierr = VecSetType(xtmp,((PetscObject)x)->type_name);CHKERRQ(ierr);

  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,(ed-st),st,1,&isin);CHKERRQ(ierr);
  } else {
    ierr = VecGetOwnershipRange(x,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,0,st,1,&isin);CHKERRQ(ierr);
  }
  ierr = ISSetBlockSize(isin,bs);CHKERRQ(ierr);

  ierr = VecScatterCreate(x,isin,xtmp,NULL,&scatter);CHKERRQ(ierr);

  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
{
  PetscErrorCode ierr;
  MPI_Comm       comm,subcomm;
  Mat            Bred,B;
  PetscInt       nr,nc;
  IS             isrow,iscol;
  Mat            Blocal,*_Blocal;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n");CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&nr,&nc);CHKERRQ(ierr);
  isrow = sred->isin;
  ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&iscol);CHKERRQ(ierr);
  ierr = ISSetIdentity(iscol);CHKERRQ(ierr);
  ierr = MatSetOption(B,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
  ierr = MatCreateSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);CHKERRQ(ierr);
  Blocal = *_Blocal;
  ierr = PetscFree(_Blocal);CHKERRQ(ierr);
  Bred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    PetscInt mm;

    if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }

    ierr = MatGetSize(Blocal,&mm,NULL);CHKERRQ(ierr);
    ierr = MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);CHKERRQ(ierr);
  }
  *A = Bred;
  ierr = ISDestroy(&iscol);CHKERRQ(ierr);
  ierr = MatDestroy(&Blocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
{
  PetscErrorCode ierr;
  PetscBool      has_const;
  const Vec      *vecs;
  Vec            *sub_vecs = NULL;
  PetscInt       i,k,n = 0;
  MPI_Comm       subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);CHKERRQ(ierr);

  if (PCTelescope_isActiveRank(sred)) {
    if (n) {
      ierr = VecDuplicateVecs(sred->xred,n,&sub_vecs);CHKERRQ(ierr);
    }
  }

  /* copy entries */
  for (k=0; k<n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt          st,ed;

    /* pull in vector x->xtmp */
    ierr = VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    if (sub_vecs) {
      /* copy vector entries into xred */
      ierr = VecGetArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
      if (sub_vecs[k]) {
        ierr = VecGetOwnershipRange(sub_vecs[k],&st,&ed);CHKERRQ(ierr);
        ierr = VecGetArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
        for (i=0; i<ed-st; i++) {
          LA_sub_vec[i] = x_array[i];
        }
        ierr = VecRestoreArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
    }
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    ierr = MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);CHKERRQ(ierr);
    ierr = VecDestroyVecs(n,&sub_vecs);CHKERRQ(ierr);
    if (nullspace->remove) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    if (nullspace->rmctx) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
{
  PetscErrorCode ierr;
  Mat            B;

  PetscFunctionBegin;
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  /* Propagate the nullspace if it exists */
  {
    MatNullSpace nullspace,sub_nullspace;
    ierr = MatGetNullSpace(B,&nullspace);CHKERRQ(ierr);
    if (nullspace) {
      ierr = PetscInfo(pc,"PCTelescope: generating nullspace (default)\n");CHKERRQ(ierr);
      ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nullspace,&sub_nullspace);CHKERRQ(ierr);
      if (PCTelescope_isActiveRank(sred)) {
        ierr = MatSetNullSpace(sub_mat,sub_nullspace);CHKERRQ(ierr);
        ierr = MatNullSpaceDestroy(&sub_nullspace);CHKERRQ(ierr);
      }
    }
  }
  /* Propagate the near nullspace if it exists */
  {
    MatNullSpace nearnullspace,sub_nearnullspace;
    ierr = MatGetNearNullSpace(B,&nearnullspace);CHKERRQ(ierr);
    if (nearnullspace) {
      ierr = PetscInfo(pc,"PCTelescope: generating near nullspace (default)\n");CHKERRQ(ierr);
      ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nearnullspace,&sub_nearnullspace);CHKERRQ(ierr);
      if (PCTelescope_isActiveRank(sred)) {
        ierr = MatSetNearNullSpace(sub_mat,sub_nearnullspace);CHKERRQ(ierr);
        ierr = MatNullSpaceDestroy(&sub_nearnullspace);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
{
  PC_Telescope   sred = (PC_Telescope)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    {
      MPI_Comm    comm,subcomm;
      PetscMPIInt comm_size,subcomm_size;
      DM          dm = NULL,subdm = NULL;

      ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
      subdm = private_PCTelescopeGetSubDM(sred);

      if (sred->psubcomm) {
        comm = PetscSubcommParent(sred->psubcomm);
        subcomm = PetscSubcommChild(sred->psubcomm);
        ierr = MPI_Comm_size(comm,&comm_size);CHKERRQ(ierr);
        ierr = MPI_Comm_size(subcomm,&subcomm_size);CHKERRQ(ierr);

        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent comm size reduction factor = %D\n",sred->redfactor);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size);CHKERRQ(ierr);
        switch (sred->subcommtype) {
        case PETSC_SUBCOMM_INTERLACED :
          ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: type = interlaced\n");CHKERRQ(ierr);
          break;
        case PETSC_SUBCOMM_CONTIGUOUS :
          ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: type = contiguous\n");CHKERRQ(ierr);
          break;
        default :
          SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"General subcomm type not supported by PCTelescope");
        }
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      } else {
        ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
        subcomm = sred->subcomm;
        if (!PCTelescope_isActiveRank(sred)) {
          subcomm = PETSC_COMM_SELF;
        }

        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPrintf(viewer,"subcomm: using user provided sub-communicator\n");CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }

      ierr = PetscViewerGetSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
      if (PCTelescope_isActiveRank(sred)) {
        ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);

        if (dm && sred->ignore_dm) {
          ierr = PetscViewerASCIIPrintf(subviewer,"ignoring DM\n");CHKERRQ(ierr);
        }
        if (sred->ignore_kspcomputeoperators) {
          ierr = PetscViewerASCIIPrintf(subviewer,"ignoring KSPComputeOperators\n");CHKERRQ(ierr);
        }
        switch (sred->sr_type) {
        case TELESCOPE_DEFAULT:
          ierr = PetscViewerASCIIPrintf(subviewer,"setup type: default\n");CHKERRQ(ierr);
          break;
        case TELESCOPE_DMDA:
          ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMDA auto-repartitioning\n");CHKERRQ(ierr);
          ierr = DMView_DA_Short(subdm,subviewer);CHKERRQ(ierr);
          break;
        case TELESCOPE_DMPLEX:
          ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMPLEX auto-repartitioning\n");CHKERRQ(ierr);
          break;
        case TELESCOPE_COARSEDM:
          ierr = PetscViewerASCIIPrintf(subviewer,"setup type: coarse DM\n");CHKERRQ(ierr);
          break;
        }

        if (dm) {
          PetscObject obj = (PetscObject)dm;
          ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object:");CHKERRQ(ierr);
          PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
          if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
          if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
          if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
          ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
          PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
        } else {
          ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object: NULL\n");CHKERRQ(ierr);
        }
        if (subdm) {
          PetscObject obj = (PetscObject)subdm;
          ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object:");CHKERRQ(ierr);
          PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
          if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
          if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
          if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
          ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
          PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
        } else {
          ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object: NULL\n");CHKERRQ(ierr);
        }

        ierr = KSPView(sred->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUp_Telescope(PC pc)
{
  PC_Telescope    sred = (PC_Telescope)pc->data;
  PetscErrorCode  ierr;
  MPI_Comm        comm,subcomm=MPI_COMM_NULL;
  PCTelescopeType sr_type;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* Determine type of setup/update */
  if (!pc->setupcalled) {
    PetscBool has_dm,same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm = PETSC_FALSE;
    ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
    if (dm) { has_dm = PETSC_TRUE; }
    if (has_dm) {
      /* check for dmda */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMDA,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMDA\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMPLEX\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMPLEX;
      }

      if (sred->use_coarse_dm) {
        ierr = PetscInfo(pc,"PCTelescope: using coarse DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_COARSEDM;
      }

      if (sred->ignore_dm) {
        ierr = PetscInfo(pc,"PCTelescope: ignoring DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DEFAULT;
      }
    }
    sred->sr_type = sr_type;
  } else {
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply                            = PCApply_Telescope_dmda;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX: SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
    break;
  case TELESCOPE_COARSEDM:
    pc->ops->apply                            = PCApply_Telescope_CoarseDM;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
    sred->pctelescope_matcreate_type          = NULL;
    sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
    sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
    break;
  default: SETERRQ(comm,PETSC_ERR_SUP,"Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
    break;
  }

  /* subcomm definition */
  if (!pc->setupcalled) {
    if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
      if (!sred->psubcomm) {
        ierr = PetscSubcommCreate(comm,&sred->psubcomm);CHKERRQ(ierr);
        ierr = PetscSubcommSetNumber(sred->psubcomm,sred->redfactor);CHKERRQ(ierr);
        ierr = PetscSubcommSetType(sred->psubcomm,sred->subcommtype);CHKERRQ(ierr);
        ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
        sred->subcomm = PetscSubcommChild(sred->psubcomm);
      }
    } else { /* query PC for DM, check communicators */
      DM          dm,dm_coarse_partition = NULL;
      MPI_Comm    comm_fine,comm_coarse_partition = MPI_COMM_NULL;
      PetscMPIInt csize_fine=0,csize_coarse_partition=0,cs[2],csg[2],cnt=0;
      PetscBool   isvalidsubcomm;

      ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
      comm_fine = PetscObjectComm((PetscObject)dm);
      ierr = DMGetCoarseDM(dm,&dm_coarse_partition);CHKERRQ(ierr);
      if (dm_coarse_partition) { cnt = 1; }
      ierr = MPI_Allreduce(MPI_IN_PLACE,&cnt,1,MPI_INT,MPI_SUM,comm_fine);CHKERRQ(ierr);
      if (cnt == 0) SETERRQ(comm_fine,PETSC_ERR_SUP,"Zero instances of a coarse DM were found");

      ierr = MPI_Comm_size(comm_fine,&csize_fine);CHKERRQ(ierr);
      if (dm_coarse_partition) {
        comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
        ierr = MPI_Comm_size(comm_coarse_partition,&csize_coarse_partition);CHKERRQ(ierr);
      }

      cs[0] = csize_fine;
      cs[1] = csize_coarse_partition;
      ierr = MPI_Allreduce(cs,csg,2,MPI_INT,MPI_MAX,comm_fine);CHKERRQ(ierr);
      if (csg[0] == csg[1]) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM uses the same size communicator as the parent DM attached to the PC");

      ierr = PCTelescopeTestValidSubcomm(comm_fine,comm_coarse_partition,&isvalidsubcomm);CHKERRQ(ierr);
      if (!isvalidsubcomm) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM communicator is not a sub-communicator of parentDM->comm");
      sred->subcomm = comm_coarse_partition;
    }
  }
  subcomm = sred->subcomm;

  /* internal KSP */
  if (!pc->setupcalled) {
    const char *prefix;

    if (PCTelescope_isActiveRank(sred)) {
      ierr = KSPCreate(subcomm,&sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(sred->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(sred->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(sred->ksp,"telescope_");CHKERRQ(ierr);
    }
  }

  /* setup */
  if (!pc->setupcalled && sred->pctelescope_setup_type) {
    ierr = sred->pctelescope_setup_type(pc,sred);CHKERRQ(ierr);
  }
  /* update */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
    if (sred->pctelescope_matnullspacecreate_type) {
      ierr = sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred);CHKERRQ(ierr);
    }
  } else {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
  }

  /* common - no construction */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSetOperators(sred->ksp,sred->Bred,sred->Bred);CHKERRQ(ierr);
    if (pc->setfromoptionscalled && !pc->setupcalled) {
      ierr = KSPSetFromOptions(sred->ksp);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,xred,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister(citation,&cited);CHKERRQ(ierr);

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  ierr = VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* copy vector entries into xred */
  ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  if (xred) {
    PetscScalar *LA_xred;
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArray(xred,&LA_xred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    ierr = VecRestoreArray(xred,&LA_xred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  /* solve */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSolve(sred->ksp,xred,yred);CHKERRQ(ierr);
    ierr = KSPCheckSolve(sred->ksp,pc,yred);CHKERRQ(ierr);
  }
  /* return vector */
  ierr = VecGetArray(xtmp,&array);CHKERRQ(ierr);
  if (yred) {
    const PetscScalar *LA_yred;
    ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArrayRead(yred,&LA_yred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    ierr = VecRestoreArrayRead(yred,&LA_yred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(xtmp,&array);CHKERRQ(ierr);
  ierr = VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol,PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  const PetscScalar *x_array;
  PetscBool         default_init_guess_value;

  PetscFunctionBegin;
  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  yred    = sred->yred;

  if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    ierr = PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n");CHKERRQ(ierr);
    /* pull in vector y->xtmp */
    ierr = VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    /* copy vector entries into xred */
    ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
    if (yred) {
      PetscScalar *LA_yred;
      ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
      ierr = VecGetArray(yred,&LA_yred);CHKERRQ(ierr);
      for (i=0; i<ed-st; i++) {
        LA_yred[i] = x_array[i];
      }
      ierr = VecRestoreArray(yred,&LA_yred);CHKERRQ(ierr);
    }
    ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  }

  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);CHKERRQ(ierr);
    if (!zeroguess) {
      ierr = KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);CHKERRQ(ierr);
    }
  }

  ierr = PCApply_Telescope(pc,x,y);CHKERRQ(ierr);

  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);CHKERRQ(ierr);
  }

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCReset_Telescope(PC pc)
{
  PC_Telescope   sred = (PC_Telescope)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = ISDestroy(&sred->isin);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&sred->scatter);CHKERRQ(ierr);
  ierr = VecDestroy(&sred->xred);CHKERRQ(ierr);
  ierr = VecDestroy(&sred->yred);CHKERRQ(ierr);
  ierr = VecDestroy(&sred->xtmp);CHKERRQ(ierr);
  ierr = MatDestroy(&sred->Bred);CHKERRQ(ierr);
  ierr = KSPReset(sred->ksp);CHKERRQ(ierr);
  if (sred->pctelescope_reset_type) {
    ierr = sred->pctelescope_reset_type(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_Telescope(PC pc)
{
  PC_Telescope   sred = (PC_Telescope)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Telescope(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&sred->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&sred->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(sred->dm_ctx);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_Telescope     sred = (PC_Telescope)pc->data;
  PetscErrorCode   ierr;
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscBool        flg;
  PetscSubcommType subcommtype;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscOptionsHead(PetscOptionsObject,"Telescope options");CHKERRQ(ierr);
  ierr = PetscOptionsEnum("-pc_telescope_subcomm_type","Subcomm type (interlaced or contiguous)","PCTelescopeSetSubcommType",PetscSubcommTypes,(PetscEnum)sred->subcommtype,(PetscEnum*)&subcommtype,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCTelescopeSetSubcommType(pc,subcommtype);CHKERRQ(ierr);
  }
  ierr = PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,NULL);CHKERRQ(ierr);
  if (sred->redfactor > size) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
  ierr = PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_telescope_use_coarse_dm","Define sub-communicator from the coarse DM","PCTelescopeSetUseCoarseDM",sred->use_coarse_dm,&sred->use_coarse_dm,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* PC implementation specific APIs */

static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (ksp) *ksp = red->ksp;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc,PetscSubcommType *subcommtype)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (subcommtype) *subcommtype = red->subcommtype;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc,PetscSubcommType subcommtype)
{
  PC_Telescope     red = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"You cannot change the subcommunicator type for PCTelescope after it has been set up.");
  red->subcommtype = subcommtype;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (fact) *fact = red->redfactor;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
{
  PC_Telescope     red = (PC_Telescope)pc->data;
  PetscMPIInt      size;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  if (fact <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be positive",fact);
  if (fact > size) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be <= comm.size",fact);
  red->redfactor = fact;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (v) *v = red->ignore_dm;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  red->ignore_dm = v;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc,PetscBool *v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (v) *v = red->use_coarse_dm;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc,PetscBool v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  red->use_coarse_dm = v;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  if (v) *v = red->ignore_kspcomputeoperators;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  red->ignore_kspcomputeoperators = v;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscFunctionBegin;
  *dm = private_PCTelescopeGetSubDM(red);
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subksp - the KSP defined on the smaller set of processes

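 Notes:
 The sub KSP is created during PCSetUp() and exists only on the ranks of the sub-communicator;
 on all other ranks the returned KSP is NULL. A minimal usage sketch (the PCMG choice below is
 illustrative only, not a requirement):
.vb
   KSP sub_ksp;
   PC  sub_pc;

   PCSetUp(pc);
   PCTelescopeGetKSP(pc,&sub_ksp);
   if (sub_ksp) { /* NULL on ranks which are idle during the solve */
     KSPGetPC(sub_ksp,&sub_pc);
     PCSetType(sub_pc,PCMG);
   }
.ve
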
 Level: advanced

@*/
PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes was reduced.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  fact - the reduction factor

 Level: advanced

@*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeSetReductionFactor - Sets the factor by which the size of the original communicator will be reduced.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor

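 Notes:
 A minimal usage sketch (the reduction factor of 4 is illustrative only):
.vb
   KSPGetPC(ksp,&pc);
   PCSetType(pc,PCTELESCOPE);
   PCTelescopeSetReductionFactor(pc,4); /* run the sub KSP on comm_size/4 ranks */
.ve
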
 Level: advanced

@*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

@*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore any DM

 Level: advanced

@*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse DM attached to DM associated with the PC will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

@*/
PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetUseCoarseDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeSetUseCoarseDM - Set a flag to query the DM attached to the PC if it also has a coarse DM

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_FALSE to ignore any coarse DM

 Notes:
 When you have specified to use a coarse DM, the communicator used to create the sub-KSP within PCTelescope
 will be that of the coarse DM. Hence the options -pc_telescope_reduction_factor and
 -pc_telescope_subcomm_type will no longer have any meaning.
 It is required that the communicators associated with the parent (fine) DM and the coarse DM are of different sizes.
 An error will occur if the size of the communicator associated with the coarse DM
 is the same as that of the parent DM.
 Furthermore, it is required that the communicator on the coarse DM is a sub-communicator of the parent.
 This will be checked at the time the preconditioner is setup and an error will occur if
 the coarse DM does not define a sub-communicator of that used by the parent DM.

 The particular Telescope setup invoked when using a coarse DM is agnostic with respect to the type of
 the DM used (e.g. it supports DMSHELL, DMPLEX, etc).

 Support is currently only provided for the case when you are using KSPSetComputeOperators()

 The user is required to compose a function with the parent DM to facilitate the transfer of fields (Vec) between the different decompositions defined by the fine and coarse DMs.
 In the user code, this is achieved via
.vb
   {
     DM dm_fine;
     PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
   }
.ve
 The signature of the user provided field scatter method is
.vb
   PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
.ve
 The user must provide support for both mode = SCATTER_FORWARD and mode = SCATTER_REVERSE.
 SCATTER_FORWARD implies the direction of transfer is from the parent (fine) DM to the coarse DM.

 Optionally, the user may also compose a function with the parent DM to facilitate the transfer
 of state variables between the fine and coarse DMs.
 In the context of a finite element discretization, an example state variable might be
 values associated with quadrature points within each element.
 A user provided state scatter method is composed via
.vb
   {
     DM dm_fine;
     PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
   }
.ve
 The signature of the user provided state scatter method is
.vb
   PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
.ve
 SCATTER_FORWARD implies the direction of transfer is from the fine DM to the coarse DM.
 The user is only required to support mode = SCATTER_FORWARD.
 No assumption is made about the data type of the state variables.
 These must be managed by the user and must be accessible from the DM.

 Care must be taken in defining the user context passed to KSPSetComputeOperators() which is to be
 associated with the sub-KSP residing within PCTelescope.
 In general, PCTelescope assumes that the context on the fine and coarse DM used with
 KSPSetComputeOperators() should be "similar" in type or origin.
 Specifically the following rules are used to infer what context on the sub-KSP should be.

 First the contexts from the KSP and the fine and coarse DMs are retrieved.
 Note that the special case of a DMSHELL context is queried.

.vb
   DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
   DMGetApplicationContext(dm_fine,&dmfine_appctx);
   DMShellGetContext(dm_fine,&dmfine_shellctx);

   DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
   DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
.ve

 The following rules are then enforced:

 1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
 KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,NULL);

 2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
 check that dmcoarse_appctx is also non-NULL. If this is true, then:
 KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);

 3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
 check that dmcoarse_shellctx is also non-NULL. If this is true, then:
 KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);

 If none of the above three tests pass, then PCTelescope cannot safely determine what
 context should be provided to KSPSetComputeOperators() for use with the sub-KSP.
 In this case, an additional mechanism is provided via a composed function which will return
 the actual context to be used. To use this feature you must compose the "getter" function
 with the coarse DM, e.g.
.vb
   {
     DM dm_coarse;
     PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
   }
.ve
 The signature of the user provided method is
.vb
   PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
.ve

 Level: advanced

@*/
PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscTryMethod(pc,"PCTelescopeSetUseCoarseDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if KSPComputeOperators will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

@*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators() on pc

 Level: advanced

@*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subdm - The re-partitioned DM

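 Notes:
 The re-partitioned DM is available only after PCSetUp() has been called. It is NULL on ranks
 which are idle during the solve, and also NULL if no re-partitioned DM exists (e.g. in the
 default setup mode). A minimal usage sketch:
.vb
   DM subdm;

   PCSetUp(pc);
   PCTelescopeGetDM(pc,&subdm);
.ve
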
 Level: advanced

@*/
PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  subcommtype - the subcommunicator type (see PetscSubcommType)

 Level: advanced

.seealso: PetscSubcommType, PetscSubcomm, PCTELESCOPE
@*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscTryMethod(pc,"PCTelescopeSetSubcommType_C",(PC,PetscSubcommType),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
 PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subcommtype - the subcommunicator type (see PetscSubcommType)

 Level: advanced

.seealso: PetscSubcomm, PetscSubcommType, PCTELESCOPE
@*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetSubcommType_C",(PC,PetscSubcommType*),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
   PCTELESCOPE - Runs a KSP solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.

   Options Database:
+  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
.  -pc_telescope_ignore_dm  - flag to indicate whether an attached DM should be ignored.
.  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. see PetscSubcomm for more information.
.  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether KSPSetComputeOperators should be used on the sub-KSP.
-  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse DM should be used to define the sub-communicator.

   Level: advanced

   Notes:
   Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
   creates a child sub-communicator (c') containing fewer MPI ranks than the original parent preconditioner (PC).
   The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
   sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
   This means there will be MPI ranks which will be idle during the application of this preconditioner.
   Additionally, in comparison with PCREDUNDANT, PCTELESCOPE can utilize an attached DM.

   The default type of the sub KSP (the KSP defined on c') is PREONLY.
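
   The sub KSP inherits the options prefix of the parent PC with "telescope_" appended to it.
   A minimal command line sketch (the executable name and the solver choices shown are illustrative only):
.vb
   mpiexec -n 16 ./app -ksp_type cg -pc_type telescope \
       -pc_telescope_reduction_factor 4 \
       -telescope_ksp_type preonly -telescope_pc_type bjacobi
.ve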

   There are three setup mechanisms for PCTelescope. Features supported by each type are described below.
   In the following, we will refer to the operators B and B'; these are the Bmat provided to the KSP on the
   communicators c and c' respectively.

   [1] Default setup
   The sub-communicator c' is created via PetscSubcommCreate().
   Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
   Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
   No support is provided for KSPSetComputeOperators().
   Currently there is no support for the flag -pc_use_amat.

   [2] DM aware setup
   If a DM is attached to the PC, it is re-partitioned on the sub-communicator c'.
   c' is created via PetscSubcommCreate().
   Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
   Currently only support for re-partitioning a DMDA is provided.
   Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
   Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
   Support is provided for KSPSetComputeOperators(). The user provided function and context is propagated to the sub KSP.
   This is fragile since the user must ensure that their user context is valid for use on c'.
   Currently there is no support for the flag -pc_use_amat.

   [3] Coarse DM setup
   If a DM (dmfine) is attached to the PC, dmfine is queried for a "coarse" DM (call this dmcoarse) via DMGetCoarseDM().
   PCTELESCOPE will interpret the coarse DM as being defined on a sub-communicator of c.
   The communicator associated with dmcoarse will define the c' to be used within PCTELESCOPE.
   PCTELESCOPE will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
   The intention of this setup type is that PCTELESCOPE will use an existing (e.g. user defined) communicator hierarchy, say as would be
   available with using multi-grid on unstructured meshes.
   This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
   Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
   Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
   There is no general method to permute field orderings, hence only KSPSetComputeOperators() is supported.
   The user must use PetscObjectComposeFunction() with dmfine to define the method to scatter fields from dmfine to dmcoarse.
   Propagation of the user context for KSPSetComputeOperators() on the sub KSP is attempted by querying the DM contexts associated with dmfine and dmcoarse. Alternatively, the user may use PetscObjectComposeFunction() with dmcoarse to define a method which will return the appropriate user context for KSPSetComputeOperators().
1290    Currently there is no support for the flag -pc_use_amat.
1291    This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);
1292    Further information about the user-provided methods required by this setup type are described here PCTelescopeSetUseCoarseDM().
1293 
   Developer Notes:
   During PCSetUp(), the B operator is scattered onto c'.
   Within PCApply(), the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
   Then, KSPSolve() is executed on the c' communicator.

   The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
   creation routine by default (this can be changed with -pc_telescope_subcomm_type). The sub KSP is run only on
   those ranks within the communicator which have color equal to zero.
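
   A minimal sketch of how the default sub-communicator is constructed (psubcomm and redfactor are illustrative names for the internal PetscSubcomm and the reduction factor):

.vb
   PetscSubcomm psubcomm;
   PetscSubcommCreate(comm,&psubcomm);
   PetscSubcommSetNumber(psubcomm,redfactor);              // number of sub-communicators
   PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_INTERLACED); // cf. -pc_telescope_subcomm_type
.ve
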
   The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
   In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
   a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c').
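
   For example, a near nullspace attached to B as below will be mapped onto c' (a sketch, assuming vecs is an array of nv nullspace vectors defined on comm):

.vb
   MatNullSpace nsp;
   MatNullSpaceCreate(comm,PETSC_FALSE,nv,vecs,&nsp);
   MatSetNearNullSpace(B,nsp);   // telescope extracts and re-maps these vectors onto B'
   MatNullSpaceDestroy(&nsp);
.ve
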
   The telescoping preconditioner can re-partition an attached DM if it is a DMDA (2D or 3D -
   support for 1D DMDAs is not provided). If a DMDA is found, a topologically equivalent DMDA is created on c'
   and this new DM is attached to the sub KSP. The design of telescope is such that it should be possible to extend
   support for re-partitioning other DM types (e.g. DMPLEX). The user can supply a flag to ignore attached DMs,
   as shown in the sketch below.
   Alternatively, user-provided re-partitioned DMs can be used via -pc_telescope_use_coarse_dm.
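
   To ignore an attached DM (a sketch; the corresponding command line option is -pc_telescope_ignore_dm):

.vb
   PCTelescopeSetIgnoreDM(pc,PETSC_TRUE);
.ve
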
   With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.

   When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
   into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatCreateSubMatrices(), (iii) fusing the
   local (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat().
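
   Step (iii) amounts to the following (a sketch; Bseq names the sequential chunk extracted in step (ii), and subcomm names the communicator c'):

.vb
   Mat Bred;
   MatCreateMPIMatConcatenateSeqMat(subcomm,Bseq,PETSC_DECIDE,MAT_INITIAL_MATRIX,&Bred);
.ve
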
   Limitations/improvements include the following.
   VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage.
   A unified mechanism to query for user contexts as required by KSPSetComputeOperators() and MatNullSpaceSetFunction() is also required.

   The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
   and performing P^T.A.P. It might be more efficient to use MatPermute(); we opted for P^T.A.P since
   VecPermute() does not appear to support the use case required here. By computing P, one can permute both the operator and RHS in a
   consistent manner.
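
   The permutation is applied as follows (a sketch; P names the assembled permutation matrix and b the original RHS):

.vb
   Mat Bperm;
   Vec bperm;
   MatPtAP(B,P,MAT_INITIAL_MATRIX,1.0,&Bperm);   // Bperm = P^T.B.P
   MatCreateVecs(Bperm,NULL,&bperm);
   MatMultTranspose(P,b,bperm);                  // permute the RHS consistently
.ve
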
   Mapping of vectors (default setup mode) is performed in the following way.
   Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
   Using the interlaced creation routine, the ranks in c with color = 0 will be ranks 0 and 2.
   We perform the scatter to the sub-communicator in the following way.
   [1] Given a vector x defined on communicator c

.vb
   rank(c)  local values of x
   ------- ----------------------------------------
        0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
        1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
        2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
        3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
.ve

   scatter into xtmp, also defined on comm c, so that we have the following values

.vb
   rank(c)  local values of xtmp
   ------- ----------------------------------------------------------------------------
        0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
        1   [ ]
        2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
        3   [ ]
.ve

   The entries on ranks 1 and 3 (ranks which do not have color = 0 in c') have no values.

   [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
   Ranks 0 and 2 are the only ranks in the subcomm which have color = 0.

.vb
   rank(c')  local values of xred
   -------- ----------------------------------------------------------------------------
         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
         1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
.ve
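
   In code, steps [1] and [2] resemble the following (a sketch; scatter, xtmp, xred, nlocal and isActiveRank are illustrative names for internal objects):

.vb
   VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);
   VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);
   if (isActiveRank) {   // ranks of c with color = 0
     const PetscScalar *larray;
     PetscScalar       *rarray;
     VecGetArrayRead(xtmp,&larray);
     VecGetArray(xred,&rarray);
     PetscArraycpy(rarray,larray,nlocal);   // local copy into xred
     VecRestoreArray(xred,&rarray);
     VecRestoreArrayRead(xtmp,&larray);
   }
.ve
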
  Contributed by Dave May

  Reference:
  Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913

.seealso:  PCTelescopeGetKSP(), PCTelescopeGetDM(), PCTelescopeGetReductionFactor(), PCTelescopeSetReductionFactor(), PCTelescopeGetIgnoreDM(), PCTelescopeSetIgnoreDM(), PCREDUNDANT
M*/
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
{
  PetscErrorCode       ierr;
  struct _PC_Telescope *sred;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&sred);CHKERRQ(ierr);
  sred->psubcomm       = NULL;
  sred->subcommtype    = PETSC_SUBCOMM_INTERLACED;
  sred->subcomm        = MPI_COMM_NULL;
  sred->redfactor      = 1;
  sred->ignore_dm      = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  sred->use_coarse_dm  = PETSC_FALSE;
  pc->data             = (void*)sred;

  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",PCTelescopeGetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",PCTelescopeSetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetUseCoarseDM_C",PCTelescopeGetUseCoarseDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetUseCoarseDM_C",PCTelescopeSetUseCoarseDM_Telescope);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}