#include <petsc/private/matimpl.h>               /*I "petscmat.h" I*/

/* Logging support */
PetscClassId MAT_PARTITIONING_CLASSID;

/*
   Simplest partitioning, keeps the current partitioning.
*/
static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
{
  PetscErrorCode ierr;
  PetscInt       m;
  PetscMPIInt    rank,size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);CHKERRQ(ierr);
  if (part->n != size) {
    const char *prefix;
    ierr = PetscObjectGetOptionsPrefix((PetscObject)part,&prefix);CHKERRQ(ierr);
    SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor",prefix ? prefix : "");
  }
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);CHKERRQ(ierr);

  ierr = MatGetLocalSize(part->adj,&m,NULL);CHKERRQ(ierr);
  ierr = ISCreateStride(PetscObjectComm((PetscObject)part),m,rank,0,partitioning);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   Partitions the rows into (nearly) equal-sized contiguous blocks to rebalance the computation.
*/
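/*
   Worked example (illustration only): with M = 10 global rows and nparts = 3 the code below
   builds parts[] = {4,3,3} and then its running sum {4,7,10}; for a row index i,
   PetscFindInt() (after the sign fix-up) yields the index of the first running-sum entry
   strictly greater than i, so rows 0-3 go to part 0, rows 4-6 to part 1, rows 7-9 to part 2.
*/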
static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part,IS *partitioning)
{
  PetscErrorCode ierr;
  PetscInt       m,M,nparts,*indices,r,d,*parts,i,start,end,loc;

  PetscFunctionBegin;
  ierr   = MatGetSize(part->adj,&M,NULL);CHKERRQ(ierr);
  ierr   = MatGetLocalSize(part->adj,&m,NULL);CHKERRQ(ierr);
  nparts = part->n;
  ierr   = PetscMalloc1(nparts,&parts);CHKERRQ(ierr);
  d      = M/nparts;
  for (i=0; i<nparts; i++) parts[i] = d;
  r = M%nparts;
  for (i=0; i<r; i++) parts[i] += 1;
  for (i=1; i<nparts; i++) parts[i] += parts[i-1];
  ierr = PetscMalloc1(m,&indices);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(part->adj,&start,&end);CHKERRQ(ierr);
  for (i=start; i<end; i++) {
    ierr = PetscFindInt(i,nparts,parts,&loc);CHKERRQ(ierr);
    if (loc<0) loc = -(loc+1);
    else loc = loc+1;
    indices[i-start] = loc;
  }
  ierr = PetscFree(parts);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)part),m,indices,PETSC_OWN_POINTER,partitioning);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
{
  PetscErrorCode ierr;
  PetscInt       cell,n,N,p,rstart,rend,*color;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);CHKERRQ(ierr);
  if (part->n != size) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Currently only supports one domain per processor");
  p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
  if (p*p != part->n) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires \"perfect square\" number of domains");

  ierr = MatGetSize(part->adj,&N,NULL);CHKERRQ(ierr);
  n    = (PetscInt)PetscSqrtReal((PetscReal)N);
  if (n*n != N) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires square domain");
  if (n%p != 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Square partitioning requires p to divide n");
  ierr = MatGetOwnershipRange(part->adj,&rstart,&rend);CHKERRQ(ierr);
  ierr = PetscMalloc1(rend-rstart,&color);CHKERRQ(ierr);
  /* for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
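  /*
     Each global row corresponds to a cell (cell/n, cell%n) of an n x n grid, which is split
     into a p x p array of square blocks; the block index is the assigned part. For example
     (illustration only), with n = 4 and p = 2 the rows are colored
         0 0 1 1
         0 0 1 1
         2 2 3 3
         2 2 3 3
  */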
  for (cell=rstart; cell<rend; cell++) {
    color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
  }
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)part),rend-rstart,color,PETSC_OWN_POINTER,partitioning);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Current;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Average;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Square;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(0);
}

/* gets as input the "sizes" array computed by ParMetis_*_NodeND and returns
       seps[  0 :         2*p) : the start and end node of each subdomain
       seps[2*p : 2*p+2*(p-1)) : the start and end node of each separator
     levels[  0 :         p-1) : level in the tree for each separator (-1 root, -2 and -3 first level and so on)
   The arrays must be large enough
*/
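/*
   Rough layout (assumed ParMETIS NodeND convention): sizes[] has 2*p-1 entries, the first p
   holding the subdomain sizes and the remaining p-1 the separator sizes, ordered from the
   finest separators up to the root separator in sizes[2*p-2]; the two passes below turn
   these into start/end offsets and a tree level for each separator.
*/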
PETSC_INTERN PetscErrorCode MatPartitioningSizesToSep_Private(PetscInt p, PetscInt sizes[], PetscInt seps[], PetscInt level[])
{
  PetscInt       l2p,i,pTree,pStartTree;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  l2p = PetscLog2Real(p);
  if (l2p - (PetscInt)PetscLog2Real(p)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"%D is not a power of 2",p);
  if (!p) PetscFunctionReturn(0);
  ierr = PetscArrayzero(seps,2*p-2);CHKERRQ(ierr);
  ierr = PetscArrayzero(level,p-1);CHKERRQ(ierr);
  seps[2*p-2] = sizes[2*p-2];
  pTree = p;
  pStartTree = 0;
  /* bottom-up pass: accumulate the subtree sizes into seps[] */
  while (pTree != 1) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      seps[i] += sizes[i];
      seps[pStartTree + pTree + (i-pStartTree)/2] += seps[i];
    }
    pStartTree += pTree;
    pTree = pTree/2;
  }
  seps[2*p-2] -= sizes[2*p-2];

  /* top-down pass: convert the accumulated sizes into start offsets and record the separator levels */
  pStartTree = 2*p-2;
  pTree      = 1;
  while (pStartTree > 0) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      PetscInt k = 2*i - (pStartTree +2*pTree);
      PetscInt n = seps[k+1];

      seps[k+1]  = seps[i]   - sizes[k+1];
      seps[k]    = seps[k+1] + sizes[k+1] - n - sizes[k];
      level[i-p] = -pTree - i + pStartTree;
    }
    pTree *= 2;
    pStartTree -= pTree;
  }
  /* I know there should be a formula */
  ierr = PetscSortIntWithArrayPair(p-1,seps+p,sizes+p,level);CHKERRQ(ierr);
  for (i=2*p-2;i>=0;i--) { seps[2*i] = seps[i]; seps[2*i+1] = seps[i] + PetscMax(sizes[i] - 1,0); }
  PetscFunctionReturn(0);
}

/* ===========================================================================================*/

PetscFunctionList MatPartitioningList              = NULL;
PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;

/*@C
   MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.

   Not Collective

   Input Parameters:
+  sname - name of the partitioning (for example MATPARTITIONINGCURRENT or "parmetis")
-  function - function pointer that creates the partitioning type

   Level: developer

   Sample usage:
.vb
   MatPartitioningRegister("my_part",MyPartCreate);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatPartitioningSetType(part,"my_part")
   or at runtime via the option
$     -mat_partitioning_type my_part

.seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
@*/
PetscErrorCode  MatPartitioningRegister(const char sname[],PetscErrorCode (*function)(MatPartitioning))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatInitializePackage();CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&MatPartitioningList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   MatPartitioningGetType - Gets the partitioning method type and name (as a string)
        from the partitioning context.

   Not Collective

   Input Parameter:
.  partitioning - the partitioning context

   Output Parameter:
.  type - partitioner type

   Level: intermediate

@*/
PetscErrorCode  MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(partitioning,MAT_PARTITIONING_CLASSID,1);
  PetscValidPointer(type,2);
  *type = ((PetscObject)partitioning)->type_name;
  PetscFunctionReturn(0);
}

/*@C
   MatPartitioningSetNParts - Set how many partitions need to be created;
        by default this is one per processor. Certain partitioning schemes may
        in fact only support that option.

   Not Collective

   Input Parameters:
+  partitioning - the partitioning context
-  n - the number of partitions

   Level: intermediate

.seealso: MatPartitioningCreate(), MatPartitioningApply()
@*/
PetscErrorCode  MatPartitioningSetNParts(MatPartitioning part,PetscInt n)
{
  PetscFunctionBegin;
  part->n = n;
  PetscFunctionReturn(0);
}

/*@
   MatPartitioningApplyND - Gets a nested dissection partitioning for a matrix.

   Collective on Mat

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.  partitioning - the partitioning. For each local node, a positive value indicates the processor
                   number the node has been assigned to. Negative x values indicate the separator level -(x+1).

   Level: beginner

   Notes:
   The user can define additional partitionings; see MatPartitioningRegister().

.seealso:  MatPartitioningRegister(), MatPartitioningCreate(),
           MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
           ISPartitioningCount()
@*/
PetscErrorCode  MatPartitioningApplyND(MatPartitioning matp,IS *partitioning)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(matp,MAT_PARTITIONING_CLASSID,1);
  PetscValidPointer(partitioning,2);
  if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!matp->ops->applynd) SETERRQ1(PetscObjectComm((PetscObject)matp),PETSC_ERR_SUP,"Nested dissection not provided by MatPartitioningType %s",((PetscObject)matp)->type_name);
  ierr = PetscLogEventBegin(MAT_PartitioningND,matp,0,0,0);CHKERRQ(ierr);
  ierr = (*matp->ops->applynd)(matp,partitioning);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PartitioningND,matp,0,0,0);CHKERRQ(ierr);

  ierr = MatPartitioningViewFromOptions(matp,NULL,"-mat_partitioning_view");CHKERRQ(ierr);
  ierr = ISViewFromOptions(*partitioning,NULL,"-mat_partitioning_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   MatPartitioningApply - Gets a partitioning for a matrix.

   Collective on Mat

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.  partitioning - the partitioning. For each local node this gives the processor
                   number that the node is assigned to.

   Options Database Keys:
   To specify the partitioning through the options database, use one of
   the following
$    -mat_partitioning_type parmetis, -mat_partitioning_type current
   To see the partitioning result
$    -mat_partitioning_view

   Level: beginner

   Notes:
   The user can define additional partitionings; see MatPartitioningRegister().
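
   Sample usage (a minimal sketch; adj is assumed to be an adjacency matrix, e.g. created with MatCreateMPIAdj()):
.vb
   MatPartitioning part;
   IS              is;
   MatPartitioningCreate(PETSC_COMM_WORLD,&part);
   MatPartitioningSetAdjacency(part,adj);
   MatPartitioningSetFromOptions(part);
   MatPartitioningApply(part,&is);
   ISDestroy(&is);
   MatPartitioningDestroy(&part);
.ve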

.seealso:  MatPartitioningRegister(), MatPartitioningCreate(),
           MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
           ISPartitioningCount()
@*/
PetscErrorCode  MatPartitioningApply(MatPartitioning matp,IS *partitioning)
{
  PetscErrorCode ierr;
  PetscBool      viewbalance,improve;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(matp,MAT_PARTITIONING_CLASSID,1);
  PetscValidPointer(partitioning,2);
  if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!matp->ops->apply) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
  ierr = PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);CHKERRQ(ierr);
  ierr = (*matp->ops->apply)(matp,partitioning);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);CHKERRQ(ierr);

  ierr = MatPartitioningViewFromOptions(matp,NULL,"-mat_partitioning_view");CHKERRQ(ierr);
  ierr = ISViewFromOptions(*partitioning,NULL,"-mat_partitioning_view");CHKERRQ(ierr);

  ierr = PetscObjectOptionsBegin((PetscObject)matp);CHKERRQ(ierr);
  viewbalance = PETSC_FALSE;
  ierr = PetscOptionsBool("-mat_partitioning_view_imbalance","Display imbalance information of a partition",NULL,PETSC_FALSE,&viewbalance,NULL);CHKERRQ(ierr);
  improve = PETSC_FALSE;
  ierr = PetscOptionsBool("-mat_partitioning_improve","Improve the quality of a partition",NULL,PETSC_FALSE,&improve,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);

  if (improve) {
    ierr = MatPartitioningImprove(matp,partitioning);CHKERRQ(ierr);
  }

  if (viewbalance) {
    ierr = MatPartitioningViewImbalance(matp,*partitioning);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
   MatPartitioningImprove - Improves the quality of a given partition.

   Collective on Mat

   Input Parameters:
+  matp - the matrix partitioning object
-  partitioning - the partitioning. For each local node this gives the processor
                   number that the node is assigned to.

   Output Parameter:
.  partitioning - the improved partitioning. For each local node this gives the processor
                   number that the node is assigned to.

   Options Database Keys:
   To improve the quality of the partition
$    -mat_partitioning_improve

   Level: beginner

.seealso:  MatPartitioningApply(), MatPartitioningCreate(),
           MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
           ISPartitioningCount()
@*/
PetscErrorCode  MatPartitioningImprove(MatPartitioning matp,IS *partitioning)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(matp,MAT_PARTITIONING_CLASSID,1);
  PetscValidPointer(partitioning,2);
  if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  ierr = PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);CHKERRQ(ierr);
  if (matp->ops->improve) {
    ierr = (*matp->ops->improve)(matp,partitioning);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   MatPartitioningViewImbalance - Display partitioning imbalance information.

   Collective on MatPartitioning

   Input Parameters:
+  matp - the matrix partitioning object
-  partitioning - the partitioning. For each local node this gives the processor
                   number that the node is assigned to.

   Options Database Keys:
   To see the partitioning imbalance information
$    -mat_partitioning_view_imbalance

   Level: beginner
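   Example output (illustrative numbers only):
.vb
   Partitioning Imbalance Info: Max 2600, Min 2400, Avg 2500, R 1.08333
.ve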

.seealso:  MatPartitioningApply(), MatPartitioningView()
@*/
PetscErrorCode  MatPartitioningViewImbalance(MatPartitioning matp, IS partitioning)
{
  PetscErrorCode  ierr;
  PetscInt        nparts,*subdomainsizes,*subdomainsizes_tmp,nlocal,i,maxsub,minsub,avgsub;
  const PetscInt  *indices;
  PetscViewer     viewer;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(matp,MAT_PARTITIONING_CLASSID,1);
  PetscValidHeaderSpecific(partitioning,IS_CLASSID,2);
  nparts = matp->n;
  ierr = PetscCalloc2(nparts,&subdomainsizes,nparts,&subdomainsizes_tmp);CHKERRQ(ierr);
  ierr = ISGetLocalSize(partitioning,&nlocal);CHKERRQ(ierr);
  ierr = ISGetIndices(partitioning,&indices);CHKERRQ(ierr);
  for (i=0;i<nlocal;i++) {
    subdomainsizes_tmp[indices[i]] += matp->vertex_weights? matp->vertex_weights[i]:1;
  }
  ierr = MPI_Allreduce(subdomainsizes_tmp,subdomainsizes,nparts,MPIU_INT,MPI_SUM, PetscObjectComm((PetscObject)matp));CHKERRQ(ierr);
  ierr = ISRestoreIndices(partitioning,&indices);CHKERRQ(ierr);
  minsub = PETSC_MAX_INT, maxsub = PETSC_MIN_INT, avgsub=0;
  for (i=0; i<nparts; i++) {
    minsub = PetscMin(minsub,subdomainsizes[i]);
    maxsub = PetscMax(maxsub,subdomainsizes[i]);
    avgsub += subdomainsizes[i];
  }
  avgsub /=nparts;
  ierr = PetscFree2(subdomainsizes,subdomainsizes_tmp);CHKERRQ(ierr);
  ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp),&viewer);CHKERRQ(ierr);
  ierr = MatPartitioningView(matp,viewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"Partitioning Imbalance Info: Max %D, Min %D, Avg %D, R %g\n",maxsub, minsub, avgsub, (double)(maxsub/(PetscReal)minsub));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the thing to be
      partitioned.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  adj - the adjacency matrix

   Level: beginner

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part,MAT_PARTITIONING_CLASSID,1);
  PetscValidHeaderSpecific(adj,MAT_CLASSID,2);
  part->adj = adj;
  PetscFunctionReturn(0);
}

/*@
   MatPartitioningDestroy - Destroys the partitioning context.

   Collective on Partitioning

   Input Parameter:
.  part - the partitioning context

   Level: beginner

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode  MatPartitioningDestroy(MatPartitioning *part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*part),MAT_PARTITIONING_CLASSID,1);
  if (--((PetscObject)(*part))->refct > 0) {*part = NULL; PetscFunctionReturn(0);}

  if ((*part)->ops->destroy) {
    ierr = (*(*part)->ops->destroy)((*part));CHKERRQ(ierr);
  }
  ierr = PetscFree((*part)->vertex_weights);CHKERRQ(ierr);
  ierr = PetscFree((*part)->part_weights);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

   Logically Collective on Partitioning

   Input Parameters:
+  part - the partitioning context
-  weights - the weights; on each process this array must have the same size as the number of local rows

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().
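
   A minimal usage sketch (assuming nlocal is the number of local rows of the adjacency matrix):
.vb
   PetscInt *weights,i;
   PetscMalloc1(nlocal,&weights);
   for (i=0; i<nlocal; i++) weights[i] = 1;
   MatPartitioningSetVertexWeights(part,weights);
.ve
   After this call PETSc owns the array, so the caller must not free it.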

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetPartitionWeights()
@*/
PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning part,const PetscInt weights[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part,MAT_PARTITIONING_CLASSID,1);
  ierr = PetscFree(part->vertex_weights);CHKERRQ(ierr);
  part->vertex_weights = (PetscInt*)weights;
  PetscFunctionReturn(0);
}

/*@C
   MatPartitioningSetPartitionWeights - Sets the weights for each partition.

   Logically Collective on Partitioning

   Input Parameters:
+  part - the partitioning context
-  weights - An array of size nparts that is used to specify the fraction of
             vertex weight that should be distributed to each sub-domain for
             the balance constraint. If all of the sub-domains are to be of
             the same size, then each of the nparts elements should be set
             to a value of 1/nparts. Note that the sum of all of the weights
             should be one.

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().
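
   A minimal usage sketch (illustration only; equal-sized parts, with nparts set to the number of partitions beforehand):
.vb
   PetscReal *pweights;
   PetscInt  i;
   PetscMalloc1(nparts,&pweights);
   for (i=0; i<nparts; i++) pweights[i] = 1.0/nparts;
   MatPartitioningSetPartitionWeights(part,pweights);
.ve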

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights()
@*/
PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning part,const PetscReal weights[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part,MAT_PARTITIONING_CLASSID,1);
  ierr = PetscFree(part->part_weights);CHKERRQ(ierr);
  part->part_weights = (PetscReal*)weights;
  PetscFunctionReturn(0);
}

/*@
   MatPartitioningSetUseEdgeWeights - Set a flag to indicate whether or not to use edge weights.

   Logically Collective on Partitioning

   Input Parameters:
+  part - the partitioning context
-  use_edge_weights - the flag indicating whether or not to use edge weights. By default no edge weights are used,
                      that is, use_edge_weights is set to PETSC_FALSE. If use_edge_weights is set to PETSC_TRUE, users
                      need to make sure legal edge weights are stored in the ADJ matrix.

   Level: beginner

   Options Database Keys:
.  -mat_partitioning_use_edge_weights - (true or false)
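
   Notes:
   A minimal sketch (illustration only; the CSR arrays ia and ja and the edge weight array wts are assumed
   to have been allocated with PetscMalloc() and filled by the caller):
.vb
   Mat adj;
   MatCreateMPIAdj(PETSC_COMM_WORLD,nlocalrows,N,ia,ja,wts,&adj);
   MatPartitioningSetAdjacency(part,adj);
   MatPartitioningSetUseEdgeWeights(part,PETSC_TRUE);
.ve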

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights(), MatPartitioningSetPartitionWeights()
@*/
PetscErrorCode  MatPartitioningSetUseEdgeWeights(MatPartitioning part,PetscBool use_edge_weights)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part,MAT_PARTITIONING_CLASSID,1);
  part->use_edge_weights = use_edge_weights;
  PetscFunctionReturn(0);
}

/*@
   MatPartitioningGetUseEdgeWeights - Get a flag that indicates whether or not edge weights are used.

   Logically Collective on Partitioning

   Input Parameter:
.  part - the partitioning context

   Output Parameter:
.  use_edge_weights - the flag indicating whether or not edge weights are used

   Level: beginner

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights(), MatPartitioningSetPartitionWeights(),
          MatPartitioningSetUseEdgeWeights()
@*/
PetscErrorCode  MatPartitioningGetUseEdgeWeights(MatPartitioning part,PetscBool *use_edge_weights)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part,MAT_PARTITIONING_CLASSID,1);
  PetscValidPointer(use_edge_weights,2);
  *use_edge_weights = part->use_edge_weights;
  PetscFunctionReturn(0);
}

/*@
   MatPartitioningCreate - Creates a partitioning context.

   Collective

   Input Parameter:
.   comm - MPI communicator

   Output Parameter:
.  newp - location to put the context

   Level: beginner

.seealso: MatPartitioningSetType(), MatPartitioningApply(), MatPartitioningDestroy(),
          MatPartitioningSetAdjacency()

@*/
PetscErrorCode  MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
{
  MatPartitioning part;
  PetscErrorCode  ierr;
  PetscMPIInt     size;

  PetscFunctionBegin;
  *newp = NULL;

  ierr = MatInitializePackage();CHKERRQ(ierr);
  ierr = PetscHeaderCreate(part,MAT_PARTITIONING_CLASSID,"MatPartitioning","Matrix/graph partitioning","MatOrderings",comm,MatPartitioningDestroy,MatPartitioningView);CHKERRQ(ierr);
  part->vertex_weights = NULL;
  part->part_weights   = NULL;
  part->use_edge_weights = PETSC_FALSE; /* By default we don't use edge weights */

  ierr    = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  part->n = (PetscInt)size;

  *newp = part;
  PetscFunctionReturn(0);
}

/*@C
   MatPartitioningViewFromOptions - View a partitioning context from the options database.

   Collective on MatPartitioning

   Input Parameters:
+  A - the partitioning context
.  obj - Optional object
-  name - command line option

   Level: intermediate

.seealso:  MatPartitioning, MatPartitioningView(), PetscObjectViewFromOptions(), MatPartitioningCreate()
@*/
PetscErrorCode  MatPartitioningViewFromOptions(MatPartitioning A,PetscObject obj,const char name[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_PARTITIONING_CLASSID,1);
  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   MatPartitioningView - Prints the partitioning data structure.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  viewer - optional visualization context

   Level: intermediate

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open alternative visualization contexts with
.     PetscViewerASCIIOpen() - output to a specified file

.seealso: PetscViewerASCIIOpen()
@*/
PetscErrorCode  MatPartitioningView(MatPartitioning part,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part,MAT_PARTITIONING_CLASSID,1);
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(part,1,viewer,2);

  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)part,viewer);CHKERRQ(ierr);
    if (part->vertex_weights) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Using vertex weights\n");CHKERRQ(ierr);
    }
  }
  if (part->ops->view) {
    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
    ierr = (*part->ops->view)(part,viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
   MatPartitioningSetType - Sets the type of partitioner to use

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  type - a known method

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)
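
   Sample usage:
.vb
   MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);
.ve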

   Level: intermediate

.seealso: MatPartitioningCreate(), MatPartitioningApply(), MatPartitioningType

@*/
PetscErrorCode  MatPartitioningSetType(MatPartitioning part,MatPartitioningType type)
{
  PetscErrorCode ierr,(*r)(MatPartitioning);
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part,MAT_PARTITIONING_CLASSID,1);
  PetscValidCharPointer(type,2);

  ierr = PetscObjectTypeCompare((PetscObject)part,type,&match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  if (part->ops->destroy) {
    ierr = (*part->ops->destroy)(part);CHKERRQ(ierr);
    part->ops->destroy = NULL;
  }
  part->setupcalled = 0;
  part->data        = NULL;
  ierr = PetscMemzero(part->ops,sizeof(struct _MatPartitioningOps));CHKERRQ(ierr);

  ierr = PetscFunctionListFind(MatPartitioningList,type,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown partitioning type %s",type);

  ierr = (*r)(part);CHKERRQ(ierr);

  ierr = PetscFree(((PetscObject)part)->type_name);CHKERRQ(ierr);
  ierr = PetscStrallocpy(type,&((PetscObject)part)->type_name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   MatPartitioningSetFromOptions - Sets various partitioning options from the
        options database.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)
$  -mat_partitioning_nparts - number of subgraphs

   Notes:
    If the partitioner has not been set by the user it uses one of the installed partitioners such as ParMETIS. If there are
   no installed partitioners it uses MATPARTITIONINGCURRENT, which means no repartitioning.

   Level: beginner

@*/
PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning part)
{
  PetscErrorCode ierr;
  PetscBool      flag;
  char           type[256];
  const char     *def;

  PetscFunctionBegin;
  ierr = PetscObjectOptionsBegin((PetscObject)part);CHKERRQ(ierr);
  if (!((PetscObject)part)->type_name) {
#if defined(PETSC_HAVE_PARMETIS)
    def = MATPARTITIONINGPARMETIS;
#elif defined(PETSC_HAVE_CHACO)
    def = MATPARTITIONINGCHACO;
#elif defined(PETSC_HAVE_PARTY)
    def = MATPARTITIONINGPARTY;
#elif defined(PETSC_HAVE_PTSCOTCH)
    def = MATPARTITIONINGPTSCOTCH;
#else
    def = MATPARTITIONINGCURRENT;
#endif
  } else {
    def = ((PetscObject)part)->type_name;
  }
  ierr = PetscOptionsFList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);CHKERRQ(ierr);
  if (flag) {
    ierr = MatPartitioningSetType(part,type);CHKERRQ(ierr);
  }

  ierr = PetscOptionsInt("-mat_partitioning_nparts","number of fine parts",NULL,part->n,&part->n,&flag);CHKERRQ(ierr);

  ierr = PetscOptionsBool("-mat_partitioning_use_edge_weights","whether or not to use edge weights",NULL,part->use_edge_weights,&part->use_edge_weights,&flag);CHKERRQ(ierr);

  /*
    Set the type if it was never set.
  */
  if (!((PetscObject)part)->type_name) {
    ierr = MatPartitioningSetType(part,def);CHKERRQ(ierr);
  }

  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}