static char help[] = "Partition tiny grid.\n\n";

/*T
   Concepts: partitioning
   Processors: 4
T*/

/*
  Include "petscmat.h" so that we can use matrices.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc,char **args)
{
  Mat             A;
  PetscErrorCode  ierr;
  PetscMPIInt     rank,size;
  PetscInt        *ia,*ja;
  MatPartitioning part;
  IS              is,isn;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  if (size != 4) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"Must run with 4 processors");
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);

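  /*
     Each process supplies the adjacency of its 4 consecutive vertices of a
     4 x 4 grid graph (16 vertices total) in CSR-like form: ia[] holds the
     row offsets into ja[], and ja[] lists the global neighbors of each vertex.
  */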
  ierr = PetscMalloc1(5,&ia);CHKERRQ(ierr);
  ierr = PetscMalloc1(16,&ja);CHKERRQ(ierr);
  if (!rank) {
    ja[0] = 1; ja[1] = 4; ja[2] = 0; ja[3] = 2; ja[4] = 5; ja[5] = 1; ja[6] = 3; ja[7] = 6;
    ja[8] = 2; ja[9] = 7;
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
  } else if (rank == 1) {
    ja[0] = 0; ja[1] = 5; ja[2] = 8; ja[3] = 1; ja[4] = 4; ja[5] = 6; ja[6] = 9; ja[7] = 2;
    ja[8] = 5; ja[9] = 7; ja[10] = 10; ja[11] = 3; ja[12] = 6; ja[13] = 11;
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
  } else if (rank == 2) {
    ja[0] = 4; ja[1] = 9; ja[2] = 12; ja[3] = 5; ja[4] = 8; ja[5] = 10; ja[6] = 13; ja[7] = 6;
    ja[8] = 9; ja[9] = 11; ja[10] = 14; ja[11] = 7; ja[12] = 10; ja[13] = 15;
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
  } else {
    ja[0] = 8; ja[1] = 13; ja[2] = 9; ja[3] = 12; ja[4] = 14; ja[5] = 10; ja[6] = 13; ja[7] = 15;
    ja[8] = 11; ja[9] = 14;
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
  }

  ierr = MatCreateMPIAdj(PETSC_COMM_WORLD,4,16,ia,ja,NULL,&A);CHKERRQ(ierr);
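  /*
     Note: MatCreateMPIAdj() takes ownership of the ia and ja arrays; they are
     freed when the matrix is destroyed, so no PetscFree() calls are needed here.
  */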
  ierr = MatView(A,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  /*
       Partition the graph of the matrix
  */
  ierr = MatPartitioningCreate(PETSC_COMM_WORLD,&part);CHKERRQ(ierr);
  ierr = MatPartitioningSetAdjacency(part,A);CHKERRQ(ierr);
  ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
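  /*
     The partitioner type can be selected at run time with -mat_partitioning_type
     (for example parmetis or ptscotch, when PETSc is configured with those packages).
  */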
  /* get the rank that will own each vertex under the new partitioning */
  ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
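  /*
     Optional sketch (not executed in this example): the partitioning IS can also
     be queried for how many vertices each process would receive, e.g.

       PetscInt count[4];
       ierr = ISPartitioningCount(is,4,count);CHKERRQ(ierr);
       ierr = PetscPrintf(PETSC_COMM_WORLD,"vertices per part: %D %D %D %D\n",
                          count[0],count[1],count[2],count[3]);CHKERRQ(ierr);
  */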
  /* map each old global vertex number to its new global number */
  ierr = ISPartitioningToNumbering(is,&isn);CHKERRQ(ierr);
  ierr = ISView(isn,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  ierr = ISDestroy(&isn);CHKERRQ(ierr);
  ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);

  /*
       Free work space.  All PETSc objects should be destroyed when they
       are no longer needed.
  */
  ierr = MatDestroy(&A);CHKERRQ(ierr);

  ierr = PetscFinalize();
  return ierr;
}