static char help[] = "Reads a PETSc matrix from a file and partitions it\n\n";

/*T
   Concepts: partitioning
   Processors: n
T*/

/*
  Include "petscmat.h" so that we can use matrices.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscviewer.h - viewers

  Example of usage:
    mpiexec -n 3 ex73 -f <matfile> -mat_partitioning_type parmetis/scotch -viewer_binary_skip_info -nox
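
  Runtime options read below:
    -view_mats  : draw the original, partitioned, and Jacobian-structure matrices
    -view_is    : print the index sets produced by the partitioning
    -view_vecs  : print the original and redistributed vectors
    -use_nd     : use nested dissection (MatPartitioningApplyND) instead of a standard partitioning
    -novec_load : do not read a vector from the file; create a random one instead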
*/
#include <petscmat.h>

int main(int argc,char **args)
{
  MatType         mtype = MATMPIAIJ; /* matrix format */
  Mat             A,B;               /* matrix */
  PetscViewer     fd;                /* viewer */
  char            file[PETSC_MAX_PATH_LEN];         /* input file name */
  PetscBool       flg,viewMats,viewIS,viewVecs,useND,noVecLoad = PETSC_FALSE;
  PetscErrorCode  ierr;
  PetscInt        *nlocal,m,n;
  PetscMPIInt     rank,size;
  MatPartitioning part;
  IS              is,isn;
  Vec             xin, xout;
  VecScatter      scat;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(NULL,NULL, "-view_mats", &viewMats);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(NULL,NULL, "-view_is", &viewIS);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(NULL,NULL, "-view_vecs", &viewVecs);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(NULL,NULL, "-use_nd", &useND);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(NULL,NULL, "-novec_load", &noVecLoad);CHKERRQ(ierr);

  /*
     Determine file from which we read the matrix
  */
  ierr = PetscOptionsGetString(NULL,NULL,"-f",file,sizeof(file),&flg);CHKERRQ(ierr);
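  /* Without -f the file name is unset, so stop with a clear error rather than opening an uninitialized path */
  if (!flg) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Must indicate a binary matrix file with the -f option");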

  /*
       Open binary file.  Note that we use FILE_MODE_READ to indicate
       reading from this file.
  */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);CHKERRQ(ierr);

  /*
      Load the matrix, and the vector unless -novec_load is given; then destroy the viewer.
  */
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetType(A,mtype);CHKERRQ(ierr);
  ierr = MatLoad(A,fd);CHKERRQ(ierr);
  if (!noVecLoad) {
    ierr = VecCreate(PETSC_COMM_WORLD,&xin);CHKERRQ(ierr);
    ierr = VecLoad(xin,fd);CHKERRQ(ierr);
  } else {
    ierr = MatCreateVecs(A,&xin,NULL);CHKERRQ(ierr);
    ierr = VecSetRandom(xin,NULL);CHKERRQ(ierr);
  }
  ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);
  if (viewMats) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"Original matrix:\n");CHKERRQ(ierr);
    ierr = MatView(A,PETSC_VIEWER_DRAW_WORLD);CHKERRQ(ierr);
  }
  if (viewVecs) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"Original vector:\n");CHKERRQ(ierr);
    ierr = VecView(xin,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }

  /* Partition the graph of the matrix */
  ierr = MatPartitioningCreate(PETSC_COMM_WORLD,&part);CHKERRQ(ierr);
  ierr = MatPartitioningSetAdjacency(part,A);CHKERRQ(ierr);
  ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
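  /* the partitioning package (e.g. parmetis or ptscotch) is selected at runtime with -mat_partitioning_type */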

  /* get the new owning process of each vertex */
  if (useND) {
    ierr = MatPartitioningApplyND(part,&is);CHKERRQ(ierr);
  } else {
    ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
  }
  if (viewIS) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"IS1 - new processor ownership:\n");CHKERRQ(ierr);
    ierr = ISView(is,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }

  /* get new global number of each old global number */
  ierr = ISPartitioningToNumbering(is,&isn);CHKERRQ(ierr);
  if (viewIS) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"IS2 - new global numbering:\n");CHKERRQ(ierr);
    ierr = ISView(isn,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }

  /* get number of new vertices for each processor */
  ierr = PetscMalloc1(size,&nlocal);CHKERRQ(ierr);
  ierr = ISPartitioningCount(is,size,nlocal);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);

  /* get old global number of each new global number; nlocal[rank] makes the inverted numbering use
     the same row distribution as the partition, while with nested dissection PETSc decides the sizes */
  ierr = ISInvertPermutation(isn,useND ? PETSC_DECIDE : nlocal[rank],&is);CHKERRQ(ierr);
  if (viewIS) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"IS3=inv(IS2) - old global number of each new global number:\n");CHKERRQ(ierr);
    ierr = ISView(is,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }

  /* move the matrix rows to the new processes they have been assigned to by the permutation */
  ierr = MatCreateSubMatrix(A,is,is,MAT_INITIAL_MATRIX,&B);CHKERRQ(ierr);
  ierr = PetscFree(nlocal);CHKERRQ(ierr);
  ierr = ISDestroy(&isn);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
  if (viewMats) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"Partitioned matrix:\n");CHKERRQ(ierr);
    ierr = MatView(B,PETSC_VIEWER_DRAW_WORLD);CHKERRQ(ierr);
  }

  /* move the vector rows to the new processes they have been assigned to */
  ierr = MatGetLocalSize(B,&m,&n);CHKERRQ(ierr);
  ierr = VecCreateMPI(PETSC_COMM_WORLD,m,PETSC_DECIDE,&xout);CHKERRQ(ierr);
  ierr = VecScatterCreate(xin,is,xout,NULL,&scat);CHKERRQ(ierr);
  ierr = VecScatterBegin(scat,xin,xout,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scat,xin,xout,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
  if (viewVecs) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"Mapped vector:\n");CHKERRQ(ierr);
    ierr = VecView(xout,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&xout);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);

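  /*
     Use the nonzero pattern of the partitioned matrix B to preallocate and assemble the
     structure of a larger matrix J in which every vertex carries two interlaced degrees of
     freedom: each nonzero (i,j) of B becomes a 2x2 block of J occupying rows 2*i, 2*i+1 and
     columns 2*j, 2*j+1.  Only the structure is built; all inserted values are zero.
  */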
  {
    PetscInt          rstart,i,*nzd,*nzo,nzl,nzmax = 0,*ncols,nrow,j;
    Mat               J;
    const PetscInt    *cols;
    const PetscScalar *vals;
    PetscScalar       *nvals;

    ierr = MatGetOwnershipRange(B,&rstart,NULL);CHKERRQ(ierr);
    ierr = PetscCalloc2(2*m,&nzd,2*m,&nzo);CHKERRQ(ierr);
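    /* count, for each pair of rows of J, the nonzeros that fall in the diagonal block (nzd)
       and in the off-diagonal block (nzo), as needed for MatCreateAIJ preallocation */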
    for (i=0; i<m; i++) {
      ierr = MatGetRow(B,i+rstart,&nzl,&cols,NULL);CHKERRQ(ierr);
      for (j=0; j<nzl; j++) {
        if (cols[j] >= rstart && cols[j] < rstart+n) {
          nzd[2*i] += 2;
          nzd[2*i+1] += 2;
        } else {
          nzo[2*i] += 2;
          nzo[2*i+1] += 2;
        }
      }
      nzmax = PetscMax(nzmax,nzd[2*i]+nzo[2*i]);
      ierr  = MatRestoreRow(B,i+rstart,&nzl,&cols,NULL);CHKERRQ(ierr);
    }
    ierr = MatCreateAIJ(PETSC_COMM_WORLD,2*m,2*m,PETSC_DECIDE,PETSC_DECIDE,0,nzd,0,nzo,&J);CHKERRQ(ierr);
    ierr = PetscInfo(0,"Created empty Jacobian matrix\n");CHKERRQ(ierr);
    ierr = PetscFree2(nzd,nzo);CHKERRQ(ierr);
    ierr = PetscMalloc2(nzmax,&ncols,nzmax,&nvals);CHKERRQ(ierr);
    ierr = PetscArrayzero(nvals,nzmax);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = MatGetRow(B,i+rstart,&nzl,&cols,&vals);CHKERRQ(ierr);
      for (j=0; j<nzl; j++) {
        ncols[2*j]   = 2*cols[j];
        ncols[2*j+1] = 2*cols[j]+1;
      }
      nrow = 2*(i+rstart);
      ierr = MatSetValues(J,1,&nrow,2*nzl,ncols,nvals,INSERT_VALUES);CHKERRQ(ierr);
      nrow = 2*(i+rstart) + 1;
      ierr = MatSetValues(J,1,&nrow,2*nzl,ncols,nvals,INSERT_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(B,i+rstart,&nzl,&cols,&vals);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    if (viewMats) {
      ierr = PetscPrintf(PETSC_COMM_WORLD,"Jacobian matrix structure:\n");CHKERRQ(ierr);
      ierr = MatView(J,PETSC_VIEWER_DRAW_WORLD);CHKERRQ(ierr);
    }
    ierr = MatDestroy(&J);CHKERRQ(ierr);
    ierr = PetscFree2(ncols,nvals);CHKERRQ(ierr);
  }

  /*
       Free work space.  All PETSc objects should be destroyed when they
       are no longer needed.
  */
  ierr = MatDestroy(&B);CHKERRQ(ierr);
  ierr = VecDestroy(&xin);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      nsize: 3
      requires: parmetis datafilespath !complex double !define(PETSC_USE_64BIT_INDICES)
      args: -nox -f ${DATAFILESPATH}/matrices/arco1 -mat_partitioning_type parmetis -viewer_binary_skip_info -novec_load

   test:
      requires: parmetis !complex double !define(PETSC_USE_64BIT_INDICES)
      output_file: output/ex73_1.out
      suffix: parmetis_nd_32
      nsize: 3
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int32-float64 -mat_partitioning_type parmetis -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: parmetis !complex double define(PETSC_USE_64BIT_INDICES)
      output_file: output/ex73_1.out
      suffix: parmetis_nd_64
      nsize: 3
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int64-float64 -mat_partitioning_type parmetis -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: ptscotch !complex double !define(PETSC_USE_64BIT_INDICES) define(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
      output_file: output/ex73_1.out
      suffix: ptscotch_nd_32
      nsize: 4
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int32-float64 -mat_partitioning_type ptscotch -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: ptscotch !complex double define(PETSC_USE_64BIT_INDICES) define(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
      output_file: output/ex73_1.out
      suffix: ptscotch_nd_64
      nsize: 4
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int64-float64 -mat_partitioning_type ptscotch -viewer_binary_skip_info -use_nd -novec_load

TEST*/