/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include <petsc/private/matimpl.h>
#include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/

/*
   Produces a set of block column indices of the matrix row, one for each block represented in the original row

   n - the number of block indices in cc[]
   cc - the block indices (must be large enough to contain the indices)
*/
PETSC_STATIC_INLINE PetscErrorCode MatCollapseRow(Mat Amat,PetscInt row,PetscInt bs,PetscInt *n,PetscInt *cc)
{
  PetscInt       cnt = -1,nidx,j;
  const PetscInt *idx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetRow(Amat,row,&nidx,&idx,NULL);CHKERRQ(ierr);
  if (nidx) {
    cnt = 0;
    cc[cnt] = idx[0]/bs;
    for (j=1; j<nidx; j++) {
      if (cc[cnt] < idx[j]/bs) cc[++cnt] = idx[j]/bs;
    }
  }
  ierr = MatRestoreRow(Amat,row,&nidx,&idx,NULL);CHKERRQ(ierr);
  *n = cnt+1;
  PetscFunctionReturn(0);
}
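
/*
   A minimal sketch of what MatCollapseRow() computes (the row contents below
   are hypothetical). With bs = 2, a row whose column indices are
   {0, 1, 4, 5, 8} collapses to the block indices {0, 2, 4}, since idx[j]/bs
   maps columns 0,1 -> 0, columns 4,5 -> 2, and column 8 -> 4:

     PetscInt n, cc[16];               // cc must be large enough for the row
     ierr = MatCollapseRow(Amat,row,2,&n,cc);CHKERRQ(ierr);
     // n == 3, cc = {0, 2, 4} for the hypothetical row above

   Note the dedup test cc[cnt] < idx[j]/bs relies on MatGetRow() returning
   column indices in ascending order.
*/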

/*
    Produces a set of block column indices of the matrix block row, one for each block represented in the original set of rows

    ncollapsed - the number of block indices
    collapsed - the block indices (must be large enough to contain the indices)
*/
PETSC_STATIC_INLINE PetscErrorCode MatCollapseRows(Mat Amat,PetscInt start,PetscInt bs,PetscInt *w0,PetscInt *w1,PetscInt *w2,PetscInt *ncollapsed,PetscInt **collapsed)
{
  PetscInt       i,nprev,*cprev = w0,ncur = 0,*ccur = w1,*merged = w2,*cprevtmp;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCollapseRow(Amat,start,bs,&nprev,cprev);CHKERRQ(ierr);
  for (i=start+1; i<start+bs; i++) {
    ierr  = MatCollapseRow(Amat,i,bs,&ncur,ccur);CHKERRQ(ierr);
    ierr  = PetscMergeIntArray(nprev,cprev,ncur,ccur,&nprev,&merged);CHKERRQ(ierr);
    cprevtmp = cprev; cprev = merged; merged = cprevtmp;
  }
  *ncollapsed = nprev;
  if (collapsed) *collapsed  = cprev;
  PetscFunctionReturn(0);
}
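
/*
   A sketch of the workspace protocol above (the size below is hypothetical).
   The scratch arrays w0/w1/w2 must each be large enough to hold the merged
   result; the merge ping-pongs between w0 and w2, so on return *collapsed
   aliases one of the caller's workspaces rather than fresh memory:

     PetscInt nc,*cols,*w0,*w1,*w2;
     ierr = PetscMalloc3(max_merged_nnz,&w0,max_merged_nnz,&w1,max_merged_nnz,&w2);CHKERRQ(ierr);
     ierr = MatCollapseRows(Amat,Ii,bs,w0,w1,w2,&nc,&cols);CHKERRQ(ierr);
     // use nc and cols before freeing the workspaces
     ierr = PetscFree3(w0,w1,w2);CHKERRQ(ierr);

   The call sites in this file pass NULL for collapsed and use only the count.
*/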


/* -------------------------------------------------------------------------- */
/*
   PCGAMGCreateGraph - create simple scaled scalar graph from matrix

 Input Parameter:
 . Amat - matrix
 Output Parameter:
 . a_Gmat - output scalar graph (symmetric?)
 */
PetscErrorCode PCGAMGCreateGraph(Mat Amat, Mat *a_Gmat)
{
  PetscErrorCode ierr;
  PetscInt       Istart,Iend,Ii,jj,kk,ncols,nloc,NN,MM,bs;
  MPI_Comm       comm;
  Mat            Gmat;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(Amat, &Istart, &Iend);CHKERRQ(ierr);
  ierr = MatGetSize(Amat, &MM, &NN);CHKERRQ(ierr);
  ierr = MatGetBlockSize(Amat, &bs);CHKERRQ(ierr);
  nloc = (Iend-Istart)/bs;

#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventBegin(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);
#endif

  if (bs > 1) {
    const PetscScalar *vals;
    const PetscInt    *idx;
    PetscInt          *d_nnz, *o_nnz,*w0,*w1,*w2;
    PetscBool         ismpiaij,isseqaij;

    /*
       Determine the preallocation needed for the scalar matrix derived from the vector matrix.
    */

    ierr = PetscObjectBaseTypeCompare((PetscObject)Amat,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)Amat,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    ierr = PetscMalloc2(nloc, &d_nnz,isseqaij ? 0 : nloc, &o_nnz);CHKERRQ(ierr);

    if (isseqaij) {
      PetscInt max_d_nnz;

      /*
          Determine exact preallocation count for (sequential) scalar matrix
      */
      ierr = MatSeqAIJGetMaxRowNonzeros(Amat,&max_d_nnz);CHKERRQ(ierr);
      max_d_nnz = PetscMin(nloc,bs*max_d_nnz);
      ierr = PetscMalloc3(max_d_nnz, &w0,max_d_nnz, &w1,max_d_nnz, &w2);CHKERRQ(ierr);
      for (Ii = 0, jj = 0; Ii < Iend; Ii += bs, jj++) {
        ierr = MatCollapseRows(Amat,Ii,bs,w0,w1,w2,&d_nnz[jj],NULL);CHKERRQ(ierr);
      }
      ierr = PetscFree3(w0,w1,w2);CHKERRQ(ierr);

    } else if (ismpiaij) {
      Mat            Daij,Oaij;
      const PetscInt *garray;
      PetscInt       max_d_nnz;

      ierr = MatMPIAIJGetSeqAIJ(Amat,&Daij,&Oaij,&garray);CHKERRQ(ierr);

      /*
          Determine exact preallocation count for diagonal block portion of scalar matrix
      */
      ierr = MatSeqAIJGetMaxRowNonzeros(Daij,&max_d_nnz);CHKERRQ(ierr);
      max_d_nnz = PetscMin(nloc,bs*max_d_nnz);
      ierr = PetscMalloc3(max_d_nnz, &w0,max_d_nnz, &w1,max_d_nnz, &w2);CHKERRQ(ierr);
      for (Ii = 0, jj = 0; Ii < Iend - Istart; Ii += bs, jj++) {
        ierr = MatCollapseRows(Daij,Ii,bs,w0,w1,w2,&d_nnz[jj],NULL);CHKERRQ(ierr);
      }
      ierr = PetscFree3(w0,w1,w2);CHKERRQ(ierr);

      /*
         Overestimate (usually grossly) the preallocation count for the off-diagonal portion of the scalar matrix
      */
      for (Ii = 0, jj = 0; Ii < Iend - Istart; Ii += bs, jj++) {
        o_nnz[jj] = 0;
        for (kk=0; kk<bs; kk++) { /* rows that get collapsed to a single row */
          ierr = MatGetRow(Oaij,Ii+kk,&ncols,NULL,NULL);CHKERRQ(ierr);
          o_nnz[jj] += ncols;
          ierr = MatRestoreRow(Oaij,Ii+kk,&ncols,NULL,NULL);CHKERRQ(ierr);
        }
        if (o_nnz[jj] > (NN/bs-nloc)) o_nnz[jj] = NN/bs-nloc;
      }

    } else SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Require AIJ matrix type");

    /* get scalar copy (norms) of matrix */
    ierr = MatCreate(comm, &Gmat);CHKERRQ(ierr);
    ierr = MatSetSizes(Gmat,nloc,nloc,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetBlockSizes(Gmat, 1, 1);CHKERRQ(ierr);
    ierr = MatSetType(Gmat, MATAIJ);CHKERRQ(ierr);
    ierr = MatSeqAIJSetPreallocation(Gmat,0,d_nnz);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(Gmat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
    ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);

    for (Ii = Istart; Ii < Iend; Ii++) {
      PetscInt dest_row = Ii/bs;
      ierr = MatGetRow(Amat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
      for (jj=0; jj<ncols; jj++) {
        PetscInt    dest_col = idx[jj]/bs;
        PetscScalar sv       = PetscAbs(PetscRealPart(vals[jj]));
        ierr = MatSetValues(Gmat,1,&dest_row,1,&dest_col,&sv,ADD_VALUES);CHKERRQ(ierr);
      }
      ierr = MatRestoreRow(Amat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(Gmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(Gmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else {
    /* just copy scalar matrix - abs() not taken here but scaled later */
    ierr = MatDuplicate(Amat, MAT_COPY_VALUES, &Gmat);CHKERRQ(ierr);
  }
  ierr = MatPropagateSymmetryOptions(Amat, Gmat);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);
#endif

  *a_Gmat = Gmat;
  PetscFunctionReturn(0);
}
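
/*
   Sketch of the bs > 1 collapse above (the values are hypothetical): each
   bs x bs block of Amat becomes one scalar entry of Gmat equal to the sum of
   the absolute values of the block's entries, accumulated one entry at a time
   through MatSetValues(...,ADD_VALUES). For bs = 2:

     block A_IJ = [ 1 -2 ]   ->   G_IJ = |1| + |-2| + |0| + |3| = 6
                  [ 0  3 ]

   A typical call site pairs this with PCGAMGFilterGraph() below:

     Mat Gmat;
     ierr = PCGAMGCreateGraph(Amat,&Gmat);CHKERRQ(ierr);
*/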

/* -------------------------------------------------------------------------- */
/*@C
   PCGAMGFilterGraph - filter (remove zero and possibly small values from the) graph and make it symmetric if requested

   Collective on Mat

   Input Parameters:
+   a_Gmat - the graph
.   vfilter - threshold parameter [0,1)
-   symm - make the result symmetric

   Level: developer

   Notes:
    This is called before the graph coarseners are called.

.seealso: PCGAMGSetThreshold()
@*/
PetscErrorCode PCGAMGFilterGraph(Mat *a_Gmat,PetscReal vfilter,PetscBool symm)
{
  PetscErrorCode    ierr;
  PetscInt          Istart,Iend,Ii,jj,ncols,nnz0,nnz1, NN, MM, nloc;
  PetscMPIInt       rank;
  Mat               Gmat  = *a_Gmat, tGmat, matTrans;
  MPI_Comm          comm;
  const PetscScalar *vals;
  const PetscInt    *idx;
  PetscInt          *d_nnz, *o_nnz;
  Vec               diag;

  PetscFunctionBegin;
#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventBegin(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);
#endif
  /* scale Gmat so all values lie between -1 and 1: G <- D^(-1/2) G D^(-1/2) with D = diag(G) */
  ierr = MatCreateVecs(Gmat, &diag, NULL);CHKERRQ(ierr);
  ierr = MatGetDiagonal(Gmat, diag);CHKERRQ(ierr);
  ierr = VecReciprocal(diag);CHKERRQ(ierr);
  ierr = VecSqrtAbs(diag);CHKERRQ(ierr);
  ierr = MatDiagonalScale(Gmat, diag, diag);CHKERRQ(ierr);
  ierr = VecDestroy(&diag);CHKERRQ(ierr);

  if (vfilter < 0.0 && !symm) {
    /* Just use the provided matrix as the graph but make all values positive */
    MatInfo     info;
    PetscScalar *avals;
    PetscBool   isaij,ismpiaij;
    ierr = PetscObjectBaseTypeCompare((PetscObject)Gmat,MATSEQAIJ,&isaij);CHKERRQ(ierr);
    ierr = PetscObjectBaseTypeCompare((PetscObject)Gmat,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
    if (!isaij && !ismpiaij) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Require (MPI)AIJ matrix type");
    if (isaij) {
      ierr = MatGetInfo(Gmat,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = MatSeqAIJGetArray(Gmat,&avals);CHKERRQ(ierr);
      for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
      ierr = MatSeqAIJRestoreArray(Gmat,&avals);CHKERRQ(ierr);
    } else {
      Mat_MPIAIJ *aij = (Mat_MPIAIJ*)Gmat->data;
      ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = MatSeqAIJGetArray(aij->A,&avals);CHKERRQ(ierr);
      for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
      ierr = MatSeqAIJRestoreArray(aij->A,&avals);CHKERRQ(ierr);
      ierr = MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = MatSeqAIJGetArray(aij->B,&avals);CHKERRQ(ierr);
      for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
      ierr = MatSeqAIJRestoreArray(aij->B,&avals);CHKERRQ(ierr);
    }
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);
#endif
    PetscFunctionReturn(0);
  }

  ierr = PetscObjectGetComm((PetscObject)Gmat,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(Gmat, &Istart, &Iend);CHKERRQ(ierr);
  nloc = Iend - Istart;
  ierr = MatGetSize(Gmat, &MM, &NN);CHKERRQ(ierr);

  if (symm) {
    ierr = MatTranspose(Gmat, MAT_INITIAL_MATRIX, &matTrans);CHKERRQ(ierr);
  }

  /* Determine upper bound on nonzeros needed in new filtered matrix */
  ierr = PetscMalloc2(nloc, &d_nnz,nloc, &o_nnz);CHKERRQ(ierr);
  for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) {
    ierr      = MatGetRow(Gmat,Ii,&ncols,NULL,NULL);CHKERRQ(ierr);
    d_nnz[jj] = ncols;
    o_nnz[jj] = ncols;
    ierr      = MatRestoreRow(Gmat,Ii,&ncols,NULL,NULL);CHKERRQ(ierr);
    if (symm) {
      ierr       = MatGetRow(matTrans,Ii,&ncols,NULL,NULL);CHKERRQ(ierr);
      d_nnz[jj] += ncols;
      o_nnz[jj] += ncols;
      ierr       = MatRestoreRow(matTrans,Ii,&ncols,NULL,NULL);CHKERRQ(ierr);
    }
    if (d_nnz[jj] > nloc) d_nnz[jj] = nloc;
    if (o_nnz[jj] > (MM-nloc)) o_nnz[jj] = MM - nloc;
  }
  ierr = MatCreate(comm, &tGmat);CHKERRQ(ierr);
  ierr = MatSetSizes(tGmat,nloc,nloc,MM,MM);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(tGmat, 1, 1);CHKERRQ(ierr);
  ierr = MatSetType(tGmat, MATAIJ);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(tGmat,0,d_nnz);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(tGmat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
  ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
  if (symm) {
    ierr = MatDestroy(&matTrans);CHKERRQ(ierr);
  } else {
    /* all entries are generated locally so MatAssembly will be slightly faster for large process counts */
    ierr = MatSetOption(tGmat,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
  }

  for (Ii = Istart, nnz0 = nnz1 = 0; Ii < Iend; Ii++) {
    ierr = MatGetRow(Gmat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
    for (jj=0; jj<ncols; jj++,nnz0++) {
      PetscScalar sv = PetscAbs(PetscRealPart(vals[jj]));
      if (PetscRealPart(sv) > vfilter) {
        nnz1++;
        if (symm) {
          sv  *= 0.5;
          ierr = MatSetValues(tGmat,1,&Ii,1,&idx[jj],&sv,ADD_VALUES);CHKERRQ(ierr);
          ierr = MatSetValues(tGmat,1,&idx[jj],1,&Ii,&sv,ADD_VALUES);CHKERRQ(ierr);
        } else {
          ierr = MatSetValues(tGmat,1,&Ii,1,&idx[jj],&sv,ADD_VALUES);CHKERRQ(ierr);
        }
      }
    }
    ierr = MatRestoreRow(Gmat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(tGmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(tGmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (symm) {
    ierr = MatSetOption(tGmat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
  } else {
    ierr = MatPropagateSymmetryOptions(Gmat,tGmat);CHKERRQ(ierr);
  }
#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);
#endif

#if defined(PETSC_USE_INFO)
  {
    double t1 = (!nnz0) ? 1. : 100.*(double)nnz1/(double)nnz0, t2 = (!nloc) ? 1. : (double)nnz0/(double)nloc;
    ierr = PetscInfo4(*a_Gmat,"\t %g%% nnz after filtering, with threshold %g, %g nnz ave. (N=%D)\n",t1,vfilter,t2,MM);CHKERRQ(ierr);
  }
#endif
  ierr    = MatDestroy(&Gmat);CHKERRQ(ierr);
  *a_Gmat = tGmat;
  PetscFunctionReturn(0);
}
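
/*
   A minimal usage sketch (the threshold value is hypothetical). After the
   symmetric diagonal scaling, entries satisfy |g_ij| <= 1 for SPD problems,
   so a vfilter in [0,1) drops the relatively weak couplings; vfilter < 0
   (with symm false) skips filtering and only takes absolute values in place:

     Mat Gmat;
     ierr = PCGAMGCreateGraph(Amat,&Gmat);CHKERRQ(ierr);
     ierr = PCGAMGFilterGraph(&Gmat,0.01,PETSC_FALSE);CHKERRQ(ierr);  // Gmat is replaced by the filtered graph
*/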

/* -------------------------------------------------------------------------- */
/*
   PCGAMGGetDataWithGhosts - hacks into the MPIAIJ Mat internals, so the communicator must have size > 1

   Input Parameters:
   . Gmat - MPIAIJ matrix for scatters
   . data_sz - number of data terms per node (# cols in output)
   . data_in[nloc*data_sz] - column-oriented data
   Output Parameters:
   . a_stride - number of rows of output
   . a_data_out[stride*data_sz] - output data with ghosts
*/
PetscErrorCode PCGAMGGetDataWithGhosts(Mat Gmat,PetscInt data_sz,PetscReal data_in[],PetscInt *a_stride,PetscReal **a_data_out)
{
  PetscErrorCode ierr;
  Vec            tmp_crds;
  Mat_MPIAIJ     *mpimat = (Mat_MPIAIJ*)Gmat->data;
  PetscInt       nnodes,num_ghosts,dir,kk,jj,my0,Iend,nloc;
  PetscScalar    *data_arr;
  PetscReal      *datas;
  PetscBool      isMPIAIJ;

  PetscFunctionBegin;
  ierr      = PetscObjectBaseTypeCompare((PetscObject)Gmat, MATMPIAIJ, &isMPIAIJ);CHKERRQ(ierr);
  ierr      = MatGetOwnershipRange(Gmat, &my0, &Iend);CHKERRQ(ierr);
  nloc      = Iend - my0;
  ierr      = VecGetLocalSize(mpimat->lvec, &num_ghosts);CHKERRQ(ierr);
  nnodes    = num_ghosts + nloc;
  *a_stride = nnodes;
  ierr      = MatCreateVecs(Gmat, &tmp_crds, NULL);CHKERRQ(ierr);

  ierr = PetscMalloc1(data_sz*nnodes, &datas);CHKERRQ(ierr);
  for (dir=0; dir<data_sz; dir++) {
    /* set local, and global */
    for (kk=0; kk<nloc; kk++) {
      PetscInt    gid = my0 + kk;
      PetscScalar crd = (PetscScalar)data_in[dir*nloc + kk]; /* col oriented */
      datas[dir*nnodes + kk] = PetscRealPart(crd);

      ierr = VecSetValues(tmp_crds, 1, &gid, &crd, INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = VecAssemblyBegin(tmp_crds);CHKERRQ(ierr);
    ierr = VecAssemblyEnd(tmp_crds);CHKERRQ(ierr);
    /* get ghost data */
    ierr = VecScatterBegin(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArray(mpimat->lvec, &data_arr);CHKERRQ(ierr);
    for (kk=nloc,jj=0;jj<num_ghosts;kk++,jj++) datas[dir*nnodes + kk] = PetscRealPart(data_arr[jj]);
    ierr = VecRestoreArray(mpimat->lvec, &data_arr);CHKERRQ(ierr);
  }
  ierr        = VecDestroy(&tmp_crds);CHKERRQ(ierr);
  *a_data_out = datas;
  PetscFunctionReturn(0);
}
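
/*
   A minimal usage sketch (the coordinate variables are hypothetical): gather
   the local nodal coordinates plus their ghost values into one
   column-oriented array. The caller owns the returned buffer and must free it:

     PetscReal *coords_ghosted;   // nnodes x dim on return, column oriented
     PetscInt  nnodes;
     ierr = PCGAMGGetDataWithGhosts(Gmat,dim,coords,&nnodes,&coords_ghosted);CHKERRQ(ierr);
     // rows [0,nloc) hold the local data, rows [nloc,nnodes) the ghosts
     ierr = PetscFree(coords_ghosted);CHKERRQ(ierr);
*/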

PetscErrorCode PCGAMGHashTableCreate(PetscInt a_size, PCGAMGHashTable *a_tab)
{
  PetscErrorCode ierr;
  PetscInt       kk;

  PetscFunctionBegin;
  a_tab->size = a_size;
  ierr = PetscMalloc2(a_size, &a_tab->table,a_size, &a_tab->data);CHKERRQ(ierr);
  for (kk=0; kk<a_size; kk++) a_tab->table[kk] = -1;
  PetscFunctionReturn(0);
}

PetscErrorCode PCGAMGHashTableDestroy(PCGAMGHashTable *a_tab)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree2(a_tab->table,a_tab->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode PCGAMGHashTableAdd(PCGAMGHashTable *a_tab, PetscInt a_key, PetscInt a_data)
{
  PetscInt kk,idx;

  PetscFunctionBegin;
  if (a_key<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"Negative key %D.",a_key);
  for (kk = 0, idx = GAMG_HASH(a_key); kk < a_tab->size; kk++, idx = (idx==(a_tab->size-1)) ? 0 : idx + 1) {
    if (a_tab->table[idx] == a_key) {
      /* exists */
      a_tab->data[idx] = a_data;
      break;
    } else if (a_tab->table[idx] == -1) {
      /* add */
      a_tab->table[idx] = a_key;
      a_tab->data[idx]  = a_data;
      break;
    }
  }
  if (kk==a_tab->size) {
    /* this is not too efficient: we wait until the table is completely full before growing it */
    PetscInt       oldsize = a_tab->size, new_size = 2*a_tab->size + 5, *oldtable = a_tab->table, *olddata = a_tab->data;
    PetscErrorCode ierr;

    a_tab->size = new_size;
    ierr = PetscMalloc2(a_tab->size, &a_tab->table,a_tab->size, &a_tab->data);CHKERRQ(ierr);
    for (kk=0;kk<a_tab->size;kk++) a_tab->table[kk] = -1;
    for (kk=0;kk<oldsize;kk++) {
      if (oldtable[kk] != -1) {
        ierr = PCGAMGHashTableAdd(a_tab, oldtable[kk], olddata[kk]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(oldtable,olddata);CHKERRQ(ierr);
    ierr = PCGAMGHashTableAdd(a_tab, a_key, a_data);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
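
/*
   A minimal usage sketch of the open-addressing table above (the key/value
   pair is hypothetical). GAMG_HASH gives the initial slot and collisions
   probe linearly with wraparound; inserting into a full table triggers the
   rehash path, which roughly doubles the storage:

     PCGAMGHashTable tab;
     PetscInt        gid;
     ierr = PCGAMGHashTableCreate(32,&tab);CHKERRQ(ierr);
     ierr = PCGAMGHashTableAdd(&tab,17,3);CHKERRQ(ierr);      // key 17 -> data 3
     ierr = PCGAMGHashTableFind(&tab,17,&gid);CHKERRQ(ierr);  // assumes the companion lookup helper in gamg.h
     ierr = PCGAMGHashTableDestroy(&tab);CHKERRQ(ierr);
*/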