/* Copyright 2007,2008,2011,2013,2014 IPB, Universite de Bordeaux, INRIA & CNRS
**
** This file is part of the Scotch software package for static mapping,
** graph partitioning and sparse matrix ordering.
**
** This software is governed by the CeCILL-C license under French law
** and abiding by the rules of distribution of free software. You can
** use, modify and/or redistribute the software under the terms of the
** CeCILL-C license as circulated by CEA, CNRS and INRIA at the following
** URL: "http://www.cecill.info".
**
** As a counterpart to the access to the source code and rights to copy,
** modify and redistribute granted by the license, users are provided
** only with a limited warranty and the software's author, the holder of
** the economic rights, and the successive licensors have only limited
** liability.
**
** In this respect, the user's attention is drawn to the risks associated
** with loading, using, modifying and/or developing or reproducing the
** software by the user in light of its specific status of free software,
** that may mean that it is complicated to manipulate, and that also
** therefore means that it is reserved for developers and experienced
** professionals having in-depth computer knowledge. Users are therefore
** encouraged to load and test the software's suitability as regards
** their requirements in conditions enabling the security of their
** systems and/or data to be ensured and, more generally, to use and
** operate it in the same conditions as regards security.
**
** The fact that you are presently reading this means that you have had
** knowledge of the CeCILL-C license and that you accept its terms.
*/
/************************************************************/
/**                                                        **/
/**   NAME       : bdgraph_check.c                         **/
/**                                                        **/
/**   AUTHORS    : Jun-Ho HER (v6.0)                       **/
/**                Francois PELLEGRINI                     **/
/**                                                        **/
/**   FUNCTION   : This module contains the distributed    **/
/**                bipartition graph consistency checking  **/
/**                routine.                                **/
/**                                                        **/
/**   DATES      : # Version 5.1  : from : 10 sep 2007     **/
/**                                 to     22 jul 2008     **/
/**                # Version 6.0  : from : 03 sep 2011     **/
/**                                 to     31 aug 2014     **/
/**                                                        **/
/************************************************************/

/*
**  The defines and includes.
*/

#include "module.h"
#include "common.h"
#include "arch.h"
#include "dgraph.h"
#include "bdgraph.h"

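/* This routine checks the consistency
** of the given distributed bipartition.
** It returns:
** - 0   : if the bipartition data are consistent on all processes.
** - !0  : on error.
**
** A sketch of a typical call site, guarded by a debug macro (the macro
** and caller names below are illustrative only):
**
**   #ifdef SCOTCH_DEBUG_BDGRAPH2
**   if (bdgraphCheck (grafptr) != 0) {
**     errorPrint ("bdgraphBipartXy: inconsistent graph data");
**     return     (1);
**   }
**   #endif
*/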
int
bdgraphCheck (
const Bdgraph * restrict const grafptr)
{
  Dgraph                    grafdat;              /* Dummy graph for ghost edge array */
  MPI_Comm                  proccomm;             /* Graph communicator               */
  int * restrict            flagloctax;           /* Frontier flag array              */
  GraphPart * restrict      partgsttax;
  Gnum                      fronlocnum;
  Gnum                      vertlocnum;           /* Number of current vertex         */
  Gnum                      complocload[2];
  Gnum                      complocsize[2];
  Gnum                      commcut[2];
  Gnum                      commlocloadintn;
  Gnum                      commlocloadextn;
  Gnum                      commlocgainextn;
  Gnum                      edlolocval;
  Gnum                      reduloctab[21];       /* Arrays for reductions            */
  Gnum                      reduglbtab[21];
  int                       chekglbval;           /* Global consistency flag          */
  int                       cheklocval;           /* Local consistency flag           */

  proccomm = grafptr->s.proccomm;
  if (MPI_Barrier (proccomm) != MPI_SUCCESS) {    /* Synchronize */
    errorPrint ("bdgraphCheck: communication error (1)");
    return     (1);
  }

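  /* Each local test below sets a distinct bit in cheklocval, so that any
  ** failure on any process yields a non-zero return value once the flags
  ** have been reduced across all processes */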
  cheklocval = 0;                                 /* Assume everything is all right */
  if ((grafptr->compglbload0min < 0) ||
      (grafptr->compglbload0max > grafptr->s.veloglbsum)) {
    errorPrint ("bdgraphCheck: invalid extrema loads");
    cheklocval = 1;
  }

  if (grafptr->compglbload0 != (grafptr->compglbload0avg + grafptr->compglbload0dlt)) {
    errorPrint ("bdgraphCheck: invalid global balance");
    cheklocval |= 2;
  }

  if ((grafptr->fronlocnbr < 0) ||
      (grafptr->fronlocnbr > grafptr->s.vertlocnbr)) {
    errorPrint ("bdgraphCheck: invalid number of local frontier vertices");
    cheklocval |= 4;
  }

  if (grafptr->partgsttax != NULL) {
    for (vertlocnum = grafptr->s.baseval; vertlocnum < grafptr->s.vertlocnnd; vertlocnum ++) {
      if ((grafptr->partgsttax[vertlocnum] | 1) != 1) { /* If part is neither 0 nor 1 */
        errorPrint ("bdgraphCheck: invalid local part array");
        cheklocval |= 8;
        break;
      }
    }
  }

  grafdat = grafptr->s;                           /* Copy minimal distributed graph data      */
  if (dgraphGhst (&grafdat) != 0) {               /* Create ghost edge array if none existed  */
    errorPrint ("bdgraphCheck: cannot compute ghost edge array");
    cheklocval |= 16;
  }

  if (memAllocGroup ((void **) (void *)
                     &partgsttax, (size_t) (grafdat.vertgstnbr    * sizeof (GraphPart)),
                     &flagloctax, (size_t) (grafptr->s.vertlocnbr * sizeof (int)), NULL) == NULL) {
    errorPrint ("bdgraphCheck: out of memory");
    cheklocval |= 32;
  }
  else {
    memSet (flagloctax, ~0, grafptr->s.vertlocnbr * sizeof (int));
    flagloctax -= grafptr->s.baseval;

    for (fronlocnum = 0; fronlocnum < grafptr->fronlocnbr; fronlocnum ++) {
      Gnum                vertlocnum;

      vertlocnum = grafptr->fronloctab[fronlocnum];
      if ((vertlocnum < grafptr->s.baseval) || (vertlocnum >= grafptr->s.vertlocnnd)) {
        errorPrint ("bdgraphCheck: invalid vertex index in frontier array");
        cheklocval |= 64;
        break;
      }
      if (flagloctax[vertlocnum] != ~0) {
        errorPrint ("bdgraphCheck: duplicate vertex in frontier array");
        cheklocval |= 128;
        break;
      }
      flagloctax[vertlocnum] = 0;
    }
  }

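  /* Pack every global field together with its opposite, then take the
  ** maximum over all processes. If some process holds a different copy
  ** of a field, the maximum of the values and the maximum of their
  ** opposites are no longer opposites of each other, which the test
  ** performed after the reduction detects */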
  reduloctab[0]   =   grafptr->commglbload;
  reduloctab[1]   = - grafptr->commglbload;
  reduloctab[2]   =   grafptr->compglbload0;
  reduloctab[3]   = - grafptr->compglbload0;
  reduloctab[4]   =   grafptr->s.veloglbsum - grafptr->compglbload0;
  reduloctab[5]   = - grafptr->s.veloglbsum + grafptr->compglbload0;
  reduloctab[6]   =   grafptr->compglbsize0;
  reduloctab[7]   = - grafptr->compglbsize0;
  reduloctab[8]   =   grafptr->s.vertglbnbr - grafptr->compglbsize0;
  reduloctab[9]   = - grafptr->s.vertglbnbr + grafptr->compglbsize0;
  reduloctab[10]  =   grafptr->commglbgainextn;
  reduloctab[11]  = - grafptr->commglbgainextn;
  reduloctab[12]  =   grafptr->commglbgainextn0;
  reduloctab[13]  = - grafptr->commglbgainextn0;
  reduloctab[14]  =   grafptr->commglbloadextn0;
  reduloctab[15]  = - grafptr->commglbloadextn0;
  reduloctab[16]  =   grafptr->fronglbnbr;
  reduloctab[17]  = - grafptr->fronglbnbr;
  reduloctab[18]  =   grafptr->levlnum;
  reduloctab[19]  = - grafptr->levlnum;
  reduloctab[20]  =   cheklocval;

  if (MPI_Allreduce (reduloctab, reduglbtab, 21, GNUM_MPI, MPI_MAX, proccomm) != MPI_SUCCESS) {
    errorPrint ("bdgraphCheck: communication error (2)");
    return     (1);
  }

  if (reduglbtab[20] != 0) {                      /* Exit and return error information from previous checks */
    if (partgsttax != NULL)
      memFree (partgsttax);                       /* Free yet unbased group leader */
    return ((int) reduglbtab[20]);
  }

  if ((reduglbtab[1]  != - reduglbtab[0])  ||
      (reduglbtab[3]  != - reduglbtab[2])  ||
      (reduglbtab[5]  != - reduglbtab[4])  ||
      (reduglbtab[7]  != - reduglbtab[6])  ||
      (reduglbtab[9]  != - reduglbtab[8])  ||
      (reduglbtab[11] != - reduglbtab[10]) ||
      (reduglbtab[13] != - reduglbtab[12]) ||
      (reduglbtab[15] != - reduglbtab[14]) ||
      (reduglbtab[17] != - reduglbtab[16]) ||
      (reduglbtab[19] != - reduglbtab[18])) {
    errorPrint ("bdgraphCheck: inconsistent global graph data");
    return     (1);
  }

  if (grafptr->partgsttax != NULL)
    memCpy (partgsttax, grafptr->partgsttax + grafptr->s.baseval, grafptr->s.vertlocnbr * sizeof (GraphPart)); /* Copy local part data */
  else
    memSet (partgsttax, 0, grafptr->s.vertlocnbr * sizeof (GraphPart));
  dgraphHaloSync (&grafdat, partgsttax, GRAPHPART_MPI); /* Spread yet unbased halo part data across neighboring processes */
  partgsttax -= grafptr->s.baseval;
  cheklocval  = 0;

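  /* Every vertex recorded in the frontier array must have at least one
  ** neighbor, local or ghost, that lies in the other part; otherwise it
  ** does not belong to the frontier */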
  for (fronlocnum = 0; fronlocnum < grafptr->fronlocnbr; fronlocnum ++) {
    Gnum                vertlocnum;
    Gnum                edgelocnum;
    GraphPart           commcut;
    GraphPart           partval;

    vertlocnum = grafptr->fronloctab[fronlocnum];
    partval    = partgsttax[vertlocnum];

    for (edgelocnum = grafptr->s.vertloctax[vertlocnum], commcut = 0;
         edgelocnum < grafptr->s.vendloctax[vertlocnum]; edgelocnum ++) {
      GraphPart           partdlt;

      partdlt  = partgsttax[grafdat.edgegsttax[edgelocnum]] ^ partval;
      commcut |= partdlt;
    }
    if (commcut == 0) {
      errorPrint ("bdgraphCheck: invalid vertex in frontier array");
      cheklocval |= 1;
      break;
    }
  }

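  /* Recompute local part loads, part sizes and communication loads from
  ** scratch. Internal communication load is accumulated once per edge
  ** end, hence twice per cut edge; it is halved after the global sum.
  ** The loop also checks that every vertex with neighbors in both parts
  ** has been flagged as a frontier vertex */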
  complocload[0]   =
  complocload[1]   = 0;
  complocsize[0]   =
  complocsize[1]   = 0;
  commlocloadintn  = 0;
  commlocloadextn  = 0;
  commlocgainextn  = 0;
  edlolocval       = 1;                            /* Assume edges are not weighted */
  for (vertlocnum = grafptr->s.baseval; vertlocnum < grafptr->s.vertlocnnd; vertlocnum ++) {
    Gnum                partval;                  /* Part of current vertex */
    Gnum                edgelocnum;               /* Number of current edge */

    partval = (Gnum) partgsttax[vertlocnum];
    if (grafptr->veexloctax != NULL) {
      Gnum                veexval;

      veexval = grafptr->veexloctax[vertlocnum];
      commlocloadextn += veexval * partval;
      commlocgainextn += veexval * (1 - 2 * partval);
    }

    complocload[partval] += (grafptr->s.veloloctax == NULL) ? 1 : grafptr->s.veloloctax[vertlocnum];
    complocsize[partval] ++;

    commcut[0] =
    commcut[1] = 0;
    for (edgelocnum = grafptr->s.vertloctax[vertlocnum]; edgelocnum < grafptr->s.vendloctax[vertlocnum]; edgelocnum ++) {
      int                 partend;
      int                 partdlt;

      if (grafptr->s.edloloctax != NULL)
        edlolocval = grafptr->s.edloloctax[edgelocnum];
      partend = partgsttax[grafdat.edgegsttax[edgelocnum]];
      partdlt = partval ^ partend;
      commcut[partend] ++;
      commlocloadintn += partdlt * edlolocval;    /* Internal load is accounted for twice */
    }

    if ((commcut[0] != 0) && (commcut[1] != 0) && /* If vertex should be in separator */
        (flagloctax[vertlocnum] != 0)) {
      errorPrint ("bdgraphCheck: vertex should be in separator");
      cheklocval |= 2;
    }
  }

  if (grafptr->s.edgegsttax != grafdat.edgegsttax) /* If ghost edge array was allocated here, free it manually */
    memFree (grafdat.edgegsttax + grafptr->s.baseval);
  if (grafptr->s.procsidtab != grafdat.procsidtab) /* The same for procsidtab */
    memFree (grafdat.procsidtab);
  memFree (partgsttax + grafptr->s.baseval);      /* Free group leader */

  if ((cheklocval == 0) &&
      ((complocsize[0] != grafptr->complocsize0) ||
       (complocsize[1] != (grafptr->s.vertlocnbr - grafptr->complocsize0)))) {
    errorPrint ("bdgraphCheck: invalid local part size");
    cheklocval |= 4;
  }

  reduloctab[0] = complocload[0];
  reduloctab[1] = complocsize[0];
  reduloctab[2] = commlocloadintn;                /* Twice the internal load; sum globally before dividing by two */
  reduloctab[3] = commlocloadextn;
  reduloctab[4] = commlocgainextn;
  reduloctab[5] = cheklocval;

  if (MPI_Allreduce (reduloctab, reduglbtab, 6, GNUM_MPI, MPI_SUM, proccomm) != MPI_SUCCESS) {
    errorPrint ("bdgraphCheck: communication error (3)");
    return     (1);
  }

  if (reduglbtab[5] != 0)                         /* Return from previous errors */
    return (1);

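  /* Check the globally summed values against the fields stored in the
  ** bipartition structure. The expected global communication load is
  ** the halved internal load scaled by the distance between the two
  ** target domains, plus the external load contributions (the global
  ** sum of commlocloadextn, and commglbloadextn0) */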
  if (grafptr->compglbload0 != reduglbtab[0]) {
    errorPrint ("bdgraphCheck: invalid global part loads");
    cheklocval |= 8;
  }

  if (grafptr->compglbsize0 != reduglbtab[1]) {
    errorPrint ("bdgraphCheck: invalid global part sizes");
    cheklocval |= 16;
  }

  if (grafptr->commglbload != ((reduglbtab[2] / 2) * grafptr->domndist + reduglbtab[3] + grafptr->commglbloadextn0)) {
    errorPrint ("bdgraphCheck: invalid global communication loads");
    cheklocval |= 32;
  }

  if (grafptr->commglbgainextn != reduglbtab[4]) {
    errorPrint ("bdgraphCheck: invalid global communication gains");
    cheklocval |= 64;
  }

  if (MPI_Allreduce (&cheklocval, &chekglbval, 1, MPI_INT, MPI_MAX, proccomm) != MPI_SUCCESS) {
    errorPrint ("bdgraphCheck: communication error (4)");
    return     (1);
  }

  return (chekglbval);
}