
/*************************************xxt.c************************************
Module Name: xxt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL  60439              |Chicago, IL  60637              |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
**************************************xxt.c***********************************/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT  1
#define BOTH   0

typedef struct xxt_solver_info {
  PetscInt    n, m, n_global, m_global;
  PetscInt    nnz, max_nnz, msg_buf_sz;
  PetscInt    *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt    *col_sz, *col_indices;
  PetscScalar **col_vals, *x, *solve_uu, *solve_w;
  PetscInt    nsolves;
  PetscScalar tot_solve_time;
} xxt_info;
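/* X is stored by columns: for column i, col_indices[2*i] holds the row offset of
   its first nonzero and col_indices[2*i+1] (== col_sz[i]) its length, while
   col_vals[i] points at its values inside the contiguous array x; see
   xxt_generate() below */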

typedef struct matvec_info {
  PetscInt     n, m, n_global, m_global;
  PetscInt     *local2global;
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*);
  void *grid_data;
} mv_info;

struct xxt_CDT {
  PetscInt id;
  PetscInt ns;
  PetscInt level;
  xxt_info *info;
  mv_info  *mvi;
};
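
/* opaque XXT handle: id is checked for cross-processor consistency by
   check_handle(), ns is the (assumed) null-space dimension (0 for full rank),
   level is log2(PCTFS_num_nodes), info holds the factor, and mvi the local
   matvec data */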

static PetscInt n_xxt        =0;
static PetscInt n_xxt_handles=0;

/* prototypes */
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xxt_ADT xxt_handle);
static PetscErrorCode det_separators(xxt_ADT xxt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscErrorCode xxt_generate(xxt_ADT xxt_handle);
static PetscErrorCode do_xxt_factor(xxt_ADT xxt_handle);
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info*,PetscScalar*,PetscScalar*), void *grid_data);

/**************************************xxt.c***********************************/
xxt_ADT XXT_new(void)
{
  xxt_ADT xxt_handle;

  /* rolling count on n_xxt ... pot. problem here */
  n_xxt_handles++;
  xxt_handle       = (xxt_ADT)malloc(sizeof(struct xxt_CDT));
  xxt_handle->id   = ++n_xxt;
  xxt_handle->info = NULL; xxt_handle->mvi  = NULL;

  return(xxt_handle);
}

/**************************************xxt.c***********************************/
PetscErrorCode XXT_factor(xxt_ADT xxt_handle,     /* prev. allocated xxt  handle */
                    PetscInt *local2global, /* global column mapping       */
                    PetscInt n,             /* local num rows              */
                    PetscInt m,             /* local num cols              */
                    PetscErrorCode (*matvec)(void*,PetscScalar*,PetscScalar*), /* b_loc=A_local.x_loc         */
                    void *grid_data)        /* grid data for matvec        */
{
  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* only 2^k for now and all nodes participating */
  if ((1<<(xxt_handle->level=PCTFS_i_log2_num_nodes))!=PCTFS_num_nodes) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"only 2^k for now and MPI_COMM_WORLD!!! %D != %D\n",1<<PCTFS_i_log2_num_nodes,PCTFS_num_nodes);

  /* space for X info */
  xxt_handle->info = (xxt_info*)malloc(sizeof(xxt_info));

  /* set up matvec handles */
  xxt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xxt_handle->ns=0;

  /* determine separators and generate firing order - NB xxt info set here */
  det_separators(xxt_handle);

  return(do_xxt_factor(xxt_handle));
}

/**************************************xxt.c***********************************/
PetscErrorCode XXT_solve(xxt_ADT xxt_handle, PetscScalar *x, PetscScalar *b)
{
  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* need to copy b into x? */
  if (b) PCTFS_rvec_copy(x,b,xxt_handle->mvi->n);
  return do_xxt_solve(xxt_handle,x);
}

/**************************************xxt.c***********************************/
PetscInt XXT_free(xxt_ADT xxt_handle)
{
  PCTFS_comm_init();
  check_handle(xxt_handle);
  n_xxt_handles--;

  free(xxt_handle->info->nsep);
  free(xxt_handle->info->lnsep);
  free(xxt_handle->info->fo);
  free(xxt_handle->info->stages);
  free(xxt_handle->info->solve_uu);
  free(xxt_handle->info->solve_w);
  free(xxt_handle->info->x);
  free(xxt_handle->info->col_vals);
  free(xxt_handle->info->col_sz);
  free(xxt_handle->info->col_indices);
  free(xxt_handle->info);
  free(xxt_handle->mvi->local2global);
  PCTFS_gs_free(xxt_handle->mvi->PCTFS_gs_handle);
  free(xxt_handle->mvi);
  free(xxt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return(0);
}

/**************************************xxt.c***********************************/
PetscErrorCode XXT_stats(xxt_ADT xxt_handle)
{
  PetscInt       op[]  = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD};
  PetscInt       fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD};
  PetscInt       vals[9],  work[9];
  PetscScalar    fvals[3], fwork[3];
  PetscErrorCode ierr;

  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* if factorization not done there are no stats */
  if (!xxt_handle->info||!xxt_handle->mvi) {
    if (!PCTFS_my_id) { ierr = PetscPrintf(PETSC_COMM_WORLD,"XXT_stats() :: no stats available!\n");CHKERRQ(ierr); }
    return 1;
  }

  vals[0]=vals[1]=vals[2]=xxt_handle->info->nnz;
  vals[3]=vals[4]=vals[5]=xxt_handle->mvi->n;
  vals[6]=vals[7]=vals[8]=xxt_handle->info->msg_buf_sz;
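  /* each quantity is stored three times so the single reduction below returns
     its global min, max, and sum (the op array pairs GL_MIN, GL_MAX, GL_ADD
     with the three copies); these are the min/max/tot values printed below */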
  PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);

  fvals[0]=fvals[1]=fvals[2] =xxt_handle->info->tot_solve_time/xxt_handle->info->nsolves++;
  PCTFS_grop(fvals,fwork,sizeof(fop)/sizeof(fop[0])-1,fop);

  if (!PCTFS_my_id) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_nnz=%D\n",PCTFS_my_id,vals[0]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_nnz=%D\n",PCTFS_my_id,vals[1]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_nnz=%g\n",PCTFS_my_id,1.0*vals[2]/PCTFS_num_nodes);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xxt_nnz=%D\n",PCTFS_my_id,vals[2]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt   C(2d)  =%g\n",PCTFS_my_id,vals[2]/(PetscPowReal(1.0*vals[5],1.5)));CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt   C(3d)  =%g\n",PCTFS_my_id,vals[2]/(PetscPowReal(1.0*vals[5],1.6667)));CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_n  =%D\n",PCTFS_my_id,vals[3]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_n  =%D\n",PCTFS_my_id,vals[4]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_n  =%g\n",PCTFS_my_id,1.0*vals[5]/PCTFS_num_nodes);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xxt_n  =%D\n",PCTFS_my_id,vals[5]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_buf=%D\n",PCTFS_my_id,vals[6]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_buf=%D\n",PCTFS_my_id,vals[7]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_buf=%g\n",PCTFS_my_id,1.0*vals[8]/PCTFS_num_nodes);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_slv=%g\n",PCTFS_my_id,fvals[0]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_slv=%g\n",PCTFS_my_id,fvals[1]);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_slv=%g\n",PCTFS_my_id,fvals[2]/PCTFS_num_nodes);CHKERRQ(ierr);
  }

  return(0);
}

/*************************************xxt.c************************************

Description: get A_local, the local portion of the global coarse matrix, which
is a row-distributed n x m matrix with n < m.
   o my_ml holds the address of the ML struct associated with A_local and the coarse grid
   o local2global holds the global number of column i (i=0,...,m-1)
   o local2global holds the global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed using
     PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
**************************************xxt.c***********************************/
static PetscErrorCode do_xxt_factor(xxt_ADT xxt_handle)
{
  return xxt_generate(xxt_handle);
}

/**************************************xxt.c***********************************/
static PetscErrorCode xxt_generate(xxt_ADT xxt_handle)
{
  PetscInt       i,j,k,idex;
  PetscInt       dim, col;
  PetscScalar    *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt       *segs;
  PetscInt       op[] = {GL_ADD,0};
  PetscInt       off, len;
  PetscScalar    *x_ptr;
  PetscInt       *iptr, flag;
  PetscInt       start =0, end, work;
  PetscInt       op2[] = {GL_MIN,0};
  PCTFS_gs_ADT   PCTFS_gs_handle;
  PetscInt       *nsep, *lnsep, *fo;
  PetscInt       a_n            =xxt_handle->mvi->n;
  PetscInt       a_m            =xxt_handle->mvi->m;
  PetscInt       *a_local2global=xxt_handle->mvi->local2global;
  PetscInt       level;
  PetscInt       xxt_nnz=0, xxt_max_nnz=0;
  PetscInt       n, m;
  PetscInt       *col_sz, *col_indices, *stages;
  PetscScalar    **col_vals, *x;
  PetscInt       n_global;
  PetscInt       xxt_zero_nnz  =0;
  PetscInt       xxt_zero_nnz_0=0;
  PetscBLASInt   i1            = 1,dlen;
  PetscScalar    dm1           = -1.0;
  PetscErrorCode ierr;

  n               = xxt_handle->mvi->n;
  nsep            = xxt_handle->info->nsep;
  lnsep           = xxt_handle->info->lnsep;
  fo              = xxt_handle->info->fo;
  end             = lnsep[0];
  level           = xxt_handle->level;
  PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i=0, j=0; i<=level; i++) j+=nsep[i];

  m = j-xxt_handle->ns;
  if (m!=j) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"xxt_generate() :: null space exists %D %D %D\n",m,j,xxt_handle->ns);CHKERRQ(ierr);
  }

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  col_sz      = (PetscInt*) malloc(m*sizeof(PetscInt));
  col_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  col_vals    = (PetscScalar**) malloc(m*sizeof(PetscScalar*));
  for (i=j=0; i<m; i++, j+=2) {
    col_indices[j]=col_indices[j+1]=col_sz[i]=-1;
    col_vals[i]   = NULL;
  }
  col_indices[j]=-1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  segs   = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  PCTFS_ivec_zero(stages,level+1);
  PCTFS_ivec_copy(segs,nsep,level+1);
  for (i=0; i<level; i++) segs[i+1] += segs[i];
  stages[0] = segs[0];

  /* temporary vectors  */
  u  = (PetscScalar*) malloc(n*sizeof(PetscScalar));
  z  = (PetscScalar*) malloc(n*sizeof(PetscScalar));
  v  = (PetscScalar*) malloc(a_m*sizeof(PetscScalar));
  uu = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  w  = (PetscScalar*) malloc(m*sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i=1, j=0; i<=level; i++) j+=nsep[i];

  /* storage for sparse x values */
  n_global    = xxt_handle->info->n_global;
  xxt_max_nnz = (PetscInt)(2.5*PetscPowReal(1.0*n_global,1.6667) + j*n/2)/PCTFS_num_nodes;
  x           = (PetscScalar*) malloc(xxt_max_nnz*sizeof(PetscScalar));
  xxt_nnz     = 0;
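  /* the allocation above is only a heuristic estimate; if a new column does not
     fit, the buffer is doubled and the existing values copied over (see the
     hand-rolled realloc inside the main loop below) */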

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
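  /* for each separator dof, processed in firing order fo[] (smallest global
     index first within the active sub-hypercube):
       1) fire a unit vector v for that dof,
       2) A-orthogonalize it against the columns of X built so far
          (uu = X^T.A.v locally, summed across the sub-hc, then v -= X.uu),
       3) normalize by sqrt(v^T.A.v),
       4) append the nonzero span of v as a new column of X */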
  for (dim=i=j=0; i<m; i++) {
    /* time to move to the next level? */
    while (i==segs[dim]) {
      if (dim==level) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"dim about to exceed level\n");
      stages[dim++]=i;
      end         +=lnsep[dim];
    }
    stages[dim]=i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    (start<end) ? (col=fo[start]) : (col=INT_MAX);
    PCTFS_giop_hc(&col,&work,1,op2,dim);

    /* shouldn't need this */
    if (col==INT_MAX) {
      ierr = PetscInfo(0,"hey ... col==INT_MAX??\n");CHKERRQ(ierr);
      continue;
    }

    /* do I own it? I should */
    PCTFS_rvec_zero(v,a_m);
    if (col==fo[start]) {
      start++;
      idex=PCTFS_ivec_linear_search(col, a_local2global, a_n);
      if (idex!=-1) {
        v[idex] = 1.0; j++;
      } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"NOT FOUND!\n");
    } else {
      idex=PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idex!=-1) v[idex] = 1.0;
    }

    /* perform u = A.v_l */
    PCTFS_rvec_zero(u,n);
    do_matvec(xxt_handle->mvi,v,u);

    /* uu = X^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XXT_solve call ? */
    PCTFS_rvec_zero(uu,m);
    x_ptr=x;
    iptr = col_indices;
    for (k=0; k<i; k++) {
      off   = *iptr++;
      len   = *iptr++;
      ierr  = PetscBLASIntCast(len,&dlen);CHKERRQ(ierr);
      PetscStackCallBLAS("BLASdot",uu[k] = BLASdot_(&dlen,u+off,&i1,x_ptr,&i1));
      x_ptr+=len;
    }

    /* uu = X^T.u_l (comm portion) */
    ierr = PCTFS_ssgl_radd  (uu, w, dim, stages);CHKERRQ(ierr);

    /* z = X.uu */
    PCTFS_rvec_zero(z,n);
    x_ptr=x;
    iptr = col_indices;
    for (k=0; k<i; k++) {
      off  = *iptr++;
      len  = *iptr++;
      ierr = PetscBLASIntCast(len,&dlen);CHKERRQ(ierr);
      PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,&uu[k],x_ptr,&i1,z+off,&i1));
      x_ptr+=len;
    }

    /* compute v_l = v_l - z */
    PCTFS_rvec_zero(v+a_n,a_m-a_n);
    ierr = PetscBLASIntCast(n,&dlen);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,&dm1,z,&i1,v,&i1));

    /* compute u_l = A.v_l */
    if (a_n!=a_m) PCTFS_gs_gop_hc(PCTFS_gs_handle,v,"+\0",dim);
    PCTFS_rvec_zero(u,n);
    do_matvec(xxt_handle->mvi,v,u);

    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - local portion */
    ierr  = PetscBLASIntCast(n,&dlen);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASdot",alpha = BLASdot_(&dlen,u,&i1,v,&i1));
    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - comm portion */
    PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim);

    alpha = (PetscScalar) PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha                             */
    /* LATER use this to detect and determine null space */
    if (PetscAbsScalar(alpha)<1.0e-14) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"bad alpha! %g\n",alpha);

    /* compute v_l = v_l/sqrt(alpha) */
    PCTFS_rvec_scale(v,1.0/alpha,n);

    /* add newly generated column, v_l, to X */
    flag = 1;
    off=len=0;
    for (k=0; k<n; k++) {
      if (v[k]!=0.0) {
        len=k;
        if (flag) { off=k; flag=0; }
      }
    }

    len -= (off-1);

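    /* off/len now delimit the contiguous nonzero span of v on this processor:
       off is the first nonzero row and len the number of rows through the last
       nonzero */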
    if (len>0) {
      if ((xxt_nnz+len)>xxt_max_nnz) {
        ierr         = PetscInfo(0,"increasing space for X by 2x!\n");CHKERRQ(ierr);
        xxt_max_nnz *= 2;
        x_ptr        = (PetscScalar*) malloc(xxt_max_nnz*sizeof(PetscScalar));
        PCTFS_rvec_copy(x_ptr,x,xxt_nnz);
        free(x);
        x     = x_ptr;
        x_ptr+=xxt_nnz;
      }
      xxt_nnz += len;
      PCTFS_rvec_copy(x_ptr,v+off,len);

      /* keep track of number of zeros */
      if (dim) {
        for (k=0; k<len; k++) {
          if (x_ptr[k]==0.0) xxt_zero_nnz++;
        }
      } else {
        for (k=0; k<len; k++) {
          if (x_ptr[k]==0.0) xxt_zero_nnz_0++;
        }
      }
      col_indices[2*i] = off;
      col_sz[i] = col_indices[2*i+1] = len;
      col_vals[i] = x_ptr;
    } else {
      col_indices[2*i] = 0;
      col_sz[i]        = col_indices[2*i+1] = 0;
      col_vals[i]      = x_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim!=level) {
    stages[dim++] = i;
    ierr          = PetscInfo2(0,"disconnected!!! dim(%D)!=level(%D)\n",dim,level);CHKERRQ(ierr);
  }
  stages[dim]=i;

  xxt_handle->info->n              = xxt_handle->mvi->n;
  xxt_handle->info->m              = m;
  xxt_handle->info->nnz            = xxt_nnz;
  xxt_handle->info->max_nnz        = xxt_max_nnz;
  xxt_handle->info->msg_buf_sz     = stages[level]-stages[0];
  xxt_handle->info->solve_uu       = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  xxt_handle->info->solve_w        = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  xxt_handle->info->x              = x;
  xxt_handle->info->col_vals       = col_vals;
  xxt_handle->info->col_sz         = col_sz;
  xxt_handle->info->col_indices    = col_indices;
  xxt_handle->info->stages         = stages;
  xxt_handle->info->nsolves        = 0;
  xxt_handle->info->tot_solve_time = 0.0;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return(0);
}

/**************************************xxt.c***********************************/
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle,  PetscScalar *uc)
{
  PetscInt       off, len, *iptr;
  PetscInt       level        = xxt_handle->level;
  PetscInt       n            = xxt_handle->info->n;
  PetscInt       m            = xxt_handle->info->m;
  PetscInt       *stages      = xxt_handle->info->stages;
  PetscInt       *col_indices = xxt_handle->info->col_indices;
  PetscScalar    *x_ptr, *uu_ptr;
  PetscScalar    *solve_uu = xxt_handle->info->solve_uu;
  PetscScalar    *solve_w  = xxt_handle->info->solve_w;
  PetscScalar    *x        = xxt_handle->info->x;
  PetscBLASInt   i1        = 1,dlen;
  PetscErrorCode ierr;

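  /* apply the factored inverse in place: uc <- X.(X^T.uc); the X^T product
     (held in solve_uu) is summed across the hypercube by PCTFS_ssgl_radd() */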
  PetscFunctionBegin;
  uu_ptr=solve_uu;
  PCTFS_rvec_zero(uu_ptr,m);

  /* x  = X.X^T.b */
  /* uu = X^T.b */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len) {
    off       =*iptr++;
    len       =*iptr++;
    ierr      = PetscBLASIntCast(len,&dlen);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASdot",*uu_ptr++ = BLASdot_(&dlen,uc+off,&i1,x_ptr,&i1));
  }

  /* communication of beta */
  uu_ptr=solve_uu;
  if (level) {ierr = PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages);CHKERRQ(ierr);}

  PCTFS_rvec_zero(uc,n);

  /* x = X.uu */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len) {
    off  =*iptr++;
    len  =*iptr++;
    ierr = PetscBLASIntCast(len,&dlen);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,uu_ptr++,x_ptr,&i1,uc+off,&i1));
  }
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
static PetscErrorCode check_handle(xxt_ADT xxt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX};

  PetscFunctionBegin;
  if (!xxt_handle) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: NULL %D\n",xxt_handle);

  vals[0]=vals[1]=xxt_handle->id;
  PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);
  if ((vals[0]!=vals[1])||(xxt_handle->id<=0)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: id mismatch min/max %D/%D %D\n",vals[0],vals[1], xxt_handle->id);
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
static PetscErrorCode det_separators(xxt_ADT xxt_handle)
{
  PetscInt     i, ct, id;
  PetscInt     mask, edge, *iptr;
  PetscInt     *dir, *used;
  PetscInt     sum[4], w[4];
  PetscScalar  rsum[4], rw[4];
  PetscInt     op[] = {GL_ADD,0};
  PetscScalar  *lhs, *rhs;
  PetscInt     *nsep, *lnsep, *fo, nfo=0;
  PCTFS_gs_ADT PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;
  PetscInt     *local2global   = xxt_handle->mvi->local2global;
  PetscInt     n               = xxt_handle->mvi->n;
  PetscInt     m               = xxt_handle->mvi->m;
  PetscInt     level           = xxt_handle->level;
  PetscInt     shared          = 0;

  PetscFunctionBegin;
  dir  = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  nsep = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  lnsep= (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  fo   = (PetscInt*)malloc(sizeof(PetscInt)*(n+1));
  used = (PetscInt*)malloc(sizeof(PetscInt)*n);

  PCTFS_ivec_zero(dir,level+1);
  PCTFS_ivec_zero(nsep,level+1);
  PCTFS_ivec_zero(lnsep,level+1);
  PCTFS_ivec_set (fo,-1,n+1);
  PCTFS_ivec_zero(used,n);

  lhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
  rhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);

  /* determine the # of unique dof */
  PCTFS_rvec_zero(lhs,m);
  PCTFS_rvec_set(lhs,1.0,n);
  PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",level);
  PCTFS_rvec_zero(rsum,2);
  for (i=0; i<n; i++) {
    if (lhs[i]!=0.0) {
      rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i];
    }
  }
  PCTFS_grop_hc(rsum,rw,2,op,level);
  rsum[0]+=0.1;
  rsum[1]+=0.1;

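  /* after the gop, lhs[i] is the multiplicity of dof i; summing 1/lhs[i] counts
     each dof once (the number of unique dofs) while summing lhs[i] counts every
     copy, so the two totals differ exactly when dofs are shared across processors */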
  if (PetscAbsScalar(rsum[0]-rsum[1])>EPS) shared=1;

  xxt_handle->info->n_global=xxt_handle->info->m_global=(PetscInt) rsum[0];
  xxt_handle->mvi->n_global =xxt_handle->mvi->m_global =(PetscInt) rsum[0];

  /* determine separator sets top down */
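  /* recursive bisection over the hypercube: starting from the whole machine,
     each pass splits the active processors into a lower and an upper half,
     dofs on the interface between the halves form that level's separator and
     are pushed (from the back) onto the firing order fo[], then the upper half
     is folded into the lower (id -= mask) and the process repeats on the
     half-sized sub-hypercube */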
  if (shared) {
    for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level;edge>0;edge--,mask>>=1) {

      /* set rsh of hc, fire, and collect lhs responses */
      (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge);

      for (i=0;i<n;i++) {
        if (id< mask) {
          if (lhs[i]!=0.0) lhs[i]=1.0;
        }
        if (id>=mask) {
          if (rhs[i]!=0.0) rhs[i]=1.0;
        }
      }

      if (id< mask) PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge-1);
      else          PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge-1);

      /* count number of dofs I own that have signal and not in sep set */
      PCTFS_rvec_zero(rsum,4);
      for (PCTFS_ivec_zero(sum,4),ct=i=0;i<n;i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if (id< mask) {
            if (lhs[i]!=0.0) { sum[0]++; rsum[0]+=1.0/lhs[i]; }
          }
          /* number of dofs to be marked on rhs hc */
          if (id>=mask) {
            if (rhs[i]!=0.0) { sum[1]++; rsum[1]+=1.0/rhs[i]; }
          }
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
      (id<mask) ? (rsum[2]=ct) : (rsum[3]=ct);
      PCTFS_giop_hc(sum,w,4,op,edge);
      PCTFS_grop_hc(rsum,rw,4,op,edge);
      rsum[0]+=0.1; rsum[1]+=0.1; rsum[2]+=0.1; rsum[3]+=0.1;

      if (id<mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct=i=0;i<n;i++) {
          if ((!used[i])&&(lhs[i]!=0.0)) {
            ct++; nfo++;

            if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct>1) PCTFS_ivec_sort(iptr,ct);

        lnsep[edge]=ct;
        nsep[edge]=(PetscInt) rsum[0];
        dir [edge]=LEFT;
      }

      if (id>=mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct=i=0;i<n;i++) {
          if ((!used[i])&&(rhs[i]!=0.0)) {
            ct++; nfo++;

            if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct>1) PCTFS_ivec_sort(iptr,ct);

        lnsep[edge] = ct;
        nsep[edge]  = (PetscInt) rsum[1];
        dir [edge]  = RIGHT;
      }

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this?                */

      /* fold rhs hc into lower */
      if (id>=mask) id-=mask;
    }
  } else {
    for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level;edge>0;edge--,mask>>=1) {
      /* set rsh of hc, fire, and collect lhs responses */
      (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge);

      /* count number of dofs I own that have signal and not in sep set */
      for (PCTFS_ivec_zero(sum,4),ct=i=0;i<n;i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if ((id< mask)&&(lhs[i]!=0.0)) sum[0]++;
          /* number of dofs to be marked on rhs hc */
          if ((id>=mask)&&(rhs[i]!=0.0)) sum[1]++;
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
      PCTFS_giop_hc(sum,w,4,op,edge);

      /* lhs hc wins */
      if (sum[2]>=sum[3]) {
        if (id<mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct=i=0;i<n;i++) {
            if ((!used[i])&&(lhs[i]!=0.0)) {
              ct++; nfo++;
              *--iptr = local2global[i];
              used[i]=edge;
            }
          }
          if (ct>1) PCTFS_ivec_sort(iptr,ct);
          lnsep[edge]=ct;
        }
        nsep[edge]=sum[0];
        dir [edge]=LEFT;
      } else { /* rhs hc wins */
        if (id>=mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct=i=0;i<n;i++) {
            if ((!used[i])&&(rhs[i]!=0.0)) {
              ct++; nfo++;
              *--iptr = local2global[i];
              used[i]=edge;
            }
          }
          if (ct>1) PCTFS_ivec_sort(iptr,ct);
          lnsep[edge]=ct;
        }
        nsep[edge]=sum[1];
        dir [edge]=RIGHT;
      }
      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this?                */

      /* fold rhs hc into lower */
      if (id>=mask) id-=mask;
    }
  }

  /* level 0 is the on-processor case - so mark the remainder */
  for (ct=i=0; i<n; i++) {
    if (!used[i]) {
      ct++; nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct>1) PCTFS_ivec_sort(iptr,ct);
  lnsep[edge]=ct;
  nsep [edge]=ct;
  dir  [edge]=LEFT;

  xxt_handle->info->nsep  = nsep;
  xxt_handle->info->lnsep = lnsep;
  xxt_handle->info->fo    = fo;
  xxt_handle->info->nfo   = nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(0);
}

/**************************************xxt.c***********************************/
static mv_info *set_mvi(PetscInt *local2global,PetscInt n,PetscInt m,PetscErrorCode (*matvec)(mv_info*,PetscScalar*,PetscScalar*),void *grid_data)
{
  mv_info *mvi;

  mvi               = (mv_info*)malloc(sizeof(mv_info));
  mvi->n            = n;
  mvi->m            = m;
  mvi->n_global     = -1;
  mvi->m_global     = -1;
  mvi->local2global = (PetscInt*)malloc((m+1)*sizeof(PetscInt));
  PCTFS_ivec_copy(mvi->local2global,local2global,m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec          = matvec;
  mvi->grid_data       = grid_data;

  /* set xxt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return(mvi);
}

/**************************************xxt.c***********************************/
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  A->matvec((mv_info*)A->grid_data,v,u);
  PetscFunctionReturn(0);
}