
/*
  Defines projective product routines where A is an MPIAIJ matrix
          C = P^T * A * P
*/

#include <../src/mat/impls/aij/seq/aij.h>   /*I "petscmat.h" I*/
#include <../src/mat/utils/freespace.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <petscbt.h>
#include <petsctime.h>
#include <petsc/private/hashmapiv.h>
#include <petsc/private/hashseti.h>
#include <petscsf.h>

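/*
  Illustrative context (not part of this file): the routines below implement
  the MATMPIAIJ backends of MatPtAP(). A minimal calling sketch, assuming A
  and P are assembled MATMPIAIJ matrices with compatible layouts, is

    Mat C;
    ierr = MatPtAP(A,P,MAT_INITIAL_MATRIX,2.0,&C);CHKERRQ(ierr); /* symbolic + numeric C = P^T*A*P */
    ierr = MatPtAP(A,P,MAT_REUSE_MATRIX,2.0,&C);CHKERRQ(ierr);   /* renumerate after A's values change */
    ierr = MatDestroy(&C);CHKERRQ(ierr);

  where the fill argument (2.0 here) estimates nnz(C)/(nnz(A)+nnz(P)).
*/
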
PetscErrorCode MatView_MPIAIJ_PtAP(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;
  Mat_APMPI         *ptap;

  PetscFunctionBegin;
  MatCheckProduct(A,1);
  ptap = (Mat_APMPI*)A->product->data;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      if (ptap->algType == 0) {
        ierr = PetscViewerASCIIPrintf(viewer,"using scalable MatPtAP() implementation\n");CHKERRQ(ierr);
      } else if (ptap->algType == 1) {
        ierr = PetscViewerASCIIPrintf(viewer,"using nonscalable MatPtAP() implementation\n");CHKERRQ(ierr);
      } else if (ptap->algType == 2) {
        ierr = PetscViewerASCIIPrintf(viewer,"using allatonce MatPtAP() implementation\n");CHKERRQ(ierr);
      } else if (ptap->algType == 3) {
        ierr = PetscViewerASCIIPrintf(viewer,"using merged allatonce MatPtAP() implementation\n");CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatDestroy_MPIAIJ_PtAP(void *data)
{
  PetscErrorCode      ierr;
  Mat_APMPI           *ptap = (Mat_APMPI*)data;
  Mat_Merge_SeqsToMPI *merge;

  PetscFunctionBegin;
  ierr = PetscFree2(ptap->startsj_s,ptap->startsj_r);CHKERRQ(ierr);
  ierr = PetscFree(ptap->bufa);CHKERRQ(ierr);
  ierr = MatDestroy(&ptap->P_loc);CHKERRQ(ierr);
  ierr = MatDestroy(&ptap->P_oth);CHKERRQ(ierr);
  ierr = MatDestroy(&ptap->A_loc);CHKERRQ(ierr); /* used by MatTransposeMatMult() */
  ierr = MatDestroy(&ptap->Rd);CHKERRQ(ierr);
  ierr = MatDestroy(&ptap->Ro);CHKERRQ(ierr);
  if (ptap->AP_loc) { /* used by alg_rap */
    Mat_SeqAIJ *ap = (Mat_SeqAIJ*)(ptap->AP_loc)->data;
    ierr = PetscFree(ap->i);CHKERRQ(ierr);
    ierr = PetscFree2(ap->j,ap->a);CHKERRQ(ierr);
    ierr = MatDestroy(&ptap->AP_loc);CHKERRQ(ierr);
  } else { /* used by alg_ptap */
    ierr = PetscFree(ptap->api);CHKERRQ(ierr);
    ierr = PetscFree(ptap->apj);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&ptap->C_loc);CHKERRQ(ierr);
  ierr = MatDestroy(&ptap->C_oth);CHKERRQ(ierr);
  if (ptap->apa) {ierr = PetscFree(ptap->apa);CHKERRQ(ierr);}

  ierr = MatDestroy(&ptap->Pt);CHKERRQ(ierr);

  merge = ptap->merge;
  if (merge) { /* used by alg_ptap */
    ierr = PetscFree(merge->id_r);CHKERRQ(ierr);
    ierr = PetscFree(merge->len_s);CHKERRQ(ierr);
    ierr = PetscFree(merge->len_r);CHKERRQ(ierr);
    ierr = PetscFree(merge->bi);CHKERRQ(ierr);
    ierr = PetscFree(merge->bj);CHKERRQ(ierr);
    ierr = PetscFree(merge->buf_ri[0]);CHKERRQ(ierr);
    ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr);
    ierr = PetscFree(merge->buf_rj[0]);CHKERRQ(ierr);
    ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr);
    ierr = PetscFree(merge->coi);CHKERRQ(ierr);
    ierr = PetscFree(merge->coj);CHKERRQ(ierr);
    ierr = PetscFree(merge->owners_co);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(&merge->rowmap);CHKERRQ(ierr);
    ierr = PetscFree(ptap->merge);CHKERRQ(ierr);
  }
  ierr = ISLocalToGlobalMappingDestroy(&ptap->ltog);CHKERRQ(ierr);

  ierr = PetscSFDestroy(&ptap->sf);CHKERRQ(ierr);
  ierr = PetscFree(ptap->c_othi);CHKERRQ(ierr);
  ierr = PetscFree(ptap->c_rmti);CHKERRQ(ierr);
  ierr = PetscFree(ptap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,Mat C)
{
  PetscErrorCode    ierr;
  Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
  Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
  Mat_SeqAIJ        *ap,*p_loc,*p_oth=NULL,*c_seq;
  Mat_APMPI         *ptap;
  Mat               AP_loc,C_loc,C_oth;
  PetscInt          i,rstart,rend,cm,ncols,row,*api,*apj,am = A->rmap->n,apnz,nout;
  PetscScalar       *apa;
  const PetscInt    *cols;
  const PetscScalar *vals;

  PetscFunctionBegin;
  MatCheckProduct(C,3);
  ptap = (Mat_APMPI*)C->product->data;
  if (!ptap) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be computed. Missing data");
  if (!ptap->AP_loc) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatProductClear()");

  ierr = MatZeroEntries(C);CHKERRQ(ierr);

  /* 1) get Rd = Pd^T, Ro = Po^T */
  if (ptap->reuse == MAT_REUSE_MATRIX) {
    ierr = MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);CHKERRQ(ierr);
    ierr = MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);CHKERRQ(ierr);
  }

  /* 2) get AP_loc */
  AP_loc = ptap->AP_loc;
  ap = (Mat_SeqAIJ*)AP_loc->data;

  /* 2-1) get P_oth = ptap->P_oth and P_loc = ptap->P_loc */
  /*-----------------------------------------------------*/
  if (ptap->reuse == MAT_REUSE_MATRIX) {
    /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
    ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
    ierr = MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
  }

  /* 2-2) compute numeric A_loc*P - dominating part */
  /* ---------------------------------------------- */
  /* get data from symbolic products */
  p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
  if (ptap->P_oth) p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;

  api   = ap->i;
  apj   = ap->j;
  ierr = ISLocalToGlobalMappingApply(ptap->ltog,api[AP_loc->rmap->n],apj,apj);CHKERRQ(ierr);
  for (i=0; i<am; i++) {
    /* AP[i,:] = A[i,:]*P = Ad*P_loc + Ao*P_oth */
    apnz = api[i+1] - api[i];
    apa = ap->a + api[i];
    ierr = PetscArrayzero(apa,apnz);CHKERRQ(ierr);
    AProw_scalable(i,ad,ao,p_loc,p_oth,api,apj,apa);
  }
  ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,api[AP_loc->rmap->n],apj,&nout,apj);CHKERRQ(ierr);
  if (api[AP_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",api[AP_loc->rmap->n],nout);

  /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
  /* Always use scalable version since we are in the MPI scalable version */
  ierr = MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Rd,AP_loc,ptap->C_loc);CHKERRQ(ierr);
  ierr = MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Ro,AP_loc,ptap->C_oth);CHKERRQ(ierr);

  C_loc = ptap->C_loc;
  C_oth = ptap->C_oth;

  /* add C_loc and C_oth to C */
  ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);

  /* C_loc -> C */
  cm    = C_loc->rmap->N;
  c_seq = (Mat_SeqAIJ*)C_loc->data;
  cols = c_seq->j;
  vals = c_seq->a;
  ierr = ISLocalToGlobalMappingApply(ptap->ltog,c_seq->i[C_loc->rmap->n],c_seq->j,c_seq->j);CHKERRQ(ierr);

  /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat function can only be used when C->was_assembled is PETSC_FALSE and */
  /* when there are no off-processor parts. */
  /* If was_assembled is true, then the statement aj[rowstart_diag+dnz_row] = mat_j[col] - cstart; in MatSetValues_MPIAIJ_CopyFromCSRFormat */
  /* is no longer true. Then the more complex function MatSetValues_MPIAIJ() has to be used, where the column index is looked up from */
  /* a table, and other, more complex stuff has to be done. */
  if (C->assembled) {
    C->was_assembled = PETSC_TRUE;
    C->assembled     = PETSC_FALSE;
  }
  if (C->was_assembled) {
    for (i=0; i<cm; i++) {
      ncols = c_seq->i[i+1] - c_seq->i[i];
      row = rstart + i;
      ierr = MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
      cols += ncols; vals += ncols;
    }
  } else {
    ierr = MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);CHKERRQ(ierr);
  }
  ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_seq->i[C_loc->rmap->n],c_seq->j,&nout,c_seq->j);CHKERRQ(ierr);
  if (c_seq->i[C_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_seq->i[C_loc->rmap->n],nout);

  /* C_oth -> C, off-processor part */
  cm = C_oth->rmap->N;
  c_seq = (Mat_SeqAIJ*)C_oth->data;
  cols = c_seq->j;
  vals = c_seq->a;
  ierr = ISLocalToGlobalMappingApply(ptap->ltog,c_seq->i[C_oth->rmap->n],c_seq->j,c_seq->j);CHKERRQ(ierr);
  for (i=0; i<cm; i++) {
    ncols = c_seq->i[i+1] - c_seq->i[i];
    row = p->garray[i];
    ierr = MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
    cols += ncols; vals += ncols;
  }
  ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  ptap->reuse = MAT_REUSE_MATRIX;

  ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_seq->i[C_oth->rmap->n],c_seq->j,&nout,c_seq->j);CHKERRQ(ierr);
  if (c_seq->i[C_oth->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_seq->i[C_oth->rmap->n],nout);
  PetscFunctionReturn(0);
}

PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,PetscReal fill,Mat Cmpi)
{
  PetscErrorCode      ierr;
  Mat_APMPI           *ptap;
  Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
  MPI_Comm            comm;
  PetscMPIInt         size,rank;
  Mat                 P_loc,P_oth;
  PetscFreeSpaceList  free_space=NULL,current_space=NULL;
  PetscInt            am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
  PetscInt            *lnk,i,k,pnz,row,nsend;
  PetscMPIInt         tagi,tagj,*len_si,*len_s,*len_ri,icompleted=0,nrecv;
  PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
  PetscInt            len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
  PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
  MPI_Request         *swaits,*rwaits;
  MPI_Status          *sstatus,rstatus;
  PetscLayout         rowmap;
  PetscInt            *owners_co,*coi,*coj;    /* i and j array of (p->B)^T*A*P - used in the communication */
  PetscMPIInt         *len_r,*id_r;    /* array of length of comm->size, store send/recv matrix values */
  PetscInt            *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,Crmax,*aj,*ai,*pi,nout;
  Mat_SeqAIJ          *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
  PetscScalar         *apv;
  PetscTable          ta;
  MatType             mtype;
  const char          *prefix;
#if defined(PETSC_USE_INFO)
  PetscReal           apfill;
#endif

  PetscFunctionBegin;
  MatCheckProduct(Cmpi,4);
  if (Cmpi->product->data) SETERRQ(PetscObjectComm((PetscObject)Cmpi),PETSC_ERR_PLIB,"Product data not empty");
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;

  /* create symbolic parallel matrix Cmpi */
  ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
  ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);

  /* create struct Mat_APMPI and attach it to C later */
  ierr        = PetscNew(&ptap);CHKERRQ(ierr);
  ptap->reuse = MAT_INITIAL_MATRIX;
  ptap->algType = 0;

  /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
  ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&P_oth);CHKERRQ(ierr);
  /* get P_loc by taking all local rows of P */
  ierr = MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&P_loc);CHKERRQ(ierr);

  ptap->P_loc = P_loc;
  ptap->P_oth = P_oth;

  /* (0) compute Rd = Pd^T, Ro = Po^T */
  /* --------------------------------- */
  ierr = MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);CHKERRQ(ierr);
  ierr = MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);CHKERRQ(ierr);

  /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
  /* ----------------------------------------------------------------- */
  p_loc  = (Mat_SeqAIJ*)P_loc->data;
  if (P_oth) p_oth = (Mat_SeqAIJ*)P_oth->data;

  /* create and initialize a linked list */
  ierr = PetscTableCreate(pn,pN,&ta);CHKERRQ(ierr); /* for computing AP_loc and Cmpi */
  MatRowMergeMax_SeqAIJ(p_loc,P_loc->rmap->N,ta);
  MatRowMergeMax_SeqAIJ(p_oth,P_oth->rmap->N,ta);
  ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr); /* Crmax = nnz(sum of Prows) */

  ierr = PetscLLCondensedCreate_Scalable(Crmax,&lnk);CHKERRQ(ierr);

  /* Initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
  if (ao) {
    ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);CHKERRQ(ierr);
  } else {
    ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);CHKERRQ(ierr);
  }
  current_space = free_space;
  nspacedouble  = 0;

  ierr   = PetscMalloc1(am+1,&api);CHKERRQ(ierr);
  api[0] = 0;
  for (i=0; i<am; i++) {
    /* diagonal portion: Ad[i,:]*P */
    ai = ad->i; pi = p_loc->i;
    nzi = ai[i+1] - ai[i];
    aj  = ad->j + ai[i];
    for (j=0; j<nzi; j++) {
      row  = aj[j];
      pnz  = pi[row+1] - pi[row];
      Jptr = p_loc->j + pi[row];
      /* add non-zero cols of P into the sorted linked list lnk */
      ierr = PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);CHKERRQ(ierr);
    }
    /* off-diagonal portion: Ao[i,:]*P */
    if (ao) {
      ai = ao->i; pi = p_oth->i;
      nzi = ai[i+1] - ai[i];
      aj  = ao->j + ai[i];
      for (j=0; j<nzi; j++) {
        row  = aj[j];
        pnz  = pi[row+1] - pi[row];
        Jptr = p_oth->j + pi[row];
        ierr = PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);CHKERRQ(ierr);
      }
    }
    apnz     = lnk[0];
    api[i+1] = api[i] + apnz;

    /* if free space is not available, double the total space in the list */
    if (current_space->local_remaining<apnz) {
      ierr = PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);CHKERRQ(ierr);
      nspacedouble++;
    }

    /* Copy data into free space, then initialize lnk */
    ierr = PetscLLCondensedClean_Scalable(apnz,current_space->array,lnk);CHKERRQ(ierr);

    current_space->array           += apnz;
    current_space->local_used      += apnz;
    current_space->local_remaining -= apnz;
  }
  /* Allocate space for apj and apv, initialize apj, and */
  /* destroy list of free space and other temporary array(s) */
  ierr = PetscCalloc2(api[am],&apj,api[am],&apv);CHKERRQ(ierr);
  ierr = PetscFreeSpaceContiguous(&free_space,apj);CHKERRQ(ierr);
  ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr);

  /* Create AP_loc for reuse */
  ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);CHKERRQ(ierr);
  ierr = MatSeqAIJCompactOutExtraColumns_SeqAIJ(ptap->AP_loc,&ptap->ltog);CHKERRQ(ierr);

#if defined(PETSC_USE_INFO)
  if (ao) {
    apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
  } else {
    apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
  }
  ptap->AP_loc->info.mallocs           = nspacedouble;
  ptap->AP_loc->info.fill_ratio_given  = fill;
  ptap->AP_loc->info.fill_ratio_needed = apfill;

  if (api[am]) {
    ierr = PetscInfo3(ptap->AP_loc,"Scalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);CHKERRQ(ierr);
    ierr = PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,P,MatReuse,%g,&C) for best AP_loc performance.\n",(double)apfill);CHKERRQ(ierr);
  } else {
    ierr = PetscInfo(ptap->AP_loc,"Scalable algorithm, AP_loc is empty\n");CHKERRQ(ierr);
  }
#endif

  /* (2-1) compute symbolic Co = Ro*AP_loc */
  /* -------------------------------------- */
  ierr = MatProductCreate(ptap->Ro,ptap->AP_loc,NULL,&ptap->C_oth);CHKERRQ(ierr);
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(ptap->C_oth,prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(ptap->C_oth,"inner_offdiag_");CHKERRQ(ierr);

  ierr = MatProductSetType(ptap->C_oth,MATPRODUCT_AB);CHKERRQ(ierr);
  ierr = MatProductSetAlgorithm(ptap->C_oth,"sorted");CHKERRQ(ierr);
  ierr = MatProductSetFill(ptap->C_oth,fill);CHKERRQ(ierr);
  ierr = MatProductSetFromOptions(ptap->C_oth);CHKERRQ(ierr);
  ierr = MatProductSymbolic(ptap->C_oth);CHKERRQ(ierr);

  /* (3) send coj of C_oth to other processors */
  /* ------------------------------------------ */
  /* determine row ownership */
  ierr = PetscLayoutCreate(comm,&rowmap);CHKERRQ(ierr);
  rowmap->n  = pn;
  rowmap->bs = 1;
  ierr   = PetscLayoutSetUp(rowmap);CHKERRQ(ierr);
  owners = rowmap->range;

  /* determine the number of messages to send, their lengths */
  ierr = PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);CHKERRQ(ierr);
  ierr = PetscArrayzero(len_s,size);CHKERRQ(ierr);
  ierr = PetscArrayzero(len_si,size);CHKERRQ(ierr);

  c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
  coi   = c_oth->i; coj = c_oth->j;
  con   = ptap->C_oth->rmap->n;
  proc  = 0;
  ierr = ISLocalToGlobalMappingApply(ptap->ltog,coi[con],coj,coj);CHKERRQ(ierr);
  for (i=0; i<con; i++) {
    while (prmap[i] >= owners[proc+1]) proc++;
    len_si[proc]++;               /* num of rows in Co(=Pt*AP) to be sent to [proc] */
    len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
  }

  len          = 0; /* max length of buf_si[], see (4) */
  owners_co[0] = 0;
  nsend        = 0;
  for (proc=0; proc<size; proc++) {
    owners_co[proc+1] = owners_co[proc] + len_si[proc];
    if (len_s[proc]) {
      nsend++;
      len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
      len         += len_si[proc];
    }
  }

  /* determine the number and length of messages to receive for coi and coj */
  ierr = PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);CHKERRQ(ierr);
  ierr = PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);CHKERRQ(ierr);

  /* post the Irecv and Isend of coj */
  ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
  ierr = PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsend+1,&swaits);CHKERRQ(ierr);
  for (proc=0, k=0; proc<size; proc++) {
    if (!len_s[proc]) continue;
    i    = owners_co[proc];
    ierr = MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);CHKERRQ(ierr);
    k++;
  }

  /* (2-2) compute symbolic C_loc = Rd*AP_loc */
  /* ---------------------------------------- */
  ierr = MatProductCreate(ptap->Rd,ptap->AP_loc,NULL,&ptap->C_loc);CHKERRQ(ierr);
  ierr = MatProductSetType(ptap->C_loc,MATPRODUCT_AB);CHKERRQ(ierr);
  ierr = MatProductSetAlgorithm(ptap->C_loc,"default");CHKERRQ(ierr);
  ierr = MatProductSetFill(ptap->C_loc,fill);CHKERRQ(ierr);

  ierr = MatSetOptionsPrefix(ptap->C_loc,prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(ptap->C_loc,"inner_diag_");CHKERRQ(ierr);

  ierr = MatProductSetFromOptions(ptap->C_loc);CHKERRQ(ierr);
  ierr = MatProductSymbolic(ptap->C_loc);CHKERRQ(ierr);

  c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;
  ierr = ISLocalToGlobalMappingApply(ptap->ltog,c_loc->i[ptap->C_loc->rmap->n],c_loc->j,c_loc->j);CHKERRQ(ierr);

  /* receives coj are complete */
  for (i=0; i<nrecv; i++) {
    ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
  }
  ierr = PetscFree(rwaits);CHKERRQ(ierr);
  if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}

  /* add received column indices into ta to update Crmax */
  for (k=0; k<nrecv; k++) {/* k-th received message */
    Jptr = buf_rj[k];
    for (j=0; j<len_r[k]; j++) {
      ierr = PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);CHKERRQ(ierr);
    }
  }
  ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr);
  ierr = PetscTableDestroy(&ta);CHKERRQ(ierr);

  /* (4) send and recv coi */
  /*-----------------------*/
  ierr   = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
  ierr   = PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);CHKERRQ(ierr);
  ierr   = PetscMalloc1(len+1,&buf_s);CHKERRQ(ierr);
  buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
  for (proc=0,k=0; proc<size; proc++) {
    if (!len_s[proc]) continue;
    /* form outgoing message for i-structure:
         buf_si[0]:                 nrows to be sent
               [1:nrows]:           row index (global)
               [nrows+1:2*nrows+1]: i-structure index
    */
    /*-------------------------------------------*/
    nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
    buf_si_i    = buf_si + nrows+1;
    buf_si[0]   = nrows;
    buf_si_i[0] = 0;
    nrows       = 0;
    for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
      nzi = coi[i+1] - coi[i];
      buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;   /* i-structure */
      buf_si[nrows+1]   = prmap[i] - owners[proc]; /* local row index */
      nrows++;
    }
    ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);CHKERRQ(ierr);
    k++;
    buf_si += len_si[proc];
  }
  for (i=0; i<nrecv; i++) {
    ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
  }
  ierr = PetscFree(rwaits);CHKERRQ(ierr);
  if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}

  ierr = PetscFree4(len_s,len_si,sstatus,owners_co);CHKERRQ(ierr);
  ierr = PetscFree(len_ri);CHKERRQ(ierr);
  ierr = PetscFree(swaits);CHKERRQ(ierr);
  ierr = PetscFree(buf_s);CHKERRQ(ierr);

  /* (5) compute the local portion of Cmpi */
  /* ------------------------------------------ */
  /* set initial free space to be Crmax, sufficient for holding nonzeros in each row of Cmpi */
  ierr          = PetscFreeSpaceGet(Crmax,&free_space);CHKERRQ(ierr);
  current_space = free_space;

  ierr = PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);CHKERRQ(ierr);
  for (k=0; k<nrecv; k++) {
    buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
    nrows       = *buf_ri_k[k];
    nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th recved i-structure */
    nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of k-th recved i-structure */
  }

  ierr = MatPreallocateInitialize(comm,pn,pn,dnz,onz);CHKERRQ(ierr);
  ierr = PetscLLCondensedCreate_Scalable(Crmax,&lnk);CHKERRQ(ierr);
  for (i=0; i<pn; i++) {
    /* add C_loc into Cmpi */
    nzi  = c_loc->i[i+1] - c_loc->i[i];
    Jptr = c_loc->j + c_loc->i[i];
    ierr = PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);CHKERRQ(ierr);

    /* add received col data into lnk */
    for (k=0; k<nrecv; k++) { /* k-th received message */
      if (i == *nextrow[k]) { /* i-th row */
        nzi  = *(nextci[k]+1) - *nextci[k];
        Jptr = buf_rj[k] + *nextci[k];
        ierr = PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);CHKERRQ(ierr);
        nextrow[k]++; nextci[k]++;
      }
    }
    nzi = lnk[0];

    /* copy data into free space, then initialize lnk */
    ierr = PetscLLCondensedClean_Scalable(nzi,current_space->array,lnk);CHKERRQ(ierr);
    ierr = MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);CHKERRQ(ierr);
  }
  ierr = PetscFree3(buf_ri_k,nextrow,nextci);CHKERRQ(ierr);
  ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr);
  ierr = PetscFreeSpaceDestroy(free_space);CHKERRQ(ierr);

  /* local sizes and preallocation */
  ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  if (P->cmap->bs > 0) {
    ierr = PetscLayoutSetBlockSize(Cmpi->rmap,P->cmap->bs);CHKERRQ(ierr);
    ierr = PetscLayoutSetBlockSize(Cmpi->cmap,P->cmap->bs);CHKERRQ(ierr);
  }
  ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);

  /* members in merge */
  ierr = PetscFree(id_r);CHKERRQ(ierr);
  ierr = PetscFree(len_r);CHKERRQ(ierr);
  ierr = PetscFree(buf_ri[0]);CHKERRQ(ierr);
  ierr = PetscFree(buf_ri);CHKERRQ(ierr);
  ierr = PetscFree(buf_rj[0]);CHKERRQ(ierr);
  ierr = PetscFree(buf_rj);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&rowmap);CHKERRQ(ierr);

  nout = 0;
  ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_oth->i[ptap->C_oth->rmap->n],c_oth->j,&nout,c_oth->j);CHKERRQ(ierr);
  if (c_oth->i[ptap->C_oth->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_oth->i[ptap->C_oth->rmap->n],nout);
  ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_loc->i[ptap->C_loc->rmap->n],c_loc->j,&nout,c_loc->j);CHKERRQ(ierr);
  if (c_loc->i[ptap->C_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_loc->i[ptap->C_loc->rmap->n],nout);

  /* attach the supporting struct to Cmpi for reuse */
  Cmpi->product->data    = ptap;
  Cmpi->product->view    = MatView_MPIAIJ_PtAP;
  Cmpi->product->destroy = MatDestroy_MPIAIJ_PtAP;

  /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
  Cmpi->assembled        = PETSC_FALSE;
  Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable;
  PetscFunctionReturn(0);
}

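/*
  The row-merge sweeps above use the condensed sorted linked list; the call
  pattern, reduced to essentials (an illustrative sketch, not compiled code):

    ierr = PetscLLCondensedCreate_Scalable(lnk_max,&lnk);CHKERRQ(ierr);
    // per row: merge the column lists of all contributing rows
    ierr = PetscLLCondensedAddSorted_Scalable(nz,cols,lnk);CHKERRQ(ierr);
    nzrow = lnk[0];                                           // merged row length
    ierr = PetscLLCondensedClean_Scalable(nzrow,space,lnk);CHKERRQ(ierr); // drain sorted indices, reset list
    ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr);
*/
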
PETSC_STATIC_INLINE PetscErrorCode MatPtAPSymbolicComputeOneRowOfAP_private(Mat A,Mat P,Mat P_oth,const PetscInt *map,PetscInt dof,PetscInt i,PetscHSetI dht,PetscHSetI oht)
{
  Mat_MPIAIJ           *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
  Mat_SeqAIJ           *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data,*p_oth=(Mat_SeqAIJ*)P_oth->data,*pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
  PetscInt             *ai,nzi,j,*aj,row,col,*pi,*pj,pnz,nzpi,*p_othcols,k;
  PetscInt             pcstart,pcend,column,offset;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  pcstart = P->cmap->rstart;
  pcstart *= dof;
  pcend   = P->cmap->rend;
  pcend   *= dof;
  /* diagonal portion: Ad[i,:]*P */
  ai = ad->i;
  nzi = ai[i+1] - ai[i];
  aj  = ad->j + ai[i];
  for (j=0; j<nzi; j++) {
    row  = aj[j];
    offset = row%dof;
    row   /= dof;
    nzpi = pd->i[row+1] - pd->i[row];
    pj  = pd->j + pd->i[row];
    for (k=0; k<nzpi; k++) {
      ierr = PetscHSetIAdd(dht,pj[k]*dof+offset+pcstart);CHKERRQ(ierr);
    }
  }
  /* off diag P */
  for (j=0; j<nzi; j++) {
    row  = aj[j];
    offset = row%dof;
    row   /= dof;
    nzpi = po->i[row+1] - po->i[row];
    pj  = po->j + po->i[row];
    for (k=0; k<nzpi; k++) {
      ierr = PetscHSetIAdd(oht,p->garray[pj[k]]*dof+offset);CHKERRQ(ierr);
    }
  }

  /* off diagonal part: Ao[i, :]*P_oth */
  if (ao) {
    ai = ao->i;
    pi = p_oth->i;
    nzi = ai[i+1] - ai[i];
    aj  = ao->j + ai[i];
    for (j=0; j<nzi; j++) {
      row  = aj[j];
      offset = a->garray[row]%dof;
      row  = map[row];
      pnz  = pi[row+1] - pi[row];
      p_othcols = p_oth->j + pi[row];
      for (col=0; col<pnz; col++) {
        column = p_othcols[col] * dof + offset;
        if (column>=pcstart && column<pcend) {
          ierr = PetscHSetIAdd(dht,column);CHKERRQ(ierr);
        } else {
          ierr = PetscHSetIAdd(oht,column);CHKERRQ(ierr);
        }
      }
    }
  } /* end if (ao) */
  PetscFunctionReturn(0);
}

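/*
  Worked example of the dof indexing used above (illustrative): with dof=2, a
  local A column index row=7 addresses component offset=7%2=1 of block row
  7/2=3 of P; a nonzero P(3,pj) then lands in AP column pj*2+1 (plus pcstart
  for the diagonal block), so each column of the dof-collapsed P expands into
  dof interleaved columns of AP.
*/
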
PETSC_STATIC_INLINE PetscErrorCode MatPtAPNumericComputeOneRowOfAP_private(Mat A,Mat P,Mat P_oth,const PetscInt *map,PetscInt dof,PetscInt i,PetscHMapIV hmap)
{
  Mat_MPIAIJ           *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
  Mat_SeqAIJ           *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data,*p_oth=(Mat_SeqAIJ*)P_oth->data,*pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
  PetscInt             *ai,nzi,j,*aj,row,col,*pi,pnz,*p_othcols,pcstart,*pj,k,nzpi,offset;
  PetscScalar          ra,*aa,*pa;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  pcstart = P->cmap->rstart;
  pcstart *= dof;

  /* diagonal portion: Ad[i,:]*P */
  ai  = ad->i;
  nzi = ai[i+1] - ai[i];
  aj  = ad->j + ai[i];
  aa  = ad->a + ai[i];
  for (j=0; j<nzi; j++) {
    ra   = aa[j];
    row  = aj[j];
    offset = row%dof;
    row    /= dof;
    nzpi = pd->i[row+1] - pd->i[row];
    pj = pd->j + pd->i[row];
    pa = pd->a + pd->i[row];
    for (k=0; k<nzpi; k++) {
      ierr = PetscHMapIVAddValue(hmap,pj[k]*dof+offset+pcstart,ra*pa[k]);CHKERRQ(ierr);
    }
    ierr = PetscLogFlops(2.0*nzpi);CHKERRQ(ierr);
  }
  for (j=0; j<nzi; j++) {
    ra   = aa[j];
    row  = aj[j];
    offset = row%dof;
    row   /= dof;
    nzpi = po->i[row+1] - po->i[row];
    pj = po->j + po->i[row];
    pa = po->a + po->i[row];
    for (k=0; k<nzpi; k++) {
      ierr = PetscHMapIVAddValue(hmap,p->garray[pj[k]]*dof+offset,ra*pa[k]);CHKERRQ(ierr);
    }
    ierr = PetscLogFlops(2.0*nzpi);CHKERRQ(ierr);
  }

  /* off diagonal part: Ao[i, :]*P_oth */
  if (ao) {
    ai = ao->i;
    pi = p_oth->i;
    nzi = ai[i+1] - ai[i];
    aj  = ao->j + ai[i];
    aa  = ao->a + ai[i];
    for (j=0; j<nzi; j++) {
      row  = aj[j];
      offset = a->garray[row]%dof;
      row    = map[row];
      ra   = aa[j];
      pnz  = pi[row+1] - pi[row];
      p_othcols = p_oth->j + pi[row];
      pa   = p_oth->a + pi[row];
      for (col=0; col<pnz; col++) {
        ierr = PetscHMapIVAddValue(hmap,p_othcols[col]*dof+offset,ra*pa[col]);CHKERRQ(ierr);
      }
      ierr = PetscLogFlops(2.0*pnz);CHKERRQ(ierr);
    }
  } /* end if (ao) */

  PetscFunctionReturn(0);
}

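/*
  Minimal sketch of the PetscHMapIV accumulation pattern relied on above,
  using only calls that appear in this file (illustrative, standalone):

    PetscHMapIV hmap;
    PetscInt    n = 0,indices[8];
    PetscScalar values[8];
    ierr = PetscHMapIVCreate(&hmap);CHKERRQ(ierr);
    ierr = PetscHMapIVAddValue(hmap,42,1.5);CHKERRQ(ierr);            // insert key 42 -> 1.5
    ierr = PetscHMapIVAddValue(hmap,42,2.0);CHKERRQ(ierr);            // accumulates to 3.5
    ierr = PetscHMapIVGetPairs(hmap,&n,indices,values);CHKERRQ(ierr); // drains n = 1 pair
    ierr = PetscHMapIVDestroy(&hmap);CHKERRQ(ierr);
*/
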
PetscErrorCode MatGetBrowsOfAcols_MPIXAIJ(Mat,Mat,PetscInt dof,MatReuse,Mat*);

PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIXAIJ_allatonce(Mat A,Mat P,PetscInt dof,Mat C)
{
  PetscErrorCode    ierr;
  Mat_MPIAIJ        *p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
  Mat_SeqAIJ        *cd,*co,*po=(Mat_SeqAIJ*)p->B->data,*pd=(Mat_SeqAIJ*)p->A->data;
  Mat_APMPI         *ptap;
  PetscHMapIV       hmap;
  PetscInt          i,j,jj,kk,nzi,*c_rmtj,voff,*c_othj,pn,pon,pcstart,pcend,ccstart,ccend,row,am,*poj,*pdj,*apindices,cmaxr,*c_rmtc,*c_rmtjj,*dcc,*occ,loc;
  PetscScalar       *c_rmta,*c_otha,*poa,*pda,*apvalues,*apvaluestmp,*c_rmtaa;
  PetscInt          offset,ii,pocol;
  const PetscInt    *mappingindices;
  IS                map;

  PetscFunctionBegin;
  MatCheckProduct(C,4);
  ptap = (Mat_APMPI*)C->product->data;
  if (!ptap) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be computed. Missing data");
  if (!ptap->P_oth) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatProductClear()");

  ierr = MatZeroEntries(C);CHKERRQ(ierr);

  /* Get P_oth = ptap->P_oth and P_loc = ptap->P_loc */
  /*-----------------------------------------------------*/
  if (ptap->reuse == MAT_REUSE_MATRIX) {
    /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
    ierr = MatGetBrowsOfAcols_MPIXAIJ(A,P,dof,MAT_REUSE_MATRIX,&ptap->P_oth);CHKERRQ(ierr);
  }
  ierr = PetscObjectQuery((PetscObject)ptap->P_oth,"aoffdiagtopothmapping",(PetscObject*)&map);CHKERRQ(ierr);

  ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
  pon *= dof;
  ierr = PetscCalloc2(ptap->c_rmti[pon],&c_rmtj,ptap->c_rmti[pon],&c_rmta);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
  cmaxr = 0;
  for (i=0; i<pon; i++) {
    cmaxr = PetscMax(cmaxr,ptap->c_rmti[i+1]-ptap->c_rmti[i]);
  }
  ierr = PetscCalloc4(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pon,&c_rmtc);CHKERRQ(ierr);
  ierr = PetscHMapIVCreate(&hmap);CHKERRQ(ierr);
  ierr = PetscHMapIVResize(hmap,cmaxr);CHKERRQ(ierr);
  ierr = ISGetIndices(map,&mappingindices);CHKERRQ(ierr);
  for (i=0; i<am && pon; i++) {
    ierr = PetscHMapIVClear(hmap);CHKERRQ(ierr);
    offset = i%dof;
    ii     = i/dof;
    nzi = po->i[ii+1] - po->i[ii];
    if (!nzi) continue;
    ierr = MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,hmap);CHKERRQ(ierr);
    voff = 0;
    ierr = PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);CHKERRQ(ierr);
    if (!voff) continue;

    /* Form C(ii, :) */
    poj = po->j + po->i[ii];
    poa = po->a + po->i[ii];
    for (j=0; j<nzi; j++) {
      pocol = poj[j]*dof+offset;
      c_rmtjj = c_rmtj + ptap->c_rmti[pocol];
      c_rmtaa = c_rmta + ptap->c_rmti[pocol];
      for (jj=0; jj<voff; jj++) {
        apvaluestmp[jj] = apvalues[jj]*poa[j];
        /* If the row is empty */
        if (!c_rmtc[pocol]) {
          c_rmtjj[jj] = apindices[jj];
          c_rmtaa[jj] = apvaluestmp[jj];
          c_rmtc[pocol]++;
        } else {
          ierr = PetscFindInt(apindices[jj],c_rmtc[pocol],c_rmtjj,&loc);CHKERRQ(ierr);
          if (loc>=0){ /* hit */
            c_rmtaa[loc] += apvaluestmp[jj];
            ierr = PetscLogFlops(1.0);CHKERRQ(ierr);
          } else { /* new element */
            loc = -(loc+1);
            /* Move data backward */
            for (kk=c_rmtc[pocol]; kk>loc; kk--) {
              c_rmtjj[kk] = c_rmtjj[kk-1];
              c_rmtaa[kk] = c_rmtaa[kk-1];
            }/* End kk */
            c_rmtjj[loc] = apindices[jj];
            c_rmtaa[loc] = apvaluestmp[jj];
            c_rmtc[pocol]++;
          }
        }
        ierr = PetscLogFlops(voff);CHKERRQ(ierr);
      } /* End jj */
    } /* End j */
  } /* End i */

  ierr = PetscFree4(apindices,apvalues,apvaluestmp,c_rmtc);CHKERRQ(ierr);
  ierr = PetscHMapIVDestroy(&hmap);CHKERRQ(ierr);

  ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
  pn *= dof;
  ierr = PetscCalloc2(ptap->c_othi[pn],&c_othj,ptap->c_othi[pn],&c_otha);CHKERRQ(ierr);

  ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFReduceBegin(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
  pcstart = pcstart*dof;
  pcend   = pcend*dof;
  cd = (Mat_SeqAIJ*)(c->A)->data;
  co = (Mat_SeqAIJ*)(c->B)->data;

  cmaxr = 0;
  for (i=0; i<pn; i++) {
    cmaxr = PetscMax(cmaxr,(cd->i[i+1]-cd->i[i])+(co->i[i+1]-co->i[i]));
  }
  ierr = PetscCalloc5(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pn,&dcc,pn,&occ);CHKERRQ(ierr);
  ierr = PetscHMapIVCreate(&hmap);CHKERRQ(ierr);
  ierr = PetscHMapIVResize(hmap,cmaxr);CHKERRQ(ierr);
  for (i=0; i<am && pn; i++) {
    ierr = PetscHMapIVClear(hmap);CHKERRQ(ierr);
    offset = i%dof;
    ii     = i/dof;
    nzi = pd->i[ii+1] - pd->i[ii];
    if (!nzi) continue;
    ierr = MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,hmap);CHKERRQ(ierr);
    voff = 0;
    ierr = PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);CHKERRQ(ierr);
    if (!voff) continue;
    /* Form C(ii, :) */
    pdj = pd->j + pd->i[ii];
    pda = pd->a + pd->i[ii];
    for (j=0; j<nzi; j++) {
      row = pcstart + pdj[j] * dof + offset;
      for (jj=0; jj<voff; jj++) {
        apvaluestmp[jj] = apvalues[jj]*pda[j];
      }
      ierr = PetscLogFlops(voff);CHKERRQ(ierr);
      ierr = MatSetValues(C,1,&row,voff,apindices,apvaluestmp,ADD_VALUES);CHKERRQ(ierr);
    }
  }
  ierr = ISRestoreIndices(map,&mappingindices);CHKERRQ(ierr);
  ierr = MatGetOwnershipRangeColumn(C,&ccstart,&ccend);CHKERRQ(ierr);
  ierr = PetscFree5(apindices,apvalues,apvaluestmp,dcc,occ);CHKERRQ(ierr);
  ierr = PetscHMapIVDestroy(&hmap);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscFree2(c_rmtj,c_rmta);CHKERRQ(ierr);

  /* Add contributions from remote */
  for (i = 0; i < pn; i++) {
    row = i + pcstart;
    ierr = MatSetValues(C,1,&row,ptap->c_othi[i+1]-ptap->c_othi[i],c_othj+ptap->c_othi[i],c_otha+ptap->c_othi[i],ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(c_othj,c_otha);CHKERRQ(ierr);

  ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  ptap->reuse = MAT_REUSE_MATRIX;
  PetscFunctionReturn(0);
}

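/*
  Note on the sorted insert above (illustrative): PetscFindInt() binary
  searches and, on a miss, returns -(slot+1), where slot is the insertion
  point. For example, searching for 5 in {1,3,7} gives loc = -3, so
  loc = -(loc+1) = 2 recovers the slot; entries 2..end shift back one place
  and the new column/value pair is stored at position 2.
*/
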
PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce(Mat A,Mat P,Mat C)
{
  PetscErrorCode      ierr;

  PetscFunctionBegin;
  ierr = MatPtAPNumeric_MPIAIJ_MPIXAIJ_allatonce(A,P,1,C);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIXAIJ_allatonce_merged(Mat A,Mat P,PetscInt dof,Mat C)
{
  PetscErrorCode    ierr;
  Mat_MPIAIJ        *p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
  Mat_SeqAIJ        *cd,*co,*po=(Mat_SeqAIJ*)p->B->data,*pd=(Mat_SeqAIJ*)p->A->data;
  Mat_APMPI         *ptap;
  PetscHMapIV       hmap;
  PetscInt          i,j,jj,kk,nzi,dnzi,*c_rmtj,voff,*c_othj,pn,pon,pcstart,pcend,row,am,*poj,*pdj,*apindices,cmaxr,*c_rmtc,*c_rmtjj,loc;
  PetscScalar       *c_rmta,*c_otha,*poa,*pda,*apvalues,*apvaluestmp,*c_rmtaa;
  PetscInt          offset,ii,pocol;
  const PetscInt    *mappingindices;
  IS                map;

  PetscFunctionBegin;
  MatCheckProduct(C,4);
  ptap = (Mat_APMPI*)C->product->data;
  if (!ptap) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be computed. Missing data");
  if (!ptap->P_oth) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatProductClear()");

  ierr = MatZeroEntries(C);CHKERRQ(ierr);

  /* Get P_oth = ptap->P_oth and P_loc = ptap->P_loc */
  /*-----------------------------------------------------*/
  if (ptap->reuse == MAT_REUSE_MATRIX) {
    /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
    ierr = MatGetBrowsOfAcols_MPIXAIJ(A,P,dof,MAT_REUSE_MATRIX,&ptap->P_oth);CHKERRQ(ierr);
  }
  ierr = PetscObjectQuery((PetscObject)ptap->P_oth,"aoffdiagtopothmapping",(PetscObject*)&map);CHKERRQ(ierr);
  ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
  pon *= dof;
  ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
  pn  *= dof;

  ierr = PetscCalloc2(ptap->c_rmti[pon],&c_rmtj,ptap->c_rmti[pon],&c_rmta);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
  pcstart *= dof;
  pcend   *= dof;
  cmaxr = 0;
  for (i=0; i<pon; i++) {
    cmaxr = PetscMax(cmaxr,ptap->c_rmti[i+1]-ptap->c_rmti[i]);
  }
  cd = (Mat_SeqAIJ*)(c->A)->data;
  co = (Mat_SeqAIJ*)(c->B)->data;
  for (i=0; i<pn; i++) {
    cmaxr = PetscMax(cmaxr,(cd->i[i+1]-cd->i[i])+(co->i[i+1]-co->i[i]));
  }
  ierr = PetscCalloc4(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pon,&c_rmtc);CHKERRQ(ierr);
  ierr = PetscHMapIVCreate(&hmap);CHKERRQ(ierr);
  ierr = PetscHMapIVResize(hmap,cmaxr);CHKERRQ(ierr);
  ierr = ISGetIndices(map,&mappingindices);CHKERRQ(ierr);
  for (i=0; i<am && (pon || pn); i++) {
    ierr = PetscHMapIVClear(hmap);CHKERRQ(ierr);
    offset = i%dof;
    ii     = i/dof;
    nzi  = po->i[ii+1] - po->i[ii];
    dnzi = pd->i[ii+1] - pd->i[ii];
    if (!nzi && !dnzi) continue;
    ierr = MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,hmap);CHKERRQ(ierr);
    voff = 0;
    ierr = PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);CHKERRQ(ierr);
    if (!voff) continue;

    /* Form remote C(ii, :) */
    poj = po->j + po->i[ii];
    poa = po->a + po->i[ii];
    for (j=0; j<nzi; j++) {
      pocol = poj[j]*dof+offset;
      c_rmtjj = c_rmtj + ptap->c_rmti[pocol];
      c_rmtaa = c_rmta + ptap->c_rmti[pocol];
      for (jj=0; jj<voff; jj++) {
        apvaluestmp[jj] = apvalues[jj]*poa[j];
        /* If the row is empty */
        if (!c_rmtc[pocol]) {
          c_rmtjj[jj] = apindices[jj];
          c_rmtaa[jj] = apvaluestmp[jj];
          c_rmtc[pocol]++;
        } else {
          ierr = PetscFindInt(apindices[jj],c_rmtc[pocol],c_rmtjj,&loc);CHKERRQ(ierr);
          if (loc>=0){ /* hit */
            c_rmtaa[loc] += apvaluestmp[jj];
            ierr = PetscLogFlops(1.0);CHKERRQ(ierr);
          } else { /* new element */
            loc = -(loc+1);
            /* Move data backward */
            for (kk=c_rmtc[pocol]; kk>loc; kk--) {
              c_rmtjj[kk] = c_rmtjj[kk-1];
              c_rmtaa[kk] = c_rmtaa[kk-1];
            }/* End kk */
            c_rmtjj[loc] = apindices[jj];
            c_rmtaa[loc] = apvaluestmp[jj];
            c_rmtc[pocol]++;
          }
        }
      } /* End jj */
      ierr = PetscLogFlops(voff);CHKERRQ(ierr);
    } /* End j */

    /* Form local C(ii, :) */
    pdj = pd->j + pd->i[ii];
    pda = pd->a + pd->i[ii];
    for (j=0; j<dnzi; j++) {
      row = pcstart + pdj[j] * dof + offset;
      for (jj=0; jj<voff; jj++) {
        apvaluestmp[jj] = apvalues[jj]*pda[j];
      }/* End jj */
      ierr = PetscLogFlops(voff);CHKERRQ(ierr);
      ierr = MatSetValues(C,1,&row,voff,apindices,apvaluestmp,ADD_VALUES);CHKERRQ(ierr);
    }/* End j */
  } /* End i */

  ierr = ISRestoreIndices(map,&mappingindices);CHKERRQ(ierr);
  ierr = PetscFree4(apindices,apvalues,apvaluestmp,c_rmtc);CHKERRQ(ierr);
  ierr = PetscHMapIVDestroy(&hmap);CHKERRQ(ierr);
  ierr = PetscCalloc2(ptap->c_othi[pn],&c_othj,ptap->c_othi[pn],&c_otha);CHKERRQ(ierr);

  ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFReduceBegin(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscFree2(c_rmtj,c_rmta);CHKERRQ(ierr);

  /* Add contributions from remote */
  for (i = 0; i < pn; i++) {
    row = i + pcstart;
    ierr = MatSetValues(C,1,&row,ptap->c_othi[i+1]-ptap->c_othi[i],c_othj+ptap->c_othi[i],c_otha+ptap->c_othi[i],ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(c_othj,c_otha);CHKERRQ(ierr);

  ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  ptap->reuse = MAT_REUSE_MATRIX;
  PetscFunctionReturn(0);
}

PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged(Mat A,Mat P,Mat C)
{
  PetscErrorCode      ierr;

  PetscFunctionBegin;
  ierr = MatPtAPNumeric_MPIAIJ_MPIXAIJ_allatonce_merged(A,P,1,C);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

1031 /* TODO: move algorithm selection to MatProductSetFromOptions */
MatPtAPSymbolic_MPIAIJ_MPIXAIJ_allatonce(Mat A,Mat P,PetscInt dof,PetscReal fill,Mat Cmpi)1032 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIXAIJ_allatonce(Mat A,Mat P,PetscInt dof,PetscReal fill,Mat Cmpi)
1033 {
1034   Mat_APMPI           *ptap;
1035   Mat_MPIAIJ          *p=(Mat_MPIAIJ*)P->data;
1036   MPI_Comm            comm;
1037   Mat_SeqAIJ          *pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
1038   MatType             mtype;
1039   PetscSF             sf;
1040   PetscSFNode         *iremote;
1041   PetscInt            rootspacesize,*rootspace,*rootspaceoffsets,nleaves;
1042   const PetscInt      *rootdegrees;
1043   PetscHSetI          ht,oht,*hta,*hto;
1044   PetscInt            pn,pon,*c_rmtc,i,j,nzi,htsize,htosize,*c_rmtj,off,*c_othj,rcvncols,sendncols,*c_rmtoffsets;
1045   PetscInt            lidx,*rdj,col,pcstart,pcend,*dnz,*onz,am,arstart,arend,*poj,*pdj;
1046   PetscInt            nalg=2,alg=0,offset,ii;
1047   PetscMPIInt         owner;
1048   const PetscInt      *mappingindices;
1049   PetscBool           flg;
1050   const char          *algTypes[2] = {"overlapping","merged"};
1051   IS                  map;
1052   PetscErrorCode      ierr;
1053 
1054   PetscFunctionBegin;
1055   MatCheckProduct(Cmpi,5);
1056   if (Cmpi->product->data) SETERRQ(PetscObjectComm((PetscObject)Cmpi),PETSC_ERR_PLIB,"Product data not empty");
1057   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1058 
1059   /* Create symbolic parallel matrix Cmpi */
1060   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
1061   pn *= dof;
1062   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
1063   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
1064   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1065 
1066   ierr = PetscNew(&ptap);CHKERRQ(ierr);
1067   ptap->reuse = MAT_INITIAL_MATRIX;
1068   ptap->algType = 2;
1069 
1070   /* Get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1071   ierr = MatGetBrowsOfAcols_MPIXAIJ(A,P,dof,MAT_INITIAL_MATRIX,&ptap->P_oth);CHKERRQ(ierr);
1072   ierr = PetscObjectQuery((PetscObject)ptap->P_oth,"aoffdiagtopothmapping",(PetscObject*)&map);CHKERRQ(ierr);
1073   /* This equals to the number of offdiag columns in P */
1074   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
1075   pon *= dof;
1076   /* offsets */
1077   ierr = PetscMalloc1(pon+1,&ptap->c_rmti);CHKERRQ(ierr);
1078   /* The number of columns we will send to remote ranks */
1079   ierr = PetscMalloc1(pon,&c_rmtc);CHKERRQ(ierr);
1080   ierr = PetscMalloc1(pon,&hta);CHKERRQ(ierr);
1081   for (i=0; i<pon; i++) {
1082     ierr = PetscHSetICreate(&hta[i]);CHKERRQ(ierr);
1083   }
1084   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
1085   ierr = MatGetOwnershipRange(A,&arstart,&arend);CHKERRQ(ierr);
1086   /* Create hash table to merge all columns for C(i, :) */
1087   ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
1088 
1089   ierr = ISGetIndices(map,&mappingindices);CHKERRQ(ierr);
1090   ptap->c_rmti[0] = 0;
1091   /* 2) Pass 1: calculate the size for C_rmt (a matrix need to be sent to other processors)  */
1092   for (i=0; i<am && pon; i++) {
1093     /* Form one row of AP */
1094     ierr = PetscHSetIClear(ht);CHKERRQ(ierr);
1095     offset = i%dof;
1096     ii     = i/dof;
1097     /* If the off diag is empty, we should not do any calculation */
1098     nzi = po->i[ii+1] - po->i[ii];
1099     if (!nzi) continue;
1100 
1101     ierr = MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,ht,ht);CHKERRQ(ierr);
1102     ierr = PetscHSetIGetSize(ht,&htsize);CHKERRQ(ierr);
1103     /* If AP is empty, just continue */
1104     if (!htsize) continue;
1105     /* Form C(ii, :) */
1106     poj = po->j + po->i[ii];
1107     for (j=0; j<nzi; j++) {
1108       ierr = PetscHSetIUpdate(hta[poj[j]*dof+offset],ht);CHKERRQ(ierr);
1109     }
1110   }
1111 
1112   for (i=0; i<pon; i++) {
1113     ierr = PetscHSetIGetSize(hta[i],&htsize);CHKERRQ(ierr);
1114     ptap->c_rmti[i+1] = ptap->c_rmti[i] + htsize;
1115     c_rmtc[i] = htsize;
1116   }
1117 
1118   ierr = PetscMalloc1(ptap->c_rmti[pon],&c_rmtj);CHKERRQ(ierr);
1119 
1120   for (i=0; i<pon; i++) {
1121     off = 0;
1122     ierr = PetscHSetIGetElems(hta[i],&off,c_rmtj+ptap->c_rmti[i]);CHKERRQ(ierr);
1123     ierr = PetscHSetIDestroy(&hta[i]);CHKERRQ(ierr);
1124   }
1125   ierr = PetscFree(hta);CHKERRQ(ierr);
1126 
1127   ierr = PetscMalloc1(pon,&iremote);CHKERRQ(ierr);
1128   for (i=0; i<pon; i++) {
1129     owner = 0; lidx = 0;
1130     offset = i%dof;
1131     ii     = i/dof;
1132     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[ii],&owner,&lidx);CHKERRQ(ierr);
1133     iremote[i].index = lidx*dof + offset;
1134     iremote[i].rank  = owner;
1135   }
1136 
1137   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
1138   ierr = PetscSFSetGraph(sf,pn,pon,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1139   /* Reorder ranks properly so that the data handled by gather and scatter have the same order */
1140   ierr = PetscSFSetRankOrder(sf,PETSC_TRUE);CHKERRQ(ierr);
1141   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
1142   ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
1143   /* How many neighbors have contributions to my rows? */
1144   ierr = PetscSFComputeDegreeBegin(sf,&rootdegrees);CHKERRQ(ierr);
1145   ierr = PetscSFComputeDegreeEnd(sf,&rootdegrees);CHKERRQ(ierr);
1146   rootspacesize = 0;
1147   for (i = 0; i < pn; i++) {
1148     rootspacesize += rootdegrees[i];
1149   }
1150   ierr = PetscMalloc1(rootspacesize,&rootspace);CHKERRQ(ierr);
1151   ierr = PetscMalloc1(rootspacesize+1,&rootspaceoffsets);CHKERRQ(ierr);
1152   /* Get information from leaves
1153    * Number of columns other people contribute to my rows
1154    * */
1155   ierr = PetscSFGatherBegin(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1156   ierr = PetscSFGatherEnd(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1157   ierr = PetscFree(c_rmtc);CHKERRQ(ierr);
1158   ierr = PetscCalloc1(pn+1,&ptap->c_othi);CHKERRQ(ierr);
1159   /* The number of columns is received for each row */
1160   ptap->c_othi[0] = 0;
1161   rootspacesize = 0;
1162   rootspaceoffsets[0] = 0;
1163   for (i = 0; i < pn; i++) {
1164     rcvncols = 0;
1165     for (j = 0; j<rootdegrees[i]; j++) {
1166       rcvncols += rootspace[rootspacesize];
1167       rootspaceoffsets[rootspacesize+1] = rootspaceoffsets[rootspacesize] + rootspace[rootspacesize];
1168       rootspacesize++;
1169     }
1170     ptap->c_othi[i+1] = ptap->c_othi[i] + rcvncols;
1171   }
1172   ierr = PetscFree(rootspace);CHKERRQ(ierr);
1173 
1174   ierr = PetscMalloc1(pon,&c_rmtoffsets);CHKERRQ(ierr);
1175   ierr = PetscSFScatterBegin(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1176   ierr = PetscSFScatterEnd(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1177   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1178   ierr = PetscFree(rootspaceoffsets);CHKERRQ(ierr);
1179 
1180   ierr = PetscCalloc1(ptap->c_rmti[pon],&iremote);CHKERRQ(ierr);
1181   nleaves = 0;
1182   for (i = 0; i<pon; i++) {
1183     owner = 0;
1184     ii = i/dof;
1185     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[ii],&owner,NULL);CHKERRQ(ierr);
1186     sendncols = ptap->c_rmti[i+1] - ptap->c_rmti[i];
1187     for (j=0; j<sendncols; j++) {
1188       iremote[nleaves].rank = owner;
1189       iremote[nleaves++].index = c_rmtoffsets[i] + j;
1190     }
1191   }
1192   ierr = PetscFree(c_rmtoffsets);CHKERRQ(ierr);
1193   ierr = PetscCalloc1(ptap->c_othi[pn],&c_othj);CHKERRQ(ierr);
1194 
1195   ierr = PetscSFCreate(comm,&ptap->sf);CHKERRQ(ierr);
1196   ierr = PetscSFSetGraph(ptap->sf,ptap->c_othi[pn],nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1197   ierr = PetscSFSetFromOptions(ptap->sf);CHKERRQ(ierr);
1198   /* One to one map */
1199   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1200 
1201   ierr = PetscMalloc2(pn,&dnz,pn,&onz);CHKERRQ(ierr);
1202   ierr = PetscHSetICreate(&oht);CHKERRQ(ierr);
1203   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
1204   pcstart *= dof;
1205   pcend   *= dof;
1206   ierr = PetscMalloc2(pn,&hta,pn,&hto);CHKERRQ(ierr);
1207   for (i=0; i<pn; i++) {
1208     ierr = PetscHSetICreate(&hta[i]);CHKERRQ(ierr);
1209     ierr = PetscHSetICreate(&hto[i]);CHKERRQ(ierr);
1210   }
1211   /* Work on local part */
1212   /* 4) Pass 1: Estimate memory for C_loc */
1213   for (i=0; i<am && pn; i++) {
1214     ierr = PetscHSetIClear(ht);CHKERRQ(ierr);
1215     ierr = PetscHSetIClear(oht);CHKERRQ(ierr);
1216     offset = i%dof;
1217     ii     = i/dof;
1218     nzi = pd->i[ii+1] - pd->i[ii];
1219     if (!nzi) continue;
1220 
1221     ierr = MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,ht,oht);CHKERRQ(ierr);
1222     ierr = PetscHSetIGetSize(ht,&htsize);CHKERRQ(ierr);
1223     ierr = PetscHSetIGetSize(oht,&htosize);CHKERRQ(ierr);
1224     if (!(htsize+htosize)) continue;
1225     /* Form C(ii, :) */
1226     pdj = pd->j + pd->i[ii];
1227     for (j=0; j<nzi; j++) {
1228       ierr = PetscHSetIUpdate(hta[pdj[j]*dof+offset],ht);CHKERRQ(ierr);
1229       ierr = PetscHSetIUpdate(hto[pdj[j]*dof+offset],oht);CHKERRQ(ierr);
1230     }
1231   }
1232 
1233   ierr = ISRestoreIndices(map,&mappingindices);CHKERRQ(ierr);
1234 
1235   ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
1236   ierr = PetscHSetIDestroy(&oht);CHKERRQ(ierr);
1237 
1238   /* Get remote data */
1239   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1240   ierr = PetscFree(c_rmtj);CHKERRQ(ierr);
1241 
1242   for (i = 0; i < pn; i++) {
1243     nzi = ptap->c_othi[i+1] - ptap->c_othi[i];
1244     rdj = c_othj + ptap->c_othi[i];
1245     for (j = 0; j < nzi; j++) {
1246       col = rdj[j];
1247       /* diag part */
1248       if (col>=pcstart && col<pcend) {
1249         ierr = PetscHSetIAdd(hta[i],col);CHKERRQ(ierr);
1250       } else { /* off diag */
1251         ierr = PetscHSetIAdd(hto[i],col);CHKERRQ(ierr);
1252       }
1253     }
1254     ierr = PetscHSetIGetSize(hta[i],&htsize);CHKERRQ(ierr);
1255     dnz[i] = htsize;
1256     ierr = PetscHSetIDestroy(&hta[i]);CHKERRQ(ierr);
1257     ierr = PetscHSetIGetSize(hto[i],&htsize);CHKERRQ(ierr);
1258     onz[i] = htsize;
1259     ierr = PetscHSetIDestroy(&hto[i]);CHKERRQ(ierr);
1260   }
1261 
1262   ierr = PetscFree2(hta,hto);CHKERRQ(ierr);
1263   ierr = PetscFree(c_othj);CHKERRQ(ierr);
1264 
1265   /* local sizes and preallocation */
1266   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1267   ierr = MatSetBlockSizes(Cmpi,dof>1? dof: P->cmap->bs,dof>1? dof: P->cmap->bs);CHKERRQ(ierr);
1268   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
1269   ierr = MatSetUp(Cmpi);CHKERRQ(ierr);
1270   ierr = PetscFree2(dnz,onz);CHKERRQ(ierr);
1271 
1272   /* attach the supporting struct to Cmpi for reuse */
1273   Cmpi->product->data    = ptap;
1274   Cmpi->product->destroy = MatDestroy_MPIAIJ_PtAP;
1275   Cmpi->product->view    = MatView_MPIAIJ_PtAP;
1276 
1277   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1278   Cmpi->assembled = PETSC_FALSE;
1279   /* pick a numeric kernel */
1280   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");CHKERRQ(ierr);
1281   alg = 0;
1282   ierr = PetscOptionsEList("-matptap_allatonce_via","PtAP allatonce numeric approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);CHKERRQ(ierr);
1283   ierr = PetscOptionsEnd();CHKERRQ(ierr);
1284   switch (alg) {
1285     case 0:
1286       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce;
1287       break;
1288     case 1:
1289       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged;
1290       break;
1291     default:
1292       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unsupported allatonce numeric algorithm");
1293   }
1294   PetscFunctionReturn(0);
1295 }
1296 
1297 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(Mat A,Mat P,PetscReal fill,Mat C)
1298 {
1299   PetscErrorCode ierr;
1300 
1301   PetscFunctionBegin;
1302   ierr = MatPtAPSymbolic_MPIAIJ_MPIXAIJ_allatonce(A,P,1,fill,C);CHKERRQ(ierr);
1303   PetscFunctionReturn(0);
1304 }
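
/*
  Usage sketch (illustrative only; A and P are assumed to be assembled MPIAIJ
  matrices of compatible sizes):

    Mat C;
    ierr = MatPtAP(A,P,MAT_INITIAL_MATRIX,2.0,&C);CHKERRQ(ierr);
    ierr = MatPtAP(A,P,MAT_REUSE_MATRIX,2.0,&C);CHKERRQ(ierr);   // reuses the symbolic data
    ierr = MatDestroy(&C);CHKERRQ(ierr);

  The all-at-once implementation is selected at run time through the product
  options database (the exact option name depends on the PETSc version, e.g.
  -matproduct_ptap_via allatonce).
*/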
1305 
1306 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIXAIJ_allatonce_merged(Mat A,Mat P,PetscInt dof,PetscReal fill,Mat Cmpi)
1307 {
1308   Mat_APMPI           *ptap;
1309   Mat_MPIAIJ          *p=(Mat_MPIAIJ*)P->data;
1310   MPI_Comm            comm;
1311   Mat_SeqAIJ          *pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
1312   MatType             mtype;
1313   PetscSF             sf;
1314   PetscSFNode         *iremote;
1315   PetscInt            rootspacesize,*rootspace,*rootspaceoffsets,nleaves;
1316   const PetscInt      *rootdegrees;
1317   PetscHSetI          ht,oht,*hta,*hto,*htd;
1318   PetscInt            pn,pon,*c_rmtc,i,j,nzi,dnzi,htsize,htosize,*c_rmtj,off,*c_othj,rcvncols,sendncols,*c_rmtoffsets;
1319   PetscInt            lidx,*rdj,col,pcstart,pcend,*dnz,*onz,am,arstart,arend,*poj,*pdj;
1320   PetscInt            nalg=2,alg=0,offset,ii;
1321   PetscMPIInt         owner;
1322   PetscBool           flg;
1323   const char          *algTypes[2] = {"merged","overlapping"};
1324   const PetscInt      *mappingindices;
1325   IS                  map;
1326   PetscErrorCode      ierr;
1327 
1328   PetscFunctionBegin;
1329   MatCheckProduct(Cmpi,5);
1330   if (Cmpi->product->data) SETERRQ(PetscObjectComm((PetscObject)Cmpi),PETSC_ERR_PLIB,"Product data not empty");
1331   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1332 
1333   /* Create symbolic parallel matrix Cmpi */
1334   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
1335   pn *= dof;
1336   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
1337   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
1338   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1339 
1340   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
1341   ptap->reuse = MAT_INITIAL_MATRIX;
1342   ptap->algType = 3;
1343 
1344   /* 0) Get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1345   ierr = MatGetBrowsOfAcols_MPIXAIJ(A,P,dof,MAT_INITIAL_MATRIX,&ptap->P_oth);CHKERRQ(ierr);
1346   ierr = PetscObjectQuery((PetscObject)ptap->P_oth,"aoffdiagtopothmapping",(PetscObject*)&map);CHKERRQ(ierr);
1347 
1348   /* This equals the number of off-diagonal columns in P */
1349   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
1350   pon *= dof;
1351   /* offsets */
1352   ierr = PetscMalloc1(pon+1,&ptap->c_rmti);CHKERRQ(ierr);
1353   /* The number of columns we will send to remote ranks */
1354   ierr = PetscMalloc1(pon,&c_rmtc);CHKERRQ(ierr);
1355   ierr = PetscMalloc1(pon,&hta);CHKERRQ(ierr);
1356   for (i=0; i<pon; i++) {
1357     ierr = PetscHSetICreate(&hta[i]);CHKERRQ(ierr);
1358   }
1359   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
1360   ierr = MatGetOwnershipRange(A,&arstart,&arend);CHKERRQ(ierr);
1361   /* Create hash table to merge all columns for C(i, :) */
1362   ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
1363   ierr = PetscHSetICreate(&oht);CHKERRQ(ierr);
1364   ierr = PetscMalloc2(pn,&htd,pn,&hto);CHKERRQ(ierr);
1365   for (i=0; i<pn; i++) {
1366     ierr = PetscHSetICreate(&htd[i]);CHKERRQ(ierr);
1367     ierr = PetscHSetICreate(&hto[i]);CHKERRQ(ierr);
1368   }
1369 
1370   ierr = ISGetIndices(map,&mappingindices);CHKERRQ(ierr);
1371   ptap->c_rmti[0] = 0;
1372   /* 2) Pass 1: calculate the size of C_rmt (the matrix to be sent to other processors) */
1373   for (i=0; i<am && (pon || pn); i++) {
1374     /* Form one row of AP */
1375     ierr = PetscHSetIClear(ht);CHKERRQ(ierr);
1376     ierr = PetscHSetIClear(oht);CHKERRQ(ierr);
1377     offset = i%dof;
1378     ii     = i/dof;
1379     /* If both the diagonal and off-diagonal parts of this row are empty, skip it */
1380     nzi = po->i[ii+1] - po->i[ii];
1381     dnzi = pd->i[ii+1] - pd->i[ii];
1382     if (!nzi && !dnzi) continue;
1383 
1384     ierr = MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,ht,oht);CHKERRQ(ierr);
1385     ierr = PetscHSetIGetSize(ht,&htsize);CHKERRQ(ierr);
1386     ierr = PetscHSetIGetSize(oht,&htosize);CHKERRQ(ierr);
1387     /* If AP is empty, just continue */
1388     if (!(htsize+htosize)) continue;
1389 
1390     /* Form remote C(ii, :) */
1391     poj = po->j + po->i[ii];
1392     for (j=0; j<nzi; j++) {
1393       ierr = PetscHSetIUpdate(hta[poj[j]*dof+offset],ht);CHKERRQ(ierr);
1394       ierr = PetscHSetIUpdate(hta[poj[j]*dof+offset],oht);CHKERRQ(ierr);
1395     }
1396 
1397     /* Form local C(ii, :) */
1398     pdj = pd->j + pd->i[ii];
1399     for (j=0; j<dnzi; j++) {
1400       ierr = PetscHSetIUpdate(htd[pdj[j]*dof+offset],ht);CHKERRQ(ierr);
1401       ierr = PetscHSetIUpdate(hto[pdj[j]*dof+offset],oht);CHKERRQ(ierr);
1402     }
1403   }
1404 
1405   ierr = ISRestoreIndices(map,&mappingindices);CHKERRQ(ierr);
1406 
1407   ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
1408   ierr = PetscHSetIDestroy(&oht);CHKERRQ(ierr);
1409 
1410   for (i=0; i<pon; i++) {
1411     ierr = PetscHSetIGetSize(hta[i],&htsize);CHKERRQ(ierr);
1412     ptap->c_rmti[i+1] = ptap->c_rmti[i] + htsize;
1413     c_rmtc[i] = htsize;
1414   }
1415 
1416   ierr = PetscMalloc1(ptap->c_rmti[pon],&c_rmtj);CHKERRQ(ierr);
1417 
1418   for (i=0; i<pon; i++) {
1419     off = 0;
1420     ierr = PetscHSetIGetElems(hta[i],&off,c_rmtj+ptap->c_rmti[i]);CHKERRQ(ierr);
1421     ierr = PetscHSetIDestroy(&hta[i]);CHKERRQ(ierr);
1422   }
1423   ierr = PetscFree(hta);CHKERRQ(ierr);
1424 
1425   ierr = PetscMalloc1(pon,&iremote);CHKERRQ(ierr);
1426   for (i=0; i<pon; i++) {
1427     owner = 0; lidx = 0;
1428     offset = i%dof;
1429     ii     = i/dof;
1430     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[ii],&owner,&lidx);CHKERRQ(ierr);
1431     iremote[i].index = lidx*dof+offset;
1432     iremote[i].rank  = owner;
1433   }
1434 
1435   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
1436   ierr = PetscSFSetGraph(sf,pn,pon,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1437   /* Reorder ranks so that the data handled by gather and scatter are in the same order */
1438   ierr = PetscSFSetRankOrder(sf,PETSC_TRUE);CHKERRQ(ierr);
1439   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
1440   ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
1441   /* How many neighbors have contributions to my rows? */
1442   ierr = PetscSFComputeDegreeBegin(sf,&rootdegrees);CHKERRQ(ierr);
1443   ierr = PetscSFComputeDegreeEnd(sf,&rootdegrees);CHKERRQ(ierr);
1444   rootspacesize = 0;
1445   for (i = 0; i < pn; i++) {
1446     rootspacesize += rootdegrees[i];
1447   }
1448   ierr = PetscMalloc1(rootspacesize,&rootspace);CHKERRQ(ierr);
1449   ierr = PetscMalloc1(rootspacesize+1,&rootspaceoffsets);CHKERRQ(ierr);
1450   /* Get information from leaves:
1451    * the number of columns other ranks contribute to my rows */
1453   ierr = PetscSFGatherBegin(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1454   ierr = PetscSFGatherEnd(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1455   ierr = PetscFree(c_rmtc);CHKERRQ(ierr);
1456   ierr = PetscMalloc1(pn+1,&ptap->c_othi);CHKERRQ(ierr);
1457   /* For each row, count the number of columns received */
1458   ptap->c_othi[0]     = 0;
1459   rootspacesize       = 0;
1460   rootspaceoffsets[0] = 0;
1461   for (i = 0; i < pn; i++) {
1462     rcvncols = 0;
1463     for (j = 0; j<rootdegrees[i]; j++) {
1464       rcvncols += rootspace[rootspacesize];
1465       rootspaceoffsets[rootspacesize+1] = rootspaceoffsets[rootspacesize] + rootspace[rootspacesize];
1466       rootspacesize++;
1467     }
1468     ptap->c_othi[i+1] = ptap->c_othi[i] + rcvncols;
1469   }
1470   ierr = PetscFree(rootspace);CHKERRQ(ierr);
1471 
1472   ierr = PetscMalloc1(pon,&c_rmtoffsets);CHKERRQ(ierr);
1473   ierr = PetscSFScatterBegin(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1474   ierr = PetscSFScatterEnd(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1475   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1476   ierr = PetscFree(rootspaceoffsets);CHKERRQ(ierr);
1477 
1478   ierr = PetscCalloc1(ptap->c_rmti[pon],&iremote);CHKERRQ(ierr);
1479   nleaves = 0;
1480   for (i = 0; i<pon; i++) {
1481     owner = 0;
1482     ii    = i/dof;
1483     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[ii],&owner,NULL);CHKERRQ(ierr);
1484     sendncols = ptap->c_rmti[i+1] - ptap->c_rmti[i];
1485     for (j=0; j<sendncols; j++) {
1486       iremote[nleaves].rank    = owner;
1487       iremote[nleaves++].index = c_rmtoffsets[i] + j;
1488     }
1489   }
1490   ierr = PetscFree(c_rmtoffsets);CHKERRQ(ierr);
1491   ierr = PetscCalloc1(ptap->c_othi[pn],&c_othj);CHKERRQ(ierr);
1492 
1493   ierr = PetscSFCreate(comm,&ptap->sf);CHKERRQ(ierr);
1494   ierr = PetscSFSetGraph(ptap->sf,ptap->c_othi[pn],nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1495   ierr = PetscSFSetFromOptions(ptap->sf);CHKERRQ(ierr);
1496   /* One-to-one map */
1497   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
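  /* Unlike the non-merged variant above, there is no remaining local symbolic
     work to overlap with this reduction, so it is completed immediately below. */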
1498   /* Get remote data */
1499   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1500   ierr = PetscFree(c_rmtj);CHKERRQ(ierr);
1501   ierr = PetscMalloc2(pn,&dnz,pn,&onz);CHKERRQ(ierr);
1502   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
1503   pcstart *= dof;
1504   pcend   *= dof;
1505   for (i = 0; i < pn; i++) {
1506     nzi = ptap->c_othi[i+1] - ptap->c_othi[i];
1507     rdj = c_othj + ptap->c_othi[i];
1508     for (j = 0; j < nzi; j++) {
1509       col =  rdj[j];
1510       /* diag part */
1511       if (col>=pcstart && col<pcend) {
1512         ierr = PetscHSetIAdd(htd[i],col);CHKERRQ(ierr);
1513       } else { /* off diag */
1514         ierr = PetscHSetIAdd(hto[i],col);CHKERRQ(ierr);
1515       }
1516     }
1517     ierr = PetscHSetIGetSize(htd[i],&htsize);CHKERRQ(ierr);
1518     dnz[i] = htsize;
1519     ierr = PetscHSetIDestroy(&htd[i]);CHKERRQ(ierr);
1520     ierr = PetscHSetIGetSize(hto[i],&htsize);CHKERRQ(ierr);
1521     onz[i] = htsize;
1522     ierr = PetscHSetIDestroy(&hto[i]);CHKERRQ(ierr);
1523   }
1524 
1525   ierr = PetscFree2(htd,hto);CHKERRQ(ierr);
1526   ierr = PetscFree(c_othj);CHKERRQ(ierr);
1527 
1528   /* local sizes and preallocation */
1529   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1530   ierr = MatSetBlockSizes(Cmpi, dof>1? dof: P->cmap->bs,dof>1? dof: P->cmap->bs);CHKERRQ(ierr);
1531   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
1532   ierr = PetscFree2(dnz,onz);CHKERRQ(ierr);
1533 
1534   /* attach the supporting struct to Cmpi for reuse */
1535   Cmpi->product->data    = ptap;
1536   Cmpi->product->destroy = MatDestroy_MPIAIJ_PtAP;
1537   Cmpi->product->view    = MatView_MPIAIJ_PtAP;
1538 
1539   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1540   Cmpi->assembled = PETSC_FALSE;
1541   /* pick a numeric kernel */
1542   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");CHKERRQ(ierr);
1543   alg = 0;
1544   ierr = PetscOptionsEList("-matptap_allatonce_via","PtAP allatonce numeric approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);CHKERRQ(ierr);
1545   ierr = PetscOptionsEnd();CHKERRQ(ierr);
1546   switch (alg) {
1547     case 0:
1548       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged;
1549       break;
1550     case 1:
1551       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce;
1552       break;
1553     default:
1554       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unsupported allatonce numeric algorithm");
1555   }
1556   PetscFunctionReturn(0);
1557 }
1558 
1559 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(Mat A,Mat P,PetscReal fill,Mat C)
1560 {
1561   PetscErrorCode ierr;
1562 
1563   PetscFunctionBegin;
1564   ierr = MatPtAPSymbolic_MPIAIJ_MPIXAIJ_allatonce_merged(A,P,1,fill,C);CHKERRQ(ierr);
1565   PetscFunctionReturn(0);
1566 }
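
/*
  Runtime-selection note: both all-at-once symbolic routines read
  -matptap_allatonce_via <merged|overlapping> (see the PetscOptionsEList()
  calls above) to install the numeric kernel, with the default matching the
  chosen symbolic variant. The merged symbolic phase can therefore be paired
  with the overlapping numeric kernel, and vice versa.
*/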
1567 
1568 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ(Mat A,Mat P,PetscReal fill,Mat Cmpi)
1569 {
1570   PetscErrorCode      ierr;
1571   Mat_APMPI           *ptap;
1572   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
1573   MPI_Comm            comm;
1574   PetscMPIInt         size,rank;
1575   PetscFreeSpaceList  free_space=NULL,current_space=NULL;
1576   PetscInt            am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
1577   PetscInt            *lnk,i,k,pnz,row,nsend;
1578   PetscBT             lnkbt;
1579   PetscMPIInt         tagi,tagj,*len_si,*len_s,*len_ri,icompleted=0,nrecv;
1580   PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
1581   PetscInt            len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
1582   PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
1583   MPI_Request         *swaits,*rwaits;
1584   MPI_Status          *sstatus,rstatus;
1585   PetscLayout         rowmap;
1586   PetscInt            *owners_co,*coi,*coj;    /* i and j array of (p->B)^T*A*P - used in the communication */
1587   PetscMPIInt         *len_r,*id_r;    /* array of length of comm->size, store send/recv matrix values */
1588   PetscInt            *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,ap_rmax=0,Crmax,*aj,*ai,*pi;
1589   Mat_SeqAIJ          *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
1590   PetscScalar         *apv;
1591   PetscTable          ta;
1592   MatType             mtype;
1593   const char          *prefix;
1594 #if defined(PETSC_USE_INFO)
1595   PetscReal           apfill;
1596 #endif
1597 
1598   PetscFunctionBegin;
1599   MatCheckProduct(Cmpi,4);
1600   if (Cmpi->product->data) SETERRQ(PetscObjectComm((PetscObject)Cmpi),PETSC_ERR_PLIB,"Product data not empty");
1601   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1602   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1603   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1604 
1605   if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;
1606 
1607   /* create symbolic parallel matrix Cmpi */
1608   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
1609   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
1610 
1611   /* Do dense axpy in MatPtAPNumeric_MPIAIJ_MPIAIJ() */
1612   Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ;
1613 
1614   /* create struct Mat_APMPI; it is attached to C later */
1615   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
1616   ptap->reuse = MAT_INITIAL_MATRIX;
1617   ptap->algType = 1;
1618 
1619   /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1620   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
1621   /* get P_loc by taking all local rows of P */
1622   ierr = MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
1623 
1624   /* (0) compute Rd = Pd^T, Ro = Po^T  */
1625   /* --------------------------------- */
1626   ierr = MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);CHKERRQ(ierr);
1627   ierr = MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);CHKERRQ(ierr);
1628 
1629   /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
1630   /* ----------------------------------------------------------------- */
1631   p_loc  = (Mat_SeqAIJ*)(ptap->P_loc)->data;
1632   if (ptap->P_oth) p_oth  = (Mat_SeqAIJ*)(ptap->P_oth)->data;
1633 
1634   /* create and initialize a linked list */
1635   ierr = PetscTableCreate(pn,pN,&ta);CHKERRQ(ierr); /* used to compute AP_loc and Cmpi */
1636   MatRowMergeMax_SeqAIJ(p_loc,ptap->P_loc->rmap->N,ta);
1637   MatRowMergeMax_SeqAIJ(p_oth,ptap->P_oth->rmap->N,ta);
1638   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr); /* Crmax = nnz(sum of Prows) */
1640 
1641   ierr = PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);CHKERRQ(ierr);
1642 
1643   /* Initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
1644   if (ao) {
1645     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);CHKERRQ(ierr);
1646   } else {
1647     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);CHKERRQ(ierr);
1648   }
1649   current_space = free_space;
1650   nspacedouble  = 0;
1651 
1652   ierr   = PetscMalloc1(am+1,&api);CHKERRQ(ierr);
1653   api[0] = 0;
1654   for (i=0; i<am; i++) {
1655     /* diagonal portion: Ad[i,:]*P */
1656     ai = ad->i; pi = p_loc->i;
1657     nzi = ai[i+1] - ai[i];
1658     aj  = ad->j + ai[i];
1659     for (j=0; j<nzi; j++) {
1660       row  = aj[j];
1661       pnz  = pi[row+1] - pi[row];
1662       Jptr = p_loc->j + pi[row];
1663       /* add non-zero cols of P into the sorted linked list lnk */
1664       ierr = PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1665     }
1666     /* off-diagonal portion: Ao[i,:]*P */
1667     if (ao) {
1668       ai = ao->i; pi = p_oth->i;
1669       nzi = ai[i+1] - ai[i];
1670       aj  = ao->j + ai[i];
1671       for (j=0; j<nzi; j++) {
1672         row  = aj[j];
1673         pnz  = pi[row+1] - pi[row];
1674         Jptr = p_oth->j + pi[row];
1675         ierr = PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1676       }
1677     }
1678     apnz     = lnk[0];
1679     api[i+1] = api[i] + apnz;
1680     if (ap_rmax < apnz) ap_rmax = apnz;
1681 
1682     /* if free space is not available, double the total space in the list */
1683     if (current_space->local_remaining<apnz) {
1684       ierr = PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);CHKERRQ(ierr);
1685       nspacedouble++;
1686     }
1687 
1688     /* Copy data into free space, then initialize lnk */
1689     ierr = PetscLLCondensedClean(pN,apnz,current_space->array,lnk,lnkbt);CHKERRQ(ierr);
1690 
1691     current_space->array           += apnz;
1692     current_space->local_used      += apnz;
1693     current_space->local_remaining -= apnz;
1694   }
1695   /* Allocate space for apj and apv, initialize apj, and */
1696   /* destroy list of free space and other temporary array(s) */
1697   ierr   = PetscMalloc2(api[am],&apj,api[am],&apv);CHKERRQ(ierr);
1698   ierr   = PetscFreeSpaceContiguous(&free_space,apj);CHKERRQ(ierr);
1699   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
1700 
1701   /* Create AP_loc for reuse */
1702   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);CHKERRQ(ierr);
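  /* Note: MatCreateSeqAIJWithArrays() does not copy api, apj, or apv, so AP_loc
     does not own them; they are freed explicitly in MatDestroy_MPIAIJ_PtAP(). */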
1703 
1704 #if defined(PETSC_USE_INFO)
1705   if (ao) {
1706     apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
1707   } else {
1708     apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
1709   }
1710   ptap->AP_loc->info.mallocs           = nspacedouble;
1711   ptap->AP_loc->info.fill_ratio_given  = fill;
1712   ptap->AP_loc->info.fill_ratio_needed = apfill;
1713 
1714   if (api[am]) {
1715     ierr = PetscInfo3(ptap->AP_loc,"Nonscalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);CHKERRQ(ierr);
1716     ierr = PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,P,MatReuse,%g,&C) for best AP_loc performance.\n",(double)apfill);CHKERRQ(ierr);
1717   } else {
1718     ierr = PetscInfo(ptap->AP_loc,"Nonscalable algorithm, AP_loc is empty\n");CHKERRQ(ierr);
1719   }
1720 #endif
1721 
1722   /* (2-1) compute symbolic Co = Ro*AP_loc  */
1723   /* ------------------------------------ */
1724   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1725   ierr = MatSetOptionsPrefix(ptap->Ro,prefix);CHKERRQ(ierr);
1726   ierr = MatAppendOptionsPrefix(ptap->Ro,"inner_offdiag_");CHKERRQ(ierr);
1727 
1728   ierr = MatProductCreate(ptap->Ro,ptap->AP_loc,NULL,&ptap->C_oth);CHKERRQ(ierr);
1729   ierr = MatGetOptionsPrefix(Cmpi,&prefix);CHKERRQ(ierr);
1730   ierr = MatSetOptionsPrefix(ptap->C_oth,prefix);CHKERRQ(ierr);
1731   ierr = MatAppendOptionsPrefix(ptap->C_oth,"inner_C_oth_");CHKERRQ(ierr);
1732 
1733   ierr = MatProductSetType(ptap->C_oth,MATPRODUCT_AB);CHKERRQ(ierr);
1734   ierr = MatProductSetAlgorithm(ptap->C_oth,"default");CHKERRQ(ierr);
1735   ierr = MatProductSetFill(ptap->C_oth,fill);CHKERRQ(ierr);
1736   ierr = MatProductSetFromOptions(ptap->C_oth);CHKERRQ(ierr);
1737   ierr = MatProductSymbolic(ptap->C_oth);CHKERRQ(ierr);
1738 
1739   /* (3) send coj of C_oth to other processors  */
1740   /* ------------------------------------------ */
1741   /* determine row ownership */
1742   ierr = PetscLayoutCreate(comm,&rowmap);CHKERRQ(ierr);
1743   rowmap->n  = pn;
1744   rowmap->bs = 1;
1745   ierr   = PetscLayoutSetUp(rowmap);CHKERRQ(ierr);
1746   owners = rowmap->range;
1747 
1748   /* determine the number of messages to send, their lengths */
1749   ierr = PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);CHKERRQ(ierr);
1750   ierr = PetscArrayzero(len_s,size);CHKERRQ(ierr);
1751   ierr = PetscArrayzero(len_si,size);CHKERRQ(ierr);
1752 
1753   c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
1754   coi   = c_oth->i; coj = c_oth->j;
1755   con   = ptap->C_oth->rmap->n;
1756   proc  = 0;
1757   for (i=0; i<con; i++) {
1758     while (prmap[i] >= owners[proc+1]) proc++;
1759     len_si[proc]++;               /* num of rows in Co(=Pt*AP) to be sent to [proc] */
1760     len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
1761   }
1762 
1763   len          = 0; /* max length of buf_si[], see (4) */
1764   owners_co[0] = 0;
1765   nsend        = 0;
1766   for (proc=0; proc<size; proc++) {
1767     owners_co[proc+1] = owners_co[proc] + len_si[proc];
1768     if (len_s[proc]) {
1769       nsend++;
1770       len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
1771       len         += len_si[proc];
1772     }
1773   }
1774 
1775   /* determine the number and length of messages to receive for coi and coj  */
1776   ierr = PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);CHKERRQ(ierr);
1777   ierr = PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);CHKERRQ(ierr);
1778 
1779   /* post the Irecv and Isend of coj */
1780   ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
1781   ierr = PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);CHKERRQ(ierr);
1782   ierr = PetscMalloc1(nsend+1,&swaits);CHKERRQ(ierr);
1783   for (proc=0, k=0; proc<size; proc++) {
1784     if (!len_s[proc]) continue;
1785     i    = owners_co[proc];
1786     ierr = MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);CHKERRQ(ierr);
1787     k++;
1788   }
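  /* The Isends of coj are left in flight; the symbolic product for C_loc below
     overlaps this communication, and completion is waited on afterwards. */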
1789 
1790   /* (2-2) compute symbolic C_loc = Rd*AP_loc */
1791   /* ---------------------------------------- */
1792   ierr = MatSetOptionsPrefix(ptap->Rd,prefix);CHKERRQ(ierr);
1793   ierr = MatAppendOptionsPrefix(ptap->Rd,"inner_diag_");CHKERRQ(ierr);
1794 
1795   ierr = MatProductCreate(ptap->Rd,ptap->AP_loc,NULL,&ptap->C_loc);CHKERRQ(ierr);
1796   ierr = MatGetOptionsPrefix(Cmpi,&prefix);CHKERRQ(ierr);
1797   ierr = MatSetOptionsPrefix(ptap->C_loc,prefix);CHKERRQ(ierr);
1798   ierr = MatAppendOptionsPrefix(ptap->C_loc,"inner_C_loc_");CHKERRQ(ierr);
1799 
1800   ierr = MatProductSetType(ptap->C_loc,MATPRODUCT_AB);CHKERRQ(ierr);
1801   ierr = MatProductSetAlgorithm(ptap->C_loc,"default");CHKERRQ(ierr);
1802   ierr = MatProductSetFill(ptap->C_loc,fill);CHKERRQ(ierr);
1803   ierr = MatProductSetFromOptions(ptap->C_loc);CHKERRQ(ierr);
1804   ierr = MatProductSymbolic(ptap->C_loc);CHKERRQ(ierr);
1805 
1806   c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;
1807 
1808   /* wait until the receives of coj are complete */
1809   for (i=0; i<nrecv; i++) {
1810     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
1811   }
1812   ierr = PetscFree(rwaits);CHKERRQ(ierr);
1813   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
1814 
1815   /* add received column indices into ta to update Crmax */
1816   for (k=0; k<nrecv; k++) {/* k-th received message */
1817     Jptr = buf_rj[k];
1818     for (j=0; j<len_r[k]; j++) {
1819       ierr = PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);CHKERRQ(ierr);
1820     }
1821   }
1822   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr);
1823   ierr = PetscTableDestroy(&ta);CHKERRQ(ierr);
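  /* Crmax now also accounts for the received column indices; it bounds the
     number of distinct columns in any row of Cmpi and sizes the condensed
     linked list used in step (5). */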
1824 
1825   /* (4) send and recv coi */
1826   /*-----------------------*/
1827   ierr   = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
1828   ierr   = PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);CHKERRQ(ierr);
1829   ierr   = PetscMalloc1(len+1,&buf_s);CHKERRQ(ierr);
1830   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
1831   for (proc=0,k=0; proc<size; proc++) {
1832     if (!len_s[proc]) continue;
1833     /* form outgoing message for i-structure:
1834          buf_si[0]:                 nrows to be sent
1835                [1:nrows]:           row index (global)
1836                [nrows+1:2*nrows+1]: i-structure index
1837     */
1838     /*-------------------------------------------*/
1839     nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
1840     buf_si_i    = buf_si + nrows+1;
1841     buf_si[0]   = nrows;
1842     buf_si_i[0] = 0;
1843     nrows       = 0;
1844     for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
1845       nzi = coi[i+1] - coi[i];
1846       buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;  /* i-structure */
1847       buf_si[nrows+1]   = prmap[i] -owners[proc]; /* local row index */
1848       nrows++;
1849     }
1850     ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);CHKERRQ(ierr);
1851     k++;
1852     buf_si += len_si[proc];
1853   }
1854   for (i=0; i<nrecv; i++) {
1855     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
1856   }
1857   ierr = PetscFree(rwaits);CHKERRQ(ierr);
1858   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
1859 
1860   ierr = PetscFree4(len_s,len_si,sstatus,owners_co);CHKERRQ(ierr);
1861   ierr = PetscFree(len_ri);CHKERRQ(ierr);
1862   ierr = PetscFree(swaits);CHKERRQ(ierr);
1863   ierr = PetscFree(buf_s);CHKERRQ(ierr);
1864 
1865   /* (5) compute the local portion of Cmpi      */
1866   /* ------------------------------------------ */
1867   /* set initial free space to be Crmax, sufficient for holding nonzeros in each row of Cmpi */
1868   ierr          = PetscFreeSpaceGet(Crmax,&free_space);CHKERRQ(ierr);
1869   current_space = free_space;
1870 
1871   ierr = PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);CHKERRQ(ierr);
1872   for (k=0; k<nrecv; k++) {
1873     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
1874     nrows       = *buf_ri_k[k];
1875     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th recved i-structure */
1876     nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of the k-th received i-structure */
1877   }
1878 
1879   ierr = MatPreallocateInitialize(comm,pn,pn,dnz,onz);CHKERRQ(ierr);
1880   ierr = PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);CHKERRQ(ierr);
1881   for (i=0; i<pn; i++) {
1882     /* add C_loc into Cmpi */
1883     nzi  = c_loc->i[i+1] - c_loc->i[i];
1884     Jptr = c_loc->j + c_loc->i[i];
1885     ierr = PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1886 
1887     /* add received col data into lnk */
1888     for (k=0; k<nrecv; k++) { /* k-th received message */
1889       if (i == *nextrow[k]) { /* i-th row */
1890         nzi  = *(nextci[k]+1) - *nextci[k];
1891         Jptr = buf_rj[k] + *nextci[k];
1892         ierr = PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1893         nextrow[k]++; nextci[k]++;
1894       }
1895     }
1896     nzi = lnk[0];
1897 
1898     /* copy data into free space, then initialize lnk */
1899     ierr = PetscLLCondensedClean(pN,nzi,current_space->array,lnk,lnkbt);CHKERRQ(ierr);
1900     ierr = MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);CHKERRQ(ierr);
1901   }
1902   ierr = PetscFree3(buf_ri_k,nextrow,nextci);CHKERRQ(ierr);
1903   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
1904   ierr = PetscFreeSpaceDestroy(free_space);CHKERRQ(ierr);
1905 
1906   /* local sizes and preallocation */
1907   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1908   if (P->cmap->bs > 0) {
1909     ierr = PetscLayoutSetBlockSize(Cmpi->rmap,P->cmap->bs);CHKERRQ(ierr);
1910     ierr = PetscLayoutSetBlockSize(Cmpi->cmap,P->cmap->bs);CHKERRQ(ierr);
1911   }
1912   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
1913   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
1914 
1915   /* free communication buffers (kept as members of merge in the alg_ptap variant) */
1916   ierr = PetscFree(id_r);CHKERRQ(ierr);
1917   ierr = PetscFree(len_r);CHKERRQ(ierr);
1918   ierr = PetscFree(buf_ri[0]);CHKERRQ(ierr);
1919   ierr = PetscFree(buf_ri);CHKERRQ(ierr);
1920   ierr = PetscFree(buf_rj[0]);CHKERRQ(ierr);
1921   ierr = PetscFree(buf_rj);CHKERRQ(ierr);
1922   ierr = PetscLayoutDestroy(&rowmap);CHKERRQ(ierr);
1923 
1924   ierr = PetscCalloc1(pN,&ptap->apa);CHKERRQ(ierr);
1925 
1926   /* attach the supporting struct to Cmpi for reuse */
1927   Cmpi->product->data    = ptap;
1928   Cmpi->product->destroy = MatDestroy_MPIAIJ_PtAP;
1929   Cmpi->product->view    = MatView_MPIAIJ_PtAP;
1930 
1931   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1932   Cmpi->assembled = PETSC_FALSE;
1933   PetscFunctionReturn(0);
1934 }
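
/*
  Fill-tuning sketch (illustrative; the 2.0 guess is a placeholder): the
  PetscInfo output above reports the fill ratio actually needed, and passing
  that value back to MatPtAP() avoids the reallocations counted in
  nspacedouble:

    Mat       C;
    PetscReal fill = 2.0;                                          // initial guess
    ierr = MatPtAP(A,P,MAT_INITIAL_MATRIX,fill,&C);CHKERRQ(ierr);  // run with -info
    // rerun (or reuse) with the reported "needed" fill value
*/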
1935 
1936 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ(Mat A,Mat P,Mat C)
1937 {
1938   PetscErrorCode    ierr;
1939   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
1940   Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
1941   Mat_SeqAIJ        *ap,*p_loc,*p_oth=NULL,*c_seq;
1942   Mat_APMPI         *ptap;
1943   Mat               AP_loc,C_loc,C_oth;
1944   PetscInt          i,rstart,rend,cm,ncols,row;
1945   PetscInt          *api,*apj,am = A->rmap->n,j,col,apnz;
1946   PetscScalar       *apa;
1947   const PetscInt    *cols;
1948   const PetscScalar *vals;
1949 
1950   PetscFunctionBegin;
1951   MatCheckProduct(C,3);
1952   ptap = (Mat_APMPI*)C->product->data;
1953   if (!ptap) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be computed. Missing data");
1954   if (!ptap->AP_loc) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatProductClear()");
1955 
1956   ierr = MatZeroEntries(C);CHKERRQ(ierr);
1957   /* 1) get R = Pd^T,Ro = Po^T */
1958   if (ptap->reuse == MAT_REUSE_MATRIX) {
1959     ierr = MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);CHKERRQ(ierr);
1960     ierr = MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);CHKERRQ(ierr);
1961   }
1962 
1963   /* 2) get AP_loc */
1964   AP_loc = ptap->AP_loc;
1965   ap = (Mat_SeqAIJ*)AP_loc->data;
1966 
1967   /* 2-1) get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
1968   /*-----------------------------------------------------*/
1969   if (ptap->reuse == MAT_REUSE_MATRIX) {
1970     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
1971     ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
1972     ierr = MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
1973   }
1974 
1975   /* 2-2) compute numeric A_loc*P - dominating part */
1976   /* ---------------------------------------------- */
1977   /* get data from symbolic products */
1978   p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
1979   if (ptap->P_oth) {
1980     p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;
1981   }
1982   apa   = ptap->apa;
1983   api   = ap->i;
1984   apj   = ap->j;
1985   for (i=0; i<am; i++) {
1986     /* AP[i,:] = A[i,:]*P = Ad[i,:]*P_loc + Ao[i,:]*P_oth */
1987     AProw_nonscalable(i,ad,ao,p_loc,p_oth,apa);
1988     apnz = api[i+1] - api[i];
1989     for (j=0; j<apnz; j++) {
1990       col = apj[j+api[i]];
1991       ap->a[j+ap->i[i]] = apa[col];
1992       apa[col] = 0.0;
1993     }
1994   }
1995   /* We have modified the contents of local matrix AP_loc and must increase its ObjectState, since we are not doing AssemblyBegin/End on it. */
1996   ierr = PetscObjectStateIncrease((PetscObject)AP_loc);CHKERRQ(ierr);
1997 
1998   /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
1999   ierr = ((ptap->C_loc)->ops->matmultnumeric)(ptap->Rd,AP_loc,ptap->C_loc);CHKERRQ(ierr);
2000   ierr = ((ptap->C_oth)->ops->matmultnumeric)(ptap->Ro,AP_loc,ptap->C_oth);CHKERRQ(ierr);
2001   C_loc = ptap->C_loc;
2002   C_oth = ptap->C_oth;
2003 
2004   /* add C_loc and C_oth to C */
2005   ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);
2006 
2007   /* C_loc -> C */
2008   cm    = C_loc->rmap->N;
2009   c_seq = (Mat_SeqAIJ*)C_loc->data;
2010   cols = c_seq->j;
2011   vals = c_seq->a;
2012 
2014   /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat function can only be used when C->was_assembled is PETSC_FALSE and
2015      when there are no off-processor parts.
2016      If was_assembled is true, then the statement aj[rowstart_diag+dnz_row] = mat_j[col] - cstart; in MatSetValues_MPIAIJ_CopyFromCSRFormat
2017      is no longer valid, and the more general MatSetValues_MPIAIJ() has to be used, where the column index is looked up from
2018      a table and additional bookkeeping is required. */
2019   if (C->assembled) {
2020     C->was_assembled = PETSC_TRUE;
2021     C->assembled     = PETSC_FALSE;
2022   }
2023   if (C->was_assembled) {
2024     for (i=0; i<cm; i++) {
2025       ncols = c_seq->i[i+1] - c_seq->i[i];
2026       row = rstart + i;
2027       ierr = MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
2028       cols += ncols; vals += ncols;
2029     }
2030   } else {
2031     ierr = MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);CHKERRQ(ierr);
2032   }
2033 
2034   /* Co -> C, off-processor part */
2035   cm = C_oth->rmap->N;
2036   c_seq = (Mat_SeqAIJ*)C_oth->data;
2037   cols = c_seq->j;
2038   vals = c_seq->a;
2039   for (i=0; i<cm; i++) {
2040     ncols = c_seq->i[i+1] - c_seq->i[i];
2041     row = p->garray[i];
2042     ierr = MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
2043     cols += ncols; vals += ncols;
2044   }
2045 
2046   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2047   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2048 
2049   ptap->reuse = MAT_REUSE_MATRIX;
2050   PetscFunctionReturn(0);
2051 }
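
/* MatPtAPNumeric_MPIAIJ_MPIAIJ() is installed as C->ops->ptapnumeric by the
   symbolic routine above and is invoked through MatProductNumeric_PtAP(),
   which the product-symbolic dispatcher below selects. */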
2052 
2053 PETSC_INTERN PetscErrorCode MatProductSymbolic_PtAP_MPIAIJ_MPIAIJ(Mat C)
2054 {
2055   PetscErrorCode      ierr;
2056   Mat_Product         *product = C->product;
2057   Mat                 A=product->A,P=product->B;
2058   MatProductAlgorithm alg=product->alg;
2059   PetscReal           fill=product->fill;
2060   PetscBool           flg;
2061 
2062   PetscFunctionBegin;
2063   /* scalable: do R=P^T locally, then C=R*A*P */
2064   ierr = PetscStrcmp(alg,"scalable",&flg);CHKERRQ(ierr);
2065   if (flg) {
2066     ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(A,P,fill,C);CHKERRQ(ierr);
2067     goto next;
2069   }
2070 
2071   /* nonscalable: do R=P^T locally, then C=R*A*P */
2072   ierr = PetscStrcmp(alg,"nonscalable",&flg);CHKERRQ(ierr);
2073   if (flg) {
2074     ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ(A,P,fill,C);CHKERRQ(ierr);
2075     goto next;
2076   }
2077 
2078   /* allatonce */
2079   ierr = PetscStrcmp(alg,"allatonce",&flg);CHKERRQ(ierr);
2080   if (flg) {
2081     ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(A,P,fill,C);CHKERRQ(ierr);
2082     goto next;
2083   }
2084 
2085   /* allatonce_merged */
2086   ierr = PetscStrcmp(alg,"allatonce_merged",&flg);CHKERRQ(ierr);
2087   if (flg) {
2088     ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(A,P,fill,C);CHKERRQ(ierr);
2089     goto next;
2090   }
2091 
2092   /* hypre */
2093 #if defined(PETSC_HAVE_HYPRE)
2094   ierr = PetscStrcmp(alg,"hypre",&flg);CHKERRQ(ierr);
2095   if (flg) {
2096     ierr = MatPtAPSymbolic_AIJ_AIJ_wHYPRE(A,P,fill,C);CHKERRQ(ierr);
2097     C->ops->productnumeric = MatProductNumeric_PtAP;
2098     PetscFunctionReturn(0);
2099   }
2100 #endif
2101   SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_SUP,"Mat Product Algorithm is not supported");
2102 
2103 next:
2104   C->ops->productnumeric = MatProductNumeric_PtAP;
2105   PetscFunctionReturn(0);
2106 }
2107
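
/*
  Dispatch sketch (illustrative; every call below is public PETSc API): this
  routine serves as the symbolic hook of a MATPRODUCT_PtAP product, e.g.

    Mat C;
    ierr = MatProductCreate(A,P,NULL,&C);CHKERRQ(ierr);
    ierr = MatProductSetType(C,MATPRODUCT_PtAP);CHKERRQ(ierr);
    ierr = MatProductSetAlgorithm(C,"allatonce");CHKERRQ(ierr);  // or "scalable", "nonscalable", "allatonce_merged"
    ierr = MatProductSetFill(C,PETSC_DEFAULT);CHKERRQ(ierr);
    ierr = MatProductSetFromOptions(C);CHKERRQ(ierr);
    ierr = MatProductSymbolic(C);CHKERRQ(ierr);   // lands in MatProductSymbolic_PtAP_MPIAIJ_MPIAIJ()
    ierr = MatProductNumeric(C);CHKERRQ(ierr);
*/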