/*
   Support for the parallel SELL matrix vector multiply
*/
#include <../src/mat/impls/sell/mpi/mpisell.h>
#include <petsc/private/isimpl.h> /* needed because we access the data structure of ISLocalToGlobalMapping directly */

/*
  Takes the local part of an already assembled MPISELL matrix
  and disassembles it. This is to allow new nonzeros into the matrix
  that require more communication in the matrix-vector multiply.
  Thus certain data structures must be rebuilt.

  Kind of slow! But that's what application programmers get when
  they are sloppy.
*/
PetscErrorCode MatDisAssemble_MPISELL(Mat A)
{
  Mat_MPISELL    *sell=(Mat_MPISELL*)A->data;
  Mat            B=sell->B,Bnew;
  Mat_SeqSELL    *Bsell=(Mat_SeqSELL*)B->data;
  PetscInt       i,j,totalslices,N=A->cmap->N,ec,row;
  PetscBool      isnonzero;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* free stuff related to matrix-vec multiply */
  ierr = VecGetSize(sell->lvec,&ec);CHKERRQ(ierr); /* needed for PetscLogObjectMemory below */
  ierr = VecDestroy(&sell->lvec);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&sell->Mvctx);CHKERRQ(ierr);
  if (sell->colmap) {
#if defined(PETSC_USE_CTABLE)
    ierr = PetscTableDestroy(&sell->colmap);CHKERRQ(ierr);
#else
    ierr = PetscFree(sell->colmap);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)A,-sell->B->cmap->n*sizeof(PetscInt));CHKERRQ(ierr);
#endif
  }

  /* make sure that B is assembled so we can access its values */
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* invent new B and copy stuff over */
  ierr = MatCreate(PETSC_COMM_SELF,&Bnew);CHKERRQ(ierr);
  ierr = MatSetSizes(Bnew,B->rmap->n,N,B->rmap->n,N);CHKERRQ(ierr);
  ierr = MatSetBlockSizesFromMats(Bnew,A,A);CHKERRQ(ierr);
  ierr = MatSetType(Bnew,((PetscObject)B)->type_name);CHKERRQ(ierr);
  ierr = MatSeqSELLSetPreallocation(Bnew,0,Bsell->rlen);CHKERRQ(ierr);
  if (Bsell->nonew >= 0) { /* Inherit insertion error options (if nonnegative). */
    ((Mat_SeqSELL*)Bnew->data)->nonew = Bsell->nonew;
  }

  /*
    Ensure that B's nonzerostate is monotonically increasing.
    Or should this follow the MatSetValues() loop to preserve B's nonzerostate across a MatDisAssemble() call?
  */
  Bnew->nonzerostate = B->nonzerostate;

  totalslices = B->rmap->n/8+((B->rmap->n & 0x07)?1:0); /* ceil(n/8) */
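  /*
     In SELL format rows are grouped into slices of 8, and within slice i the entries of
     colidx[]/val[] between sliidx[i] and sliidx[i+1] are interleaved across the slice's 8
     rows: as j advances, row cycles through 0..7 and (j-sliidx[i])/8 is the position within
     the row. Positions at or beyond rlen[8*i+row] are padding and are skipped below.
  */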
  for (i=0; i<totalslices; i++) { /* loop over slices */
    for (j=Bsell->sliidx[i],row=0; j<Bsell->sliidx[i+1]; j++,row=((row+1)&0x07)) {
      isnonzero = (PetscBool)((j-Bsell->sliidx[i])/8 < Bsell->rlen[8*i+row]);
      if (isnonzero) {
        ierr = MatSetValue(Bnew,8*i+row,sell->garray[Bsell->colidx[j]],Bsell->val[j],B->insertmode);CHKERRQ(ierr);
      }
    }
  }

  ierr = PetscFree(sell->garray);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)A,-ec*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = MatDestroy(&B);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)A,(PetscObject)Bnew);CHKERRQ(ierr);

  sell->B          = Bnew;
  A->was_assembled = PETSC_FALSE;
  A->assembled     = PETSC_FALSE;
  PetscFunctionReturn(0);
}

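/*
   Builds the data structures needed for the parallel matrix-vector multiply: the list of
   global columns referenced by the off-diagonal block (garray), the compacted column
   indices of B, the sequential work vector lvec, and the scatter context Mvctx that
   gathers the needed off-process vector entries.
*/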
PetscErrorCode MatSetUpMultiply_MPISELL(Mat mat)
{
  Mat_MPISELL    *sell=(Mat_MPISELL*)mat->data;
  Mat_SeqSELL    *B=(Mat_SeqSELL*)(sell->B->data);
  PetscErrorCode ierr;
  PetscInt       i,j,*bcolidx=B->colidx,ec=0,*garray,totalslices;
  IS             from,to;
  Vec            gvec;
  PetscBool      isnonzero;
#if defined(PETSC_USE_CTABLE)
  PetscTable         gid1_lid1;
  PetscTablePosition tpos;
  PetscInt           gid,lid;
#else
  PetscInt N = mat->cmap->N,*indices;
#endif

  PetscFunctionBegin;
  totalslices = sell->B->rmap->n/8+((sell->B->rmap->n & 0x07)?1:0); /* ceil(n/8) */

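  /*
     The slice traversal below mirrors MatDisAssemble_MPISELL(); since every sliidx[i] is a
     multiple of 8 (each slice stores 8*width entries), the row within the slice is simply
     j & 0x07.
  */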
  /* ec counts the number of columns that contain nonzeros */
#if defined(PETSC_USE_CTABLE)
  /* use a table */
  ierr = PetscTableCreate(sell->B->rmap->n,mat->cmap->N+1,&gid1_lid1);CHKERRQ(ierr);
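  /* the table is one-based: it maps (global column id)+1 to (local id)+1, so a lookup
     returning 0 means the column has not been seen yet */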
  for (i=0; i<totalslices; i++) { /* loop over slices */
    for (j=B->sliidx[i]; j<B->sliidx[i+1]; j++) {
      isnonzero = (PetscBool)((j-B->sliidx[i])/8 < B->rlen[(i<<3)+(j&0x07)]);
      if (isnonzero) { /* check the mask bit */
        PetscInt data,gid1 = bcolidx[j] + 1;
        ierr = PetscTableFind(gid1_lid1,gid1,&data);CHKERRQ(ierr);
        if (!data) {
          /* one based table */
          ierr = PetscTableAdd(gid1_lid1,gid1,++ec,INSERT_VALUES);CHKERRQ(ierr);
        }
      }
    }
  }

  /* form array of columns we need */
  ierr = PetscMalloc1(ec+1,&garray);CHKERRQ(ierr);
  ierr = PetscTableGetHeadPosition(gid1_lid1,&tpos);CHKERRQ(ierr);
  while (tpos) {
    ierr = PetscTableGetNext(gid1_lid1,&tpos,&gid,&lid);CHKERRQ(ierr);
    gid--;
    lid--;
    garray[lid] = gid;
  }
  ierr = PetscSortInt(ec,garray);CHKERRQ(ierr); /* sort into increasing global order, then rebuild the table with the new local ids */
  ierr = PetscTableRemoveAll(gid1_lid1);CHKERRQ(ierr);
  for (i=0; i<ec; i++) {
    ierr = PetscTableAdd(gid1_lid1,garray[i]+1,i+1,INSERT_VALUES);CHKERRQ(ierr);
  }

  /* compact out the extra columns in B */
  for (i=0; i<totalslices; i++) { /* loop over slices */
    for (j=B->sliidx[i]; j<B->sliidx[i+1]; j++) {
      isnonzero = (PetscBool)((j-B->sliidx[i])/8 < B->rlen[(i<<3)+(j&0x07)]);
      if (isnonzero) {
        PetscInt gid1 = bcolidx[j] + 1;
        ierr = PetscTableFind(gid1_lid1,gid1,&lid);CHKERRQ(ierr);
        lid--;
        bcolidx[j] = lid;
      }
    }
  }
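  /* B now uses the compacted local column indices 0..ec-1, so shrink its column layout to ec */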
  ierr = PetscLayoutDestroy(&sell->B->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)sell->B),ec,ec,1,&sell->B->cmap);CHKERRQ(ierr);
  ierr = PetscTableDestroy(&gid1_lid1);CHKERRQ(ierr);
#else
  /* Make an array as long as the number of columns */
  ierr = PetscCalloc1(N+1,&indices);CHKERRQ(ierr);
  /* mark those columns that are in sell->B */
  for (i=0; i<totalslices; i++) { /* loop over slices */
    for (j=B->sliidx[i]; j<B->sliidx[i+1]; j++) {
      isnonzero = (PetscBool)((j-B->sliidx[i])/8 < B->rlen[(i<<3)+(j&0x07)]);
      if (isnonzero) {
        if (!indices[bcolidx[j]]) ec++;
        indices[bcolidx[j]] = 1;
      }
    }
  }

  /* form array of columns we need */
  ierr = PetscMalloc1(ec+1,&garray);CHKERRQ(ierr);
  ec   = 0;
  for (i=0; i<N; i++) {
    if (indices[i]) garray[ec++] = i;
  }

  /* make indices now point into garray */
  for (i=0; i<ec; i++) {
    indices[garray[i]] = i;
  }

  /* compact out the extra columns in B */
  for (i=0; i<totalslices; i++) { /* loop over slices */
    for (j=B->sliidx[i]; j<B->sliidx[i+1]; j++) {
      isnonzero = (PetscBool)((j-B->sliidx[i])/8 < B->rlen[(i<<3)+(j&0x07)]);
      if (isnonzero) bcolidx[j] = indices[bcolidx[j]];
    }
  }
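  /* B now uses the compacted local column indices 0..ec-1, so shrink its column layout to ec */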
  ierr = PetscLayoutDestroy(&sell->B->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)sell->B),ec,ec,1,&sell->B->cmap);CHKERRQ(ierr);
  ierr = PetscFree(indices);CHKERRQ(ierr);
#endif
  /* create local vector that is used to scatter into */
  ierr = VecCreateSeq(PETSC_COMM_SELF,ec,&sell->lvec);CHKERRQ(ierr);
  /* create two temporary index sets used to build the scatter: "from" holds the global
     indices of the needed columns, "to" their target positions 0..ec-1 in lvec */
  ierr = ISCreateGeneral(PETSC_COMM_SELF,ec,garray,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,ec,0,1,&to);CHKERRQ(ierr);

  /* create temporary global vector to generate scatter context */
  /* this does not allocate the array's memory, so it is efficient */
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)mat),1,mat->cmap->n,mat->cmap->N,NULL,&gvec);CHKERRQ(ierr);

  /* generate the scatter context */
  ierr = VecScatterCreate(gvec,from,sell->lvec,to,&sell->Mvctx);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)sell->Mvctx);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)sell->lvec);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)from);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)to);CHKERRQ(ierr);

  sell->garray = garray;

  ierr = PetscLogObjectMemory((PetscObject)mat,(ec+1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = VecDestroy(&gvec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* ugly stuff added for Glenn; someday we should fix this up */
static PetscInt *auglyrmapd = NULL,*auglyrmapo = NULL; /* mapping from the local ordering to the "diagonal" and "off-diagonal" parts of the local matrix */
static Vec      auglydd = NULL,auglyoo = NULL;         /* work vectors used to scale the two parts of the local matrix */

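/*
   Builds auglyrmapd/auglyrmapo and the work vectors auglydd/auglyoo from the matrix's
   local-to-global row mapping, so that MatDiagonalScaleLocal_MPISELL() can split a
   locally ordered scale vector into its diagonal and off-diagonal pieces.

   A small sketch (hypothetical numbers, not from any particular run): if this process owns
   global rows [3,6) and the local-to-global mapping is {4,9,3,5}, then local indices 0,2,3
   land in the diagonal block and auglyrmapd = {2,0,3} (ordered by global index 3,4,5),
   while local index 1 (global column 9) is off-process and is recorded in auglyrmapo.
*/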
PetscErrorCode MatMPISELLDiagonalScaleLocalSetUp(Mat inA,Vec scale)
{
  Mat_MPISELL    *ina=(Mat_MPISELL*)inA->data; /* access private part of matrix */
  PetscErrorCode ierr;
  PetscInt       i,n,nt,cstart,cend,no,*garray=ina->garray,*lindices;
  PetscInt       *r_rmapd,*r_rmapo;

  PetscFunctionBegin;
  ierr = MatGetOwnershipRange(inA,&cstart,&cend);CHKERRQ(ierr);
  ierr = MatGetSize(ina->A,NULL,&n);CHKERRQ(ierr);
  ierr = PetscCalloc1(inA->rmap->mapping->n+1,&r_rmapd);CHKERRQ(ierr);
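  /* r_rmapd[i] holds (global index)+1 when local index i maps into this process's
     ownership range (the diagonal block), and 0 otherwise */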
  nt   = 0;
  for (i=0; i<inA->rmap->mapping->n; i++) {
    if (inA->rmap->mapping->indices[i] >= cstart && inA->rmap->mapping->indices[i] < cend) {
      nt++;
      r_rmapd[i] = inA->rmap->mapping->indices[i] + 1;
    }
  }
  if (nt != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hmm nt %D n %D",nt,n);
  ierr = PetscMalloc1(n+1,&auglyrmapd);CHKERRQ(ierr);
  for (i=0; i<inA->rmap->mapping->n; i++) {
    if (r_rmapd[i]) {
      auglyrmapd[(r_rmapd[i]-1)-cstart] = i;
    }
  }
  ierr = PetscFree(r_rmapd);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,n,&auglydd);CHKERRQ(ierr);
  ierr = PetscCalloc1(inA->cmap->N+1,&lindices);CHKERRQ(ierr);
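  /* lindices[g] holds (off-diagonal column index)+1 when global column g appears in
     garray, and 0 otherwise */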
  for (i=0; i<ina->B->cmap->n; i++) {
    lindices[garray[i]] = i+1;
  }
  no   = inA->rmap->mapping->n - nt;
  ierr = PetscCalloc1(inA->rmap->mapping->n+1,&r_rmapo);CHKERRQ(ierr);
  nt   = 0;
  for (i=0; i<inA->rmap->mapping->n; i++) {
    if (lindices[inA->rmap->mapping->indices[i]]) {
      nt++;
      r_rmapo[i] = lindices[inA->rmap->mapping->indices[i]];
    }
  }
  if (nt > no) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hmm nt %D no %D",nt,no);
  ierr = PetscFree(lindices);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,&auglyrmapo);CHKERRQ(ierr);
  for (i=0; i<inA->rmap->mapping->n; i++) {
    if (r_rmapo[i]) {
      auglyrmapo[(r_rmapo[i]-1)] = i;
    }
  }
  ierr = PetscFree(r_rmapo);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,nt,&auglyoo);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

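/*
   Column scales the "diagonal" (A) and "off-diagonal" (B) blocks of the matrix by the
   entries of scale, which is supplied in the local ordering given by the matrix's
   local-to-global row mapping.
*/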
PetscErrorCode MatDiagonalScaleLocal_MPISELL(Mat A,Vec scale)
{
  Mat_MPISELL       *a=(Mat_MPISELL*)A->data; /* access private part of matrix */
  PetscErrorCode    ierr;
  PetscInt          n,i;
  PetscScalar       *d,*o;
  const PetscScalar *s;

  PetscFunctionBegin;
  if (!auglyrmapd) {
    ierr = MatMPISELLDiagonalScaleLocalSetUp(A,scale);CHKERRQ(ierr);
  }
  ierr = VecGetArrayRead(scale,&s);CHKERRQ(ierr);
  ierr = VecGetLocalSize(auglydd,&n);CHKERRQ(ierr);
  ierr = VecGetArray(auglydd,&d);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    d[i] = s[auglyrmapd[i]]; /* copy "diagonal" (true local) portion of scale into dd vector */
  }
  ierr = VecRestoreArray(auglydd,&d);CHKERRQ(ierr);
  /* column scale "diagonal" portion of local matrix */
  ierr = MatDiagonalScale(a->A,NULL,auglydd);CHKERRQ(ierr);
  ierr = VecGetLocalSize(auglyoo,&n);CHKERRQ(ierr);
  ierr = VecGetArray(auglyoo,&o);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    o[i] = s[auglyrmapo[i]]; /* copy "off-diagonal" portion of scale into oo vector */
  }
  ierr = VecRestoreArrayRead(scale,&s);CHKERRQ(ierr);
  ierr = VecRestoreArray(auglyoo,&o);CHKERRQ(ierr);
  /* column scale "off-diagonal" portion of local matrix */
  ierr = MatDiagonalScale(a->B,NULL,auglyoo);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}