static char help[] = "Scatters from a sequential vector to a parallel vector.\n\
Uses block index sets.\n\n";

/* 'mpiexec -n 3 ./ex3 -vecscatter_type mpi3node' may give an incorrect result because multiple cores write to the same variable */

#include <petscvec.h>

int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       bs=1,n=5,i,low;
  PetscInt       ix0[3] = {5,7,9},iy0[3] = {1,2,4},ix1[3] = {2,3,4},iy1[3] = {0,1,3};
  PetscMPIInt    size,rank;
  PetscScalar    *array;
  Vec            x,y;
  IS             isx,isy;
  VecScatter     ctx;
  PetscViewer    sviewer;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);

  if (size < 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"Must run with more than one MPI process");

  ierr = PetscOptionsGetInt(NULL,NULL,"-bs",&bs,NULL);CHKERRQ(ierr);
  n    = bs*n;

  /* Create vector x over shared memory */
  ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
  ierr = VecSetSizes(x,n,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetType(x,VECNODE);CHKERRQ(ierr);
  ierr = VecSetFromOptions(x);CHKERRQ(ierr);

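  /* Fill x so that each local entry holds its global index (low + i) */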
  ierr = VecGetOwnershipRange(x,&low,NULL);CHKERRQ(ierr);
  ierr = VecGetArray(x,&array);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    array[i] = (PetscScalar)(i + low);
  }
  ierr = VecRestoreArray(x,&array);CHKERRQ(ierr);

  /* Create a sequential vector y */
  ierr = VecCreateSeq(PETSC_COMM_SELF,n,&y);CHKERRQ(ierr);
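  /* Fill y with 100*rank + i so the source rank of each scattered value is visible */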
  ierr = VecGetArray(y,&array);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    array[i] = (PetscScalar)(i + 100*rank);
  }
  ierr = VecRestoreArray(y,&array);CHKERRQ(ierr);

  /* Create two block index sets: isx indexes into x and isy indexes into y; rank 0 uses different indices than the other ranks */
  if (!rank) {
    ierr = ISCreateBlock(PETSC_COMM_SELF,bs,3,ix0,PETSC_COPY_VALUES,&isx);CHKERRQ(ierr);
    ierr = ISCreateBlock(PETSC_COMM_SELF,bs,3,iy0,PETSC_COPY_VALUES,&isy);CHKERRQ(ierr);
  } else {
    ierr = ISCreateBlock(PETSC_COMM_SELF,bs,3,ix1,PETSC_COPY_VALUES,&isx);CHKERRQ(ierr);
    ierr = ISCreateBlock(PETSC_COMM_SELF,bs,3,iy1,PETSC_COPY_VALUES,&isy);CHKERRQ(ierr);
  }

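  /* Optionally view isx for debugging; the rank check skips this in the test runs below */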
  if (rank == 10) {
    ierr = PetscPrintf(PETSC_COMM_SELF,"\n[%d] isx:\n",rank);CHKERRQ(ierr);
    ierr = ISView(isx,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
  }

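  /* Create the scatter context: entries of y selected by isy are sent to the entries of x selected by isx */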
  ierr = VecScatterCreate(y,isy,x,isx,&ctx);CHKERRQ(ierr);
  ierr = VecScatterSetFromOptions(ctx);CHKERRQ(ierr);

  /* Test forward scatter: add the selected entries of y into x */
  ierr = VecSet(x,0.0);CHKERRQ(ierr);
  ierr = VecScatterBegin(ctx,y,x,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(ctx,y,x,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  /* Test reverse scatter: negate x, then add its selected entries back into y */
  ierr = VecScale(x,-1.0);CHKERRQ(ierr);
  ierr = VecScatterBegin(ctx,x,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(ctx,x,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
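  /* View y from rank 1 only; every rank still calls Get/RestoreSubViewer because those calls are collective */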
  ierr = PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  if (rank == 1) {
    ierr = VecView(y,sviewer);CHKERRQ(ierr);
  }
  ierr = PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);

  /* Free work space */
  ierr = VecScatterDestroy(&ctx);CHKERRQ(ierr);
  ierr = ISDestroy(&isx);CHKERRQ(ierr);
  ierr = ISDestroy(&isy);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      nsize: 2
      args: -vecscatter_type mpi3node
      output_file: output/ex3_1.out
      requires: define(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY)

   test:
      suffix: 2
      nsize: 2
      args: -vecscatter_type mpi3
      output_file: output/ex3_1.out
      requires: define(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY)

   test:
      suffix: 3
      nsize: 2
      args: -bs 2 -vecscatter_type mpi3node
      output_file: output/ex3_3.out
      requires: define(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY)

   test:
      suffix: 4
      nsize: 2
      args: -bs 2 -vecscatter_type mpi3
      output_file: output/ex3_3.out
      requires: define(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY)

   test:
      suffix: 5
      nsize: 3
      args: -vecscatter_type mpi3
      output_file: output/ex3_5.out
      requires: define(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY)

TEST*/