!
!    Description: A star forest is a simple tree with one root and zero or more leaves.
!    Many common communication patterns can be expressed as updates of rootdata using leafdata and vice-versa.
!     This example creates a star forest, communicates values using the graph, views the graph, then destroys it.
!
!     This is a copy of ex1.c but currently only tests the broadcast operation
!
      program main
#include <petsc/finclude/petscvec.h>
      use petscvec
      implicit none

      PetscErrorCode                ierr
      PetscInt                      i,nroots,nrootsalloc,nleaves,nleavesalloc,mine(6),stride
      type(PetscSFNode)             remote(6)
      PetscMPIInt                   rank,size
      PetscSF                       sf
      PetscInt                      rootdata(6),leafdata(6)
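!     rootdata and leafdata are sized 6 = 3 (maximum roots or leaves on a rank) * stride (2)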

! used with PetscSFGetGraph()
      type(PetscSFNode), pointer :: gremote(:)
      PetscInt, pointer ::          gmine(:)
      PetscInt                      gnroots,gnleaves

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      stride = 2
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr);CHKERRA(ierr)
      call MPI_Comm_size(PETSC_COMM_WORLD,size,ierr);CHKERRA(ierr)

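!     Layout of the graph: rank 0 owns 3 roots and 2 leaves, every other rank owns
!     2 roots and 3 leaves.  Roots and leaves are stored strided through the
!     rootdata/leafdata buffers, so only every stride-th entry is referenced by the SF.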
      if (rank == 0) then
         nroots = 3
      else
         nroots = 2
      endif
      nrootsalloc  = nroots * stride
      if (rank > 0) then
         nleaves = 3
      else
         nleaves = 2
      endif
      nleavesalloc = nleaves * stride
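!     Leaf offsets (0-based) into leafdata; with stride > 1 only every stride-th
!     entry of leafdata is an actual leaf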
      if (stride > 1) then
         do i=1,nleaves
            mine(i) = stride * (i-1)
         enddo
      endif

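!     Each leaf records the rank that owns its root and that root's (0-based) offset in rootdata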
! Left periodic neighbor
      remote(1)%rank  = modulo(rank+size-1,size)
      remote(1)%index = 1 * stride
! Right periodic neighbor
      remote(2)%rank  = modulo(rank+1,size)
      remote(2)%index = 0 * stride
      if (rank > 0) then ! All ranks other than rank 0 also reference rank 0's third root (offset 2*stride)
         remote(3)%rank  = 0
         remote(3)%index = 2 * stride
      endif

!  Create a star forest for communication
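!     Note that the root space passed to PetscSFSetGraph() is nrootsalloc, not nroots,
!     because the remote root offsets are strided through the rootdata buffer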
      call PetscSFCreate(PETSC_COMM_WORLD,sf,ierr);CHKERRA(ierr)
      call PetscSFSetFromOptions(sf,ierr);CHKERRA(ierr)
      call PetscSFSetGraph(sf,nrootsalloc,nleaves,mine,PETSC_COPY_VALUES,remote,PETSC_COPY_VALUES,ierr);CHKERRA(ierr)
      call PetscSFSetUp(sf,ierr);CHKERRA(ierr)

!   View graph, mostly useful for debugging purposes.
      call PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL,ierr);CHKERRA(ierr)
      call PetscSFView(sf,PETSC_VIEWER_STDOUT_WORLD,ierr);CHKERRA(ierr)
      call PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD,ierr);CHKERRA(ierr)

!   The send and receive buffers are declared above with fixed size. This example communicates PetscInt,
!     but other types, including user-defined structures, could also be used.
!     Set the rootdata buffer to be broadcast
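!     Each active root receives the value 100*(rank+1) + its (0-based) root number, so the
!     origin of every value is visible in the output; the strided padding entries stay at -1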
      do i=1,nrootsalloc
         rootdata(i) = -1
      enddo
      do i=1,nroots
         rootdata(1 + (i-1)*stride) = 100*(rank+1) + i - 1
      enddo

!     Initialize the local buffer; these values are never used.
      do i=1,nleavesalloc
         leafdata(i) = -1
      enddo

!     Broadcast entries from rootdata to leafdata. Computation or other communication can be performed between the begin and end calls.
      call PetscSFBcastBegin(sf,MPIU_INTEGER,rootdata,leafdata,ierr);CHKERRA(ierr)
      call PetscSFBcastEnd(sf,MPIU_INTEGER,rootdata,leafdata,ierr);CHKERRA(ierr)
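!     After the broadcast each leaf entry of leafdata holds a copy of the root value it
!     references; the padding entries keep their initial value of -1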
      call PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Bcast Rootdata\n",ierr);CHKERRA(ierr)
      call PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD,ierr);CHKERRA(ierr)
      call PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Bcast Leafdata\n",ierr);CHKERRA(ierr)
      call PetscIntView(nleavesalloc,leafdata,PETSC_VIEWER_STDOUT_WORLD,ierr);CHKERRA(ierr)

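!     Check that the graph returned by PetscSFGetGraph() matches the one set with PetscSFSetGraph()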
      call PetscSFGetGraph(sf,gnroots,gnleaves,gmine,gremote,ierr);CHKERRA(ierr)
      if (gnleaves .ne. nleaves) then; SETERRA(PETSC_COMM_WORLD,PETSC_ERR_PLIB,'nleaves returned from PetscSFGetGraph() does not match that set with PetscSFSetGraph()'); endif
      do i=1,nleaves
         if (gmine(i) .ne. mine(i)) then; SETERRA(PETSC_COMM_WORLD,PETSC_ERR_PLIB,'Leaf local index from PetscSFGetGraph() does not match that set with PetscSFSetGraph()'); endif
      enddo
      do i=1,nleaves
         if (gremote(i)%index .ne. remote(i)%index) then; SETERRA(PETSC_COMM_WORLD,PETSC_ERR_PLIB,'Remote root from PetscSFGetGraph() does not match that set with PetscSFSetGraph()'); endif
      enddo

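!     Free the remote array obtained from PetscSFGetGraph()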
      deallocate(gremote)
!    Clean up the star forest.
      call PetscSFDestroy(sf,ierr);CHKERRA(ierr)
      call PetscFinalize(ierr)
      end

!/*TEST
!  build:
!    requires: define(PETSC_HAVE_FORTRAN_TYPE_STAR)
!
!  test:
!    nsize: 3
!
!TEST*/