!
module class_Distribution
#ifdef MPI
  use mpi_siesta
#endif

  implicit none

  character(len=*), parameter :: mod_name="class_Distribution.F90"

#ifndef MPI
  ! To allow compilation in serial mode
  integer, parameter, private :: MPI_GROUP_NULL = -huge(1)
  integer, parameter, private :: MPI_COMM_NULL = -huge(1)
  integer, parameter, private :: MPI_UNDEFINED = -huge(1)
#endif

  !---------------------------------------------
  integer, parameter, public          :: TYPE_NULL = -1
  integer, parameter, public          :: TYPE_BLOCK_CYCLIC = 1
  integer, parameter, public          :: TYPE_PEXSI = 2
  !---------------------------------------------

  type Distribution_
     integer              :: refCount = 0
     character(len=36)    :: id
     !------------------------
     character(len=256)   :: name = "null Dist"
     !------------------------
     integer       :: dist_type
     !------------------------------------------
     integer  :: ref_comm  = MPI_COMM_NULL
     integer  :: group     = MPI_GROUP_NULL      ! MPI group
     integer  :: node      = MPI_UNDEFINED       ! MPI rank in group  (my_proc)
     integer, allocatable :: ranks_in_ref_comm(:)
     integer  :: nodes = 0       ! MPI size of group  (nprocs)
     integer  :: node_io = -1    ! Node capable of IO
     !------------------------
     integer  :: blocksize = 0   ! Block size: cyclic block length for
     !                           ! TYPE_BLOCK_CYCLIC; elements per node for
     !                           ! TYPE_PEXSI (the last node takes the remainder)
     integer  :: isrcproc = 0    ! Processor holding the first element (unused)
     !------------------------------------------
  end type Distribution_

  type Distribution
     type(Distribution_), pointer :: data => null()
  end type Distribution
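
  ! Note: "Distribution" is a lightweight handle to reference-counted
  ! Distribution_ data.  The bookkeeping (init, delete, object tagging)
  ! comes from the "basic_type.inc" template included below.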

  type(Distribution_), pointer :: obj => null()

  public :: newDistribution, distType
  public :: ref_comm, get_ranks_in_ref_comm
  public :: num_local_elements, node_handling_element
  public :: index_local_to_global, index_global_to_local

  interface newDistribution
     module procedure newDistribution_
  end interface newDistribution

  interface distType
     module procedure dist_type
  end interface distType

  interface ref_comm
     module procedure ref_comm_
  end interface ref_comm

  interface get_ranks_in_ref_comm
     module procedure get_ranks_in_ref_comm_
  end interface get_ranks_in_ref_comm

  interface num_local_elements
     module procedure num_local_elements_
  end interface num_local_elements
  interface index_local_to_global
     module procedure index_local_to_global_
  end interface index_local_to_global
  interface index_global_to_local
     module procedure index_global_to_local_
  end interface index_global_to_local
  interface node_handling_element
     module procedure node_handling_element_
  end interface node_handling_element

  !====================================
#define TYPE_NAME Distribution
#include "basic_type.inc"
  !====================================

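  ! Cleanup hook invoked by the reference-counting machinery of
  ! "basic_type.inc" when the last reference to a Distribution is
  ! deleted.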
  subroutine delete_Data(spdata)
    type(Distribution_) :: spdata
    integer :: ierr

#ifdef MPI
    if (spdata%group /= MPI_GROUP_NULL) then
       call MPI_Group_free(spdata%group,ierr)
    endif
#endif
    if (allocated(spdata%ranks_in_ref_comm)) then
       deallocate(spdata%ranks_in_ref_comm)
    endif

  end subroutine delete_Data

  function dist_type(this) result(dtype)
    type(Distribution)  :: this
    integer :: dtype

    obj => this%data

    dtype = obj%dist_type

  end function dist_type

  !
  subroutine newDistribution_(this,Ref_Comm,Ranks_in_Ref_Comm, &
       dist_type,Blocksize,name)
    !........................................
    ! Constructor
    !........................................
    type (Distribution), intent(inout) :: this
    integer, intent(in)                       :: ref_comm
    integer, intent(in)                       :: ranks_in_ref_comm(:)
    integer, intent(in)                       :: dist_type
    integer, intent(in)                       :: Blocksize
    character(len=*), intent(in), optional    :: name

    integer :: error, gsize, ref_group

    call init(this)

    obj => this%data

    obj%dist_type = dist_type
    obj%ref_comm = ref_comm
    obj%blocksize = Blocksize

    ! With F2003 automatic (re)allocation a plain assignment would suffice
    ! here; allocate explicitly for portability with older compilers.
    gsize = size(ranks_in_ref_comm)
    if (allocated(obj%ranks_in_ref_comm)) deallocate(obj%ranks_in_ref_comm)
    allocate(obj%ranks_in_ref_comm(gsize))
    obj%ranks_in_ref_comm(:)  = ranks_in_ref_comm(:)

    ! Define a group internally only for the purposes of
    ! getting the ranks consistently

#ifdef MPI
    call MPI_Comm_Group(ref_comm,ref_group,error)
    call MPI_Group_Incl(ref_group,gsize,obj%ranks_in_ref_comm,obj%group,error)
    call MPI_Group_Rank( obj%Group, obj%node, error )
    call MPI_Group_Size( obj%Group, obj%nodes, error )
    call MPI_Group_free(ref_group, error)
#else
    obj%node = 0
    obj%nodes = 1
#endif
!    print "(i0,a,i0,1x,4i2)", obj%dist_type,  &
!                " node, ranks in ref: ", obj%node, obj%ranks_in_ref_comm(:)
    if (present(name)) then
       obj%name = trim(name)
    else
       obj%name = "(Distribution from BlockSize and Ranks)"
    endif
    call tag_new_object(this)

  end subroutine newDistribution_
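
  ! Usage sketch (the communicator `world` and the rank list are
  ! placeholders for whatever the caller actually uses):
  !
  !   type(Distribution) :: dist
  !   integer            :: ranks(4) = (/ 0, 1, 2, 3 /)
  !
  !   call newDistribution(dist, world, ranks, TYPE_BLOCK_CYCLIC, &
  !                        Blocksize=24, name="bc over 4 ranks")
  !   ...
  !   call delete(dist)      ! release the reference (basic_type.inc)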

  !-----------------------------------------------------------
  subroutine get_ranks_in_ref_comm_(this,ranks)
    type(Distribution), intent(in)            :: this
    integer, allocatable, intent(out) :: ranks(:)

    integer :: rsize

    obj => this%data
    rsize = size(obj%ranks_in_ref_comm)
    allocate(ranks(rsize))
    ranks(:) = obj%ranks_in_ref_comm(:)

  end subroutine get_ranks_in_ref_comm_
  !-----------------------------------------------------------
  function ref_comm_(this) result(comm)
    type(Distribution), intent(in)  :: this
    integer                 :: comm

    obj => this%data
    comm =  obj%ref_comm

  end function ref_comm_
  !-----------------------------------------------------------
  function num_local_elements_(this,nels,Node) result(nl)
    type(Distribution), intent(in)  :: this
    integer, intent(in)                    :: nels
    integer, intent(in)                    :: Node
    integer                                :: nl

#ifdef MPI
    integer, external :: numroc
#endif
    integer :: remainder

    obj => this%data

    select case(obj%dist_type)
    case (TYPE_BLOCK_CYCLIC)
       if (Node >= obj%Nodes) then
          nl = 0
       else
#ifdef MPI
          nl = numroc(nels,obj%blocksize,Node,  &
               obj%isrcproc,obj%nodes)
#else
          nl = nels
#endif
       endif
    case (TYPE_PEXSI)
       remainder = nels - obj%blocksize * obj%nodes
       if (Node == (obj%Nodes - 1)) then
          nl = obj%blocksize + remainder
       else if (Node >= obj%Nodes) then
          nl = 0
       else
          nl = obj%blocksize
       endif

    case default
       nl = -huge(1)   ! Other signal?
    end select

  end function num_local_elements_
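
  ! Note on the TYPE_PEXSI case above: the layout is a simple block
  ! distribution with the remainder folded into the last node.
  ! E.g. nels=13, blocksize=2, nodes=5: remainder = 13 - 2*5 = 3, so
  ! nodes 0..3 hold 2 elements each and node 4 holds 2 + 3 = 5.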

  function index_local_to_global_(this,il,Node) result(ig)
    type(Distribution), intent(in)  :: this
    integer, intent(in)                    :: il
    integer, intent(in)                    :: Node
    integer                                :: ig

#ifdef MPI
    integer, external :: indxl2g
#endif

    obj => this%data

    select case(obj%dist_type)
    case (TYPE_BLOCK_CYCLIC)
       if (Node >= obj%Nodes) then
          ig = 0
       else
#ifdef MPI
          ig = indxl2g(il,obj%blocksize,Node, &
               obj%isrcproc,obj%nodes)
#else
          ig = il
#endif
       endif
    case (TYPE_PEXSI)
       if (Node >= obj%Nodes) then
          ig = 0
       else
          ig = obj%blocksize*Node + il
       endif
    case default
       ig = 0   ! Other signal?
    end select

  end function index_local_to_global_

  function index_global_to_local_(this,ig,Node) result(il)
    type(Distribution), intent(in)  :: this
    integer, intent(in)                    :: ig
    integer, intent(in), optional          :: Node
    integer                                :: il

    integer :: owner
#ifdef MPI
    integer, external :: indxg2l
#endif

    obj => this%data

    select case(obj%dist_type)
    case (TYPE_BLOCK_CYCLIC)
       if (present(Node)) then
          if (Node >= obj%nodes) then
             il = 0
          else
#ifdef MPI
             il = indxg2l(ig,obj%blocksize,Node,  &
                  obj%isrcproc,obj%nodes)
#else
             il = ig
#endif
          endif
       else
          ! Assume that we only want a non-zero value if the orb
          ! is local to this node
          owner = node_handling_element_(this,ig)
          if (owner == obj%node) then
#ifdef MPI
             il = indxg2l(ig,obj%blocksize,obj%node,  &
                  obj%isrcproc,obj%nodes)
#else
             il = ig
#endif
          else
             il = 0
          endif
       endif
    case (TYPE_PEXSI)
       if (present(Node)) then
          if (Node >= obj%nodes) then
             il = 0
          else
             il = ig - obj%blocksize*Node
          endif
       else
          ! Assume that we only want a non-zero value if the orb
          ! is local to this node
          owner = node_handling_element_(this,ig)
          if (owner == obj%node) then
             il = ig - obj%blocksize * obj%node
          else
             il = 0
          endif
       endif
    case default
       il = 0   ! Other signal?
    end select

  end function index_global_to_local_
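
  ! Note on the TYPE_PEXSI case above: il = ig - blocksize*Node inverts
  ! the local-to-global map.  E.g. blocksize=2, nodes=5, ig=13: the
  ! owning node is 4 (see node_handling_element_ below), so
  ! il = 13 - 2*4 = 5, the fifth local element on the last node.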

  function node_handling_element_(this,ig) result(proc)
    type(Distribution), intent(in)  :: this
    integer, intent(in)                    :: ig
    integer                                :: proc

    integer, parameter :: dummy_Node = 0
#ifdef MPI
    integer, external  :: indxg2p
#endif

    obj => this%data

    select case(obj%dist_type)
    case (TYPE_BLOCK_CYCLIC)
#ifdef MPI
       proc = indxg2p(ig,obj%blocksize,dummy_Node,  &
            obj%isrcproc,obj%nodes)
#else
       proc = 0
#endif

    case (TYPE_PEXSI)
       ! Assume bs=2, norbs=13, nodes=5.
       ! Then the distribution is 2, 2, 2, 2, 5 for procs 0,1,2,3,4.
       ! For ig=13 the naive calculation (first line below) gives proc=6;
       ! we have to correct this to assign that orb to proc 4.
       ! Likewise for ig=11 and ig=12, which naively map to proc=5.
       ! In this case the load balancing is quite bad.

       proc = (ig-1) / obj%blocksize
       if (proc > obj%Nodes-1) proc = obj%Nodes - 1

    case default
       proc = MPI_UNDEFINED   ! Other signal?
    end select

  end function node_handling_element_

end module class_Distribution