!--------------------------------------------------------------------------------------------------!
!   CP2K: A general program to perform molecular dynamics simulations                               !
!   Copyright (C) 2000 - 2019  CP2K developers group                                                !
!--------------------------------------------------------------------------------------------------!

! **************************************************************************************************
!> \brief Environment for NEGF based quantum transport calculations
!> \author Sergey Chulkov
! **************************************************************************************************

MODULE negf_subgroup_types
   USE cp_blacs_env,                    ONLY: cp_blacs_env_create,&
                                              cp_blacs_env_release,&
                                              cp_blacs_env_retain,&
                                              cp_blacs_env_type,&
                                              get_blacs_info
   USE cp_para_env,                     ONLY: cp_para_env_create,&
                                              cp_para_env_release,&
                                              cp_para_env_retain
   USE cp_para_types,                   ONLY: cp_para_env_type
   USE message_passing,                 ONLY: mp_comm_split
   USE negf_control_types,              ONLY: negf_control_type
#include "./base/base_uses.f90"

   IMPLICIT NONE
   PRIVATE

   CHARACTER(len=*), PARAMETER, PRIVATE :: moduleN = 'negf_subgroup_types'
   LOGICAL, PARAMETER, PRIVATE          :: debug_this_module = .TRUE.

   PUBLIC :: negf_subgroup_env_type, negf_sub_env_create, negf_sub_env_release

! **************************************************************************************************
!> \brief Parallel (sub)group environment.
!> \par History
!>    * 06.2017 created [Sergey Chulkov]
! **************************************************************************************************
   TYPE negf_subgroup_env_type
      !> number of parallel groups.
      !> If it is > 1, the global MPI communicator has actually been split into subgroups.
      !> All other components of the structure are always initialised regardless of the split status
      !> (they simply point to the corresponding global variables if no split has been made).
      INTEGER                                            :: ngroups
      !> global MPI rank of the given processor. The group-local MPI rank can be obtained
      !> as para_env%mepos. Useful to find out the index of the current group by accessing
      !> the 'group_distribution' array.
      INTEGER                                            :: mepos_global
      !> global MPI communicator
      INTEGER                                            :: mpi_comm_global
      !> MPI communicator of the current parallel group
      INTEGER                                            :: mpi_comm
      !> group_distribution(0:num_pe-1) : the process with global rank 'i' belongs to the
      !> parallel group with index 'group_distribution(i)'
      INTEGER, DIMENSION(:), ALLOCATABLE                 :: group_distribution
      !> group-specific BLACS parallel environment
      TYPE(cp_blacs_env_type), POINTER                   :: blacs_env
      !> group-specific MPI parallel environment
      TYPE(cp_para_env_type), POINTER                    :: para_env
   END TYPE negf_subgroup_env_type

CONTAINS
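! **************************************************************************************************
!> \brief Usage sketch (illustration only, not part of the original module): the typical lifecycle
!>        of a parallel (sub)group environment. It assumes that 'negf_control' and
!>        'blacs_env_global' have been initialised elsewhere, and that the grid-layout value 1
!>        denotes the conventional square BLACS grid (see the named constants in cp_blacs_env
!>        for the actual values).
! **************************************************************************************************
   SUBROUTINE negf_sub_env_usage_sketch(negf_control, blacs_env_global)
      TYPE(negf_control_type), POINTER                   :: negf_control
      TYPE(cp_blacs_env_type), POINTER                   :: blacs_env_global

      TYPE(negf_subgroup_env_type)                       :: sub_env
      INTEGER                                            :: my_group

      ! split the global MPI communicator into groups of (at least) negf_control%nprocs processes
      CALL negf_sub_env_create(sub_env, negf_control, blacs_env_global, 1, .TRUE.)

      ! index of the parallel group the current processor belongs to
      my_group = sub_env%group_distribution(sub_env%mepos_global)

      ! ... do group-local work using sub_env%para_env and sub_env%blacs_env ...

      ! release the group-specific environments (and the sub-communicator, if one was created)
      CALL negf_sub_env_release(sub_env)
   END SUBROUTINE negf_sub_env_usage_sketch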
! **************************************************************************************************
!> \brief Split the global MPI communicator to create a set of parallel (sub)groups.
!> \param sub_env            parallel (sub)group environment (initialised on exit)
!> \param negf_control       NEGF input control
!> \param blacs_env_global   BLACS environment for all the processors
!> \param blacs_grid_layout  BLACS grid layout
!> \param blacs_repeatable   make BLACS collective operations repeatable
!> \par History
!>    * 06.2017 created [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE negf_sub_env_create(sub_env, negf_control, blacs_env_global, blacs_grid_layout, blacs_repeatable)
      TYPE(negf_subgroup_env_type), INTENT(out)          :: sub_env
      TYPE(negf_control_type), POINTER                   :: negf_control
      TYPE(cp_blacs_env_type), POINTER                   :: blacs_env_global
      INTEGER, INTENT(in)                                :: blacs_grid_layout
      LOGICAL, INTENT(in)                                :: blacs_repeatable

      CHARACTER(LEN=*), PARAMETER :: routineN = 'negf_sub_env_create', &
         routineP = moduleN//':'//routineN

      INTEGER                                            :: handle
      LOGICAL                                            :: is_split
      TYPE(cp_para_env_type), POINTER                    :: para_env_global

      CALL timeset(routineN, handle)

      CALL get_blacs_info(blacs_env_global, para_env=para_env_global)
      sub_env%mepos_global = para_env_global%mepos
      sub_env%mpi_comm_global = para_env_global%group

      ! ++ split the MPI communicator if
      !    a) the requested number of processors per group is > 0
      !       (i.e. the split has been requested explicitly), and
      !    b) the resulting number of subgroups is >= 2
      is_split = negf_control%nprocs > 0 .AND. negf_control%nprocs*2 <= para_env_global%num_pe

      IF (is_split) THEN
         ALLOCATE (sub_env%group_distribution(0:para_env_global%num_pe - 1))

         CALL mp_comm_split(comm=para_env_global%group, sub_comm=sub_env%mpi_comm, ngroups=sub_env%ngroups, &
                            group_distribution=sub_env%group_distribution, subgroup_min_size=negf_control%nprocs)

         ! ++ create a new parallel environment based on the newly created sub-communicator
         NULLIFY (sub_env%blacs_env, sub_env%para_env)
         CALL cp_para_env_create(sub_env%para_env, sub_env%mpi_comm)

         ! create a group-specific BLACS environment using the requested grid layout
         ! and repeatability flag
         CALL cp_blacs_env_create(sub_env%blacs_env, sub_env%para_env, blacs_grid_layout, blacs_repeatable)
      ELSE
         sub_env%mpi_comm = para_env_global%group
         sub_env%ngroups = 1

         ALLOCATE (sub_env%group_distribution(0:para_env_global%num_pe - 1))
         sub_env%group_distribution(:) = 0

         sub_env%blacs_env => blacs_env_global
         CALL cp_blacs_env_retain(sub_env%blacs_env)

         sub_env%para_env => para_env_global
         CALL cp_para_env_retain(sub_env%para_env)
      END IF

      CALL timestop(handle)
   END SUBROUTINE negf_sub_env_create
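! **************************************************************************************************
!> \brief Worked example (illustration only, not part of the original module): how the split
!>        condition in negf_sub_env_create() behaves. The concrete 'group_distribution' values
!>        assume that mp_comm_split() packs the ranks into contiguous groups of at least
!>        'subgroup_min_size' processes; consult the message_passing module for its exact behaviour.
!>        e.g. num_pe = 8, negf_control%nprocs = 4 :
!>             is_split = .TRUE. (4 > 0 and 4*2 <= 8), so ngroups = 2 and
!>             group_distribution = (/ 0, 0, 0, 0, 1, 1, 1, 1 /)
!>        e.g. num_pe = 6, negf_control%nprocs = 4 :
!>             is_split = .FALSE. (4*2 > 6), so the global environment is reused, ngroups = 1,
!>             and group_distribution(:) = 0
! **************************************************************************************************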
! **************************************************************************************************
!> \brief Release a parallel (sub)group environment.
!> \param sub_env   parallel (sub)group environment to release
!> \par History
!>    * 06.2017 created [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE negf_sub_env_release(sub_env)
      TYPE(negf_subgroup_env_type), INTENT(inout)        :: sub_env

      CHARACTER(LEN=*), PARAMETER :: routineN = 'negf_sub_env_release', &
         routineP = moduleN//':'//routineN

      INTEGER                                            :: handle

      CALL timeset(routineN, handle)

      CALL cp_blacs_env_release(sub_env%blacs_env)
      CALL cp_para_env_release(sub_env%para_env)

      ! there is no need to call mp_comm_free() here, as the MPI sub-communicator sub_env%mpi_comm
      ! has already been released by the subroutine cp_para_env_release()
      IF (ALLOCATED(sub_env%group_distribution)) &
         DEALLOCATE (sub_env%group_distribution)

      sub_env%ngroups = 0

      CALL timestop(handle)
   END SUBROUTINE negf_sub_env_release
END MODULE negf_subgroup_types