!--------------------------------------------------------------------------------------------------!
!   CP2K: A general program to perform molecular dynamics simulations                               !
!   Copyright (C) 2000 - 2020  CP2K developers group                                                !
!--------------------------------------------------------------------------------------------------!

MODULE qs_tddfpt2_subgroups
   USE admm_types,                      ONLY: admm_type
   USE atomic_kind_types,               ONLY: atomic_kind_type
   USE basis_set_types,                 ONLY: get_gto_basis_set,&
                                              gto_basis_set_type
   USE cell_types,                      ONLY: cell_type
   USE cp_blacs_env,                    ONLY: cp_blacs_env_create,&
                                              cp_blacs_env_release,&
                                              cp_blacs_env_retain,&
                                              cp_blacs_env_type
   USE cp_control_types,                ONLY: dft_control_type,&
                                              qs_control_type,&
                                              tddfpt2_control_type
   USE cp_dbcsr_cp2k_link,              ONLY: cp_dbcsr_alloc_block_from_nbl
   USE cp_dbcsr_operations,             ONLY: cp_dbcsr_dist2d_to_dist
   USE cp_fm_struct,                    ONLY: cp_fm_struct_create,&
                                              cp_fm_struct_release,&
                                              cp_fm_struct_type
   USE cp_fm_types,                     ONLY: cp_fm_copy_general,&
                                              cp_fm_create,&
                                              cp_fm_get_info,&
                                              cp_fm_p_type,&
                                              cp_fm_release,&
                                              cp_fm_retain,&
                                              cp_fm_type
   USE cp_para_env,                     ONLY: cp_para_env_create,&
                                              cp_para_env_release,&
                                              cp_para_env_retain
   USE cp_para_types,                   ONLY: cp_para_env_type
   USE dbcsr_api,                       ONLY: dbcsr_create,&
                                              dbcsr_distribution_release,&
                                              dbcsr_distribution_type,&
                                              dbcsr_get_info,&
                                              dbcsr_release,&
                                              dbcsr_type
   USE distribution_1d_types,           ONLY: distribution_1d_type
   USE distribution_2d_types,           ONLY: distribution_2d_release,&
                                              distribution_2d_type
   USE distribution_methods,            ONLY: distribute_molecules_2d
   USE input_constants,                 ONLY: tddfpt_kernel_full,&
                                              tddfpt_kernel_stda
   USE input_section_types,             ONLY: section_vals_type,&
                                              section_vals_val_get
   USE kinds,                           ONLY: default_string_length,&
                                              dp
   USE message_passing,                 ONLY: mp_comm_split
   USE molecule_kind_types,             ONLY: molecule_kind_type
   USE molecule_types,                  ONLY: molecule_type
   USE particle_types,                  ONLY: particle_type
   USE pw_env_methods,                  ONLY: pw_env_create,&
                                              pw_env_rebuild
   USE pw_env_types,                    ONLY: pw_env_release,&
                                              pw_env_retain,&
                                              pw_env_type
   USE qs_environment_types,            ONLY: get_qs_env,&
                                              qs_environment_type
   USE qs_kind_types,                   ONLY: get_qs_kind,&
                                              qs_kind_type
   USE qs_ks_types,                     ONLY: qs_ks_env_type
   USE qs_neighbor_list_types,          ONLY: neighbor_list_set_p_type,&
                                              release_neighbor_list_sets
   USE qs_neighbor_lists,               ONLY: atom2d_build,&
                                              atom2d_cleanup,&
                                              build_neighbor_lists,&
                                              local_atoms_type,&
                                              pair_radius_setup
   USE task_list_methods,               ONLY: generate_qs_task_list
   USE task_list_types,                 ONLY: allocate_task_list,&
                                              deallocate_task_list,&
                                              task_list_type
#include "./base/base_uses.f90"

   IMPLICIT NONE

   PRIVATE

   CHARACTER(len=*), PARAMETER, PRIVATE :: moduleN = 'qs_tddfpt2_subgroups'
   LOGICAL, PARAMETER, PRIVATE          :: debug_this_module = .TRUE.

   PUBLIC :: tddfpt_subgroup_env_type
   PUBLIC :: tddfpt_sub_env_init, tddfpt_sub_env_release
   PUBLIC :: tddfpt_dbcsr_create_by_dist, tddfpt_fm_replicate_across_subgroups

! **************************************************************************************************
!> \brief Parallel (sub)group environment.
!> \par History
!>   * 01.2017 created [Sergey Chulkov]
! **************************************************************************************************
   TYPE tddfpt_subgroup_env_type
      !> indicates that the global MPI communicator has been split into subgroups; if it is .FALSE.
      !> certain components of the structure (blacs_env, para_env, admm_A, and mos_occ)
      !> can still be accessed; in this case they simply point to the corresponding global variables
      LOGICAL :: is_split
      !> MPI communicator of the current parallel group
      INTEGER :: mpi_comm
      !> number of parallel groups
      INTEGER :: ngroups
      !> group_distribution(0:num_pe-1) : a process with rank 'i' belongs to the parallel group
      !> with index 'group_distribution(i)'
      INTEGER, DIMENSION(:), ALLOCATABLE :: group_distribution
      !> group-specific BLACS parallel environment
      TYPE(cp_blacs_env_type), POINTER :: blacs_env
      !> group-specific MPI parallel environment
      TYPE(cp_para_env_type), POINTER :: para_env
      !> occupied MOs stored in matrix form [nao x nmo_occ(spin)] distributed across processes
      !> in the parallel group
      TYPE(cp_fm_p_type), ALLOCATABLE, DIMENSION(:) :: mos_occ
      !> group-specific copy of the ADMM A matrix 'admm_type%A'
      TYPE(cp_fm_type), POINTER :: admm_A
      !
      !> indicates that a set of multi-grids has been allocated; if it is .FALSE. all the components
      !> below point to the corresponding global variables and can be accessed
      LOGICAL :: is_mgrid
      !> group-specific DBCSR distribution
      TYPE(dbcsr_distribution_type), POINTER :: dbcsr_dist
      !> group-specific two-dimensional distribution of pairs of particles
      TYPE(distribution_2d_type), POINTER :: dist_2d
      !> group-specific plane wave environment
      TYPE(pw_env_type), POINTER :: pw_env
      !> lists of neighbours in auxiliary and primary basis sets
      TYPE(neighbor_list_set_p_type), &
         DIMENSION(:), POINTER :: sab_aux_fit, sab_orb
      !> task lists in auxiliary and primary basis sets
      TYPE(task_list_type), POINTER :: task_list_aux_fit, task_list_orb
   END TYPE tddfpt_subgroup_env_type

! **************************************************************************************************
!> \brief Structure to save global multi-grid related parameters.
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   TYPE mgrid_saved_parameters
      !> create commensurate grids
      LOGICAL :: commensurate_mgrids
      !> create real-space grids
      LOGICAL :: realspace_mgrids
      !> do not perform load balancing
      LOGICAL :: skip_load_balance
      !> cutoff value at the finest grid level
      REAL(KIND=dp) :: cutoff
      !> inverse scale factor
      REAL(KIND=dp) :: progression_factor
      !> relative cutoff
      REAL(KIND=dp) :: relative_cutoff
      !> list of explicitly given cutoff values
      REAL(KIND=dp), DIMENSION(:), POINTER :: e_cutoff
   END TYPE mgrid_saved_parameters

CONTAINS
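
! **************************************************************************************************
!> \note Typical lifecycle of a subgroup environment (an illustrative sketch only; the variables
!>       'qs_env' and 'mos' are assumed to have been set up by the caller and are not part of
!>       this module):
!>
!>          TYPE(tddfpt_subgroup_env_type) :: sub_env
!>          CALL tddfpt_sub_env_init(sub_env, qs_env, mos, kernel=tddfpt_kernel_full)
!>          ! ... work with sub_env%para_env, sub_env%blacs_env, sub_env%mos_occ, ... per subgroup
!>          CALL tddfpt_sub_env_release(sub_env)
! **************************************************************************************************
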
! **************************************************************************************************
!> \brief Split the MPI communicator to create a set of parallel (sub)groups.
!> \param sub_env parallel group environment (initialised on exit)
!> \param qs_env  Quickstep environment
!> \param mos_occ ground state molecular orbitals in the primary atomic basis set
!> \param kernel  type of the kernel (full/sTDA) that will be used
!> \par History
!>   * 01.2017 (sub)group-related code has been moved here from the main subroutine tddfpt()
!>             [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_sub_env_init(sub_env, qs_env, mos_occ, kernel)
      TYPE(tddfpt_subgroup_env_type), INTENT(out)        :: sub_env
      TYPE(qs_environment_type), POINTER                 :: qs_env
      TYPE(cp_fm_p_type), DIMENSION(:), INTENT(in)       :: mos_occ
      INTEGER, INTENT(in)                                :: kernel

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_sub_env_init', &
         routineP = moduleN//':'//routineN

      INTEGER                                            :: handle, ispin, nao, nao_aux, nmo_occ, &
                                                            nspins
      TYPE(admm_type), POINTER                           :: admm_env
      TYPE(cp_blacs_env_type), POINTER                   :: blacs_env_global
      TYPE(cp_fm_struct_type), POINTER                   :: fm_struct
      TYPE(cp_para_env_type), POINTER                    :: para_env_global
      TYPE(dft_control_type), POINTER                    :: dft_control
      TYPE(mgrid_saved_parameters)                       :: mgrid_saved
      TYPE(pw_env_type), POINTER                         :: pw_env_global
      TYPE(qs_control_type), POINTER                     :: qs_control
      TYPE(tddfpt2_control_type), POINTER                :: tddfpt_control

      CALL timeset(routineN, handle)

      nspins = SIZE(mos_occ)

      CALL get_qs_env(qs_env, blacs_env=blacs_env_global, dft_control=dft_control, &
                      para_env=para_env_global, pw_env=pw_env_global)

      tddfpt_control => dft_control%tddfpt2_control
      qs_control => dft_control%qs_control

      ! ++ split the MPI communicator if
      !    a) the requested number of processors per group is > 0
      !       (meaning that the split has been requested explicitly), and
      !    b) the resulting number of subgroups is >= 2
      sub_env%is_split = tddfpt_control%nprocs > 0 .AND. tddfpt_control%nprocs*2 <= para_env_global%num_pe

      ALLOCATE (sub_env%mos_occ(nspins))
      NULLIFY (sub_env%admm_A)

      IF (sub_env%is_split) THEN
         ALLOCATE (sub_env%group_distribution(0:para_env_global%num_pe - 1))

         CALL mp_comm_split(comm=para_env_global%group, sub_comm=sub_env%mpi_comm, ngroups=sub_env%ngroups, &
                            group_distribution=sub_env%group_distribution, subgroup_min_size=tddfpt_control%nprocs)
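
         ! illustrative example: with 8 MPI ranks and nprocs = 4, mp_comm_split() would create
         ! ngroups = 2 subgroups, and group_distribution(0:7) would hold, for every global rank,
         ! the index (here 0 or 1) of the subgroup that rank has been assigned to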

         ! ++ create a new parallel environment based on the given sub-communicator
         NULLIFY (sub_env%blacs_env, sub_env%para_env)
         CALL cp_para_env_create(sub_env%para_env, sub_env%mpi_comm)

         ! use the default (SQUARE) BLACS grid layout and non-repeatable BLACS collective operations
         ! by omitting the optional parameters 'blacs_grid_layout' and 'blacs_repeatable'.
         ! Ideally we should take these parameters from the variables globenv%blacs_grid_layout and
         ! globenv%blacs_repeatable; however, the global environment is not available
         ! from the subroutine 'qs_energies_properties'.
         CALL cp_blacs_env_create(sub_env%blacs_env, sub_env%para_env)

         NULLIFY (fm_struct)

         DO ispin = 1, nspins
            NULLIFY (sub_env%mos_occ(ispin)%matrix)
            CALL cp_fm_get_info(mos_occ(ispin)%matrix, nrow_global=nao, ncol_global=nmo_occ)
            CALL cp_fm_struct_create(fm_struct, nrow_global=nao, ncol_global=nmo_occ, context=sub_env%blacs_env)
            CALL cp_fm_create(sub_env%mos_occ(ispin)%matrix, fm_struct)
            CALL cp_fm_struct_release(fm_struct)
            CALL tddfpt_fm_replicate_across_subgroups(fm_src=mos_occ(ispin)%matrix, &
                                                      fm_dest_sub=sub_env%mos_occ(ispin)%matrix, sub_env=sub_env)
         END DO

         IF (dft_control%do_admm) THEN
            CALL get_qs_env(qs_env, admm_env=admm_env)
            CALL cp_fm_get_info(admm_env%A, nrow_global=nao_aux, ncol_global=nao)
            CALL cp_fm_struct_create(fm_struct, nrow_global=nao_aux, ncol_global=nao, context=sub_env%blacs_env)
            CALL cp_fm_create(sub_env%admm_A, fm_struct)
            CALL cp_fm_struct_release(fm_struct)
            CALL tddfpt_fm_replicate_across_subgroups(fm_src=admm_env%A, fm_dest_sub=sub_env%admm_A, sub_env=sub_env)
         END IF
      ELSE
         CALL cp_para_env_retain(para_env_global)
         sub_env%para_env => para_env_global

         CALL cp_blacs_env_retain(blacs_env_global)
         sub_env%blacs_env => blacs_env_global

         DO ispin = 1, nspins
            CALL cp_fm_retain(mos_occ(ispin)%matrix)
            sub_env%mos_occ(ispin)%matrix => mos_occ(ispin)%matrix
         END DO

         IF (dft_control%do_admm) THEN
            CALL get_qs_env(qs_env, admm_env=admm_env)
            CALL cp_fm_retain(admm_env%A)
            sub_env%admm_A => admm_env%A
         END IF
      END IF

      IF (kernel == tddfpt_kernel_full) THEN
         ! ++ allocate a new plane wave environment
         sub_env%is_mgrid = sub_env%is_split .OR. tddfpt_control%mgrid_is_explicit

         NULLIFY (sub_env%dbcsr_dist, sub_env%dist_2d)
         NULLIFY (sub_env%sab_orb, sub_env%sab_aux_fit)
         NULLIFY (sub_env%task_list_orb, sub_env%task_list_aux_fit)

         IF (sub_env%is_mgrid) THEN
            IF (tddfpt_control%mgrid_is_explicit) &
               CALL init_tddfpt_mgrid(qs_control, tddfpt_control, mgrid_saved)

            NULLIFY (sub_env%pw_env)

            CALL pw_env_create(sub_env%pw_env)
            CALL pw_env_rebuild(sub_env%pw_env, qs_env, sub_env%para_env)

            CALL tddfpt_build_distribution_2d(distribution_2d=sub_env%dist_2d, dbcsr_dist=sub_env%dbcsr_dist, &
                                              blacs_env=sub_env%blacs_env, qs_env=qs_env)
            CALL tddfpt_build_tasklist(task_list=sub_env%task_list_orb, sab=sub_env%sab_orb, basis_type="ORB", &
                                       distribution_2d=sub_env%dist_2d, pw_env=sub_env%pw_env, qs_env=qs_env, &
                                       skip_load_balance=qs_control%skip_load_balance_distributed, &
                                       reorder_grid_ranks=.TRUE.)

            IF (dft_control%do_admm) &
               CALL tddfpt_build_tasklist(task_list=sub_env%task_list_aux_fit, sab=sub_env%sab_aux_fit, &
                                          basis_type="AUX_FIT", distribution_2d=sub_env%dist_2d, &
                                          pw_env=sub_env%pw_env, qs_env=qs_env, &
                                          skip_load_balance=qs_control%skip_load_balance_distributed, &
                                          reorder_grid_ranks=.FALSE.)

            IF (tddfpt_control%mgrid_is_explicit) &
               CALL restore_qs_mgrid(qs_control, mgrid_saved)
         ELSE
            CALL pw_env_retain(pw_env_global)
            sub_env%pw_env => pw_env_global

            CALL get_qs_env(qs_env, dbcsr_dist=sub_env%dbcsr_dist, &
                            sab_orb=sub_env%sab_orb, task_list=sub_env%task_list_orb)

            IF (dft_control%do_admm) &
               CALL get_qs_env(qs_env, sab_aux_fit=sub_env%sab_aux_fit, task_list_aux_fit=sub_env%task_list_aux_fit)
         END IF

      ELSE IF (kernel == tddfpt_kernel_stda) THEN
         sub_env%is_mgrid = .FALSE.
         NULLIFY (sub_env%dbcsr_dist, sub_env%dist_2d)
         NULLIFY (sub_env%sab_orb, sub_env%sab_aux_fit)
         NULLIFY (sub_env%task_list_orb, sub_env%task_list_aux_fit)
         NULLIFY (sub_env%pw_env)
         IF (sub_env%is_split) THEN
            CALL tddfpt_build_distribution_2d(distribution_2d=sub_env%dist_2d, dbcsr_dist=sub_env%dbcsr_dist, &
                                              blacs_env=sub_env%blacs_env, qs_env=qs_env)
            ! maybe we don't need the task_list, just sab_orb
            CALL tddfpt_build_tasklist(task_list=sub_env%task_list_orb, sab=sub_env%sab_orb, basis_type="ORB", &
                                       distribution_2d=sub_env%dist_2d, pw_env=sub_env%pw_env, qs_env=qs_env, &
                                       skip_load_balance=qs_control%skip_load_balance_distributed, &
                                       reorder_grid_ranks=.TRUE.)
            CPABORT('subsys missing')
         ELSE
            CALL get_qs_env(qs_env, dbcsr_dist=sub_env%dbcsr_dist, sab_orb=sub_env%sab_orb)
         END IF
      ELSE
         CPABORT("Unknown kernel type")
      END IF

      CALL timestop(handle)

   END SUBROUTINE tddfpt_sub_env_init

! **************************************************************************************************
!> \brief Release the parallel group environment.
!> \param sub_env parallel group environment (modified on exit)
!> \par History
!>   * 01.2017 created [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_sub_env_release(sub_env)
      TYPE(tddfpt_subgroup_env_type), INTENT(inout)      :: sub_env

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_sub_env_release', &
         routineP = moduleN//':'//routineN

      INTEGER                                            :: handle, i

      CALL timeset(routineN, handle)

      IF (sub_env%is_mgrid) THEN
         IF (ASSOCIATED(sub_env%task_list_aux_fit)) &
            CALL deallocate_task_list(sub_env%task_list_aux_fit)

         IF (ASSOCIATED(sub_env%task_list_orb)) &
            CALL deallocate_task_list(sub_env%task_list_orb)

         CALL release_neighbor_list_sets(sub_env%sab_aux_fit)
         CALL release_neighbor_list_sets(sub_env%sab_orb)

         IF (ASSOCIATED(sub_env%dbcsr_dist)) THEN
            CALL dbcsr_distribution_release(sub_env%dbcsr_dist)
            DEALLOCATE (sub_env%dbcsr_dist)
         END IF

         IF (ASSOCIATED(sub_env%dist_2d)) &
            CALL distribution_2d_release(sub_env%dist_2d)
      END IF

      ! if a TDDFPT-specific plane-wave environment has not been requested,
      ! the pointers sub_env%dbcsr_dist, sub_env%sab_*, and sub_env%task_list_*
      ! point to the corresponding ground-state variables from qs_env
      ! and should not be deallocated

      CALL pw_env_release(sub_env%pw_env)

      sub_env%is_mgrid = .FALSE.

      IF (ASSOCIATED(sub_env%admm_A)) &
         CALL cp_fm_release(sub_env%admm_A)

      DO i = SIZE(sub_env%mos_occ), 1, -1
         CALL cp_fm_release(sub_env%mos_occ(i)%matrix)
      END DO
      DEALLOCATE (sub_env%mos_occ)

      CALL cp_blacs_env_release(sub_env%blacs_env)
      CALL cp_para_env_release(sub_env%para_env)

      ! there is no need to call mp_comm_free(), as the MPI sub-communicator sub_env%mpi_comm
      ! has already been released by the subroutine cp_para_env_release()
      IF (ALLOCATED(sub_env%group_distribution)) &
         DEALLOCATE (sub_env%group_distribution)

      sub_env%is_split = .FALSE.

      CALL timestop(handle)

   END SUBROUTINE tddfpt_sub_env_release
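
! **************************************************************************************************
!> \note Ownership sketch (descriptive note, not part of the API): when the communicator has not
!>       been split, the components of sub_env are retained references to the corresponding
!>       global objects (via cp_para_env_retain(), cp_blacs_env_retain(), cp_fm_retain()), so
!>       tddfpt_sub_env_release() merely decrements their reference counters rather than
!>       deallocating them.
! **************************************************************************************************
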
! **************************************************************************************************
!> \brief Replace the global multi-grid related parameters in qs_control with the ones given in the
!>        TDDFPT/MGRID subsection. The original parameters are stored in the 'mgrid_saved'
!>        variable.
!> \param qs_control     Quickstep control parameters (modified on exit)
!> \param tddfpt_control TDDFPT control parameters
!> \param mgrid_saved    structure to hold the global MGRID-related parameters (initialised on exit)
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
!> \note the code to build the 'e_cutoff' list was taken from the subroutine read_mgrid_section()
! **************************************************************************************************
   SUBROUTINE init_tddfpt_mgrid(qs_control, tddfpt_control, mgrid_saved)
      TYPE(qs_control_type), POINTER                     :: qs_control
      TYPE(tddfpt2_control_type), POINTER                :: tddfpt_control
      TYPE(mgrid_saved_parameters), INTENT(out)          :: mgrid_saved

      CHARACTER(LEN=*), PARAMETER :: routineN = 'init_tddfpt_mgrid', &
         routineP = moduleN//':'//routineN

      INTEGER                                            :: handle, igrid, ngrids

      CALL timeset(routineN, handle)

      ! ++ save the global plane-wave grid parameters to the variable 'mgrid_saved'
      mgrid_saved%commensurate_mgrids = qs_control%commensurate_mgrids
      mgrid_saved%realspace_mgrids = qs_control%realspace_mgrids
      mgrid_saved%skip_load_balance = qs_control%skip_load_balance_distributed
      mgrid_saved%cutoff = qs_control%cutoff
      mgrid_saved%progression_factor = qs_control%progression_factor
      mgrid_saved%relative_cutoff = qs_control%relative_cutoff
      mgrid_saved%e_cutoff => qs_control%e_cutoff

      ! ++ set the parameters from 'tddfpt_control' as defaults for all newly allocated plane-wave grids
      qs_control%commensurate_mgrids = tddfpt_control%mgrid_commensurate_mgrids
      qs_control%realspace_mgrids = tddfpt_control%mgrid_realspace_mgrids
      qs_control%skip_load_balance_distributed = tddfpt_control%mgrid_skip_load_balance
      qs_control%cutoff = tddfpt_control%mgrid_cutoff
      qs_control%progression_factor = tddfpt_control%mgrid_progression_factor
      qs_control%relative_cutoff = tddfpt_control%mgrid_relative_cutoff

      ngrids = tddfpt_control%mgrid_ngrids
      ALLOCATE (qs_control%e_cutoff(ngrids))
      IF (ASSOCIATED(tddfpt_control%mgrid_e_cutoff)) THEN
         ! following read_mgrid_section(), a 'magic' scale factor of 0.5_dp is applied here
         DO igrid = 1, ngrids
            qs_control%e_cutoff(igrid) = tddfpt_control%mgrid_e_cutoff(igrid)*0.5_dp
         END DO
         ! ++ round 'qs_control%cutoff' upward to the nearest sub-grid's cutoff value;
         !    here we take advantage of the fact that the array 'e_cutoff' is sorted in descending order
         DO igrid = ngrids, 1, -1
            IF (qs_control%cutoff <= qs_control%e_cutoff(igrid)) THEN
               qs_control%cutoff = qs_control%e_cutoff(igrid)
               EXIT
            END IF
         END DO
         ! igrid == 0 if qs_control%cutoff is larger than the largest explicitly given cutoff value;
         ! in this case use the largest available value
         IF (igrid <= 0) &
            qs_control%cutoff = qs_control%e_cutoff(1)
      ELSE
         qs_control%e_cutoff(1) = qs_control%cutoff
         DO igrid = 2, ngrids
            qs_control%e_cutoff(igrid) = qs_control%e_cutoff(igrid - 1)/qs_control%progression_factor
         END DO
      END IF

      CALL timestop(handle)
   END SUBROUTINE init_tddfpt_mgrid
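
! **************************************************************************************************
!> \note Illustrative example (arbitrary values, not defaults): with ngrids = 4, cutoff = 300.0
!>       and progression_factor = 3.0 and no explicit cutoff list, the ELSE branch of
!>       init_tddfpt_mgrid() yields e_cutoff = (/300.0, 100.0, 33.33..., 11.11.../), i.e. each
!>       sub-grid is coarser than the previous one by the progression factor.
! **************************************************************************************************
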
! **************************************************************************************************
!> \brief Restore the global multi-grid related parameters stored in the 'mgrid_saved' variable.
!> \param qs_control  Quickstep control parameters (modified on exit)
!> \param mgrid_saved structure that holds the global MGRID-related parameters
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE restore_qs_mgrid(qs_control, mgrid_saved)
      TYPE(qs_control_type), POINTER                     :: qs_control
      TYPE(mgrid_saved_parameters), INTENT(in)           :: mgrid_saved

      CHARACTER(LEN=*), PARAMETER :: routineN = 'restore_qs_mgrid', &
         routineP = moduleN//':'//routineN

      INTEGER                                            :: handle

      CALL timeset(routineN, handle)

      IF (ASSOCIATED(qs_control%e_cutoff)) &
         DEALLOCATE (qs_control%e_cutoff)

      qs_control%commensurate_mgrids = mgrid_saved%commensurate_mgrids
      qs_control%realspace_mgrids = mgrid_saved%realspace_mgrids
      qs_control%skip_load_balance_distributed = mgrid_saved%skip_load_balance
      qs_control%cutoff = mgrid_saved%cutoff
      qs_control%progression_factor = mgrid_saved%progression_factor
      qs_control%relative_cutoff = mgrid_saved%relative_cutoff
      qs_control%e_cutoff => mgrid_saved%e_cutoff

      CALL timestop(handle)
   END SUBROUTINE restore_qs_mgrid
! **************************************************************************************************
!> \brief Distribute atoms across the two-dimensional grid of processors.
!> \param distribution_2d new two-dimensional distribution of pairs of particles
!>                        (allocated and initialised on exit)
!> \param dbcsr_dist      new DBCSR distribution (allocated and initialised on exit)
!> \param blacs_env       BLACS parallel environment
!> \param qs_env          Quickstep environment
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_build_distribution_2d(distribution_2d, dbcsr_dist, blacs_env, qs_env)
      TYPE(distribution_2d_type), POINTER                :: distribution_2d
      TYPE(dbcsr_distribution_type), POINTER             :: dbcsr_dist
      TYPE(cp_blacs_env_type), POINTER                   :: blacs_env
      TYPE(qs_environment_type), POINTER                 :: qs_env

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_build_distribution_2d', &
         routineP = moduleN//':'//routineN

      INTEGER                                            :: handle
      TYPE(atomic_kind_type), DIMENSION(:), POINTER      :: atomic_kind_set
      TYPE(cell_type), POINTER                           :: cell
      TYPE(molecule_kind_type), DIMENSION(:), POINTER    :: molecule_kind_set
      TYPE(molecule_type), DIMENSION(:), POINTER         :: molecule_set
      TYPE(particle_type), DIMENSION(:), POINTER         :: particle_set
      TYPE(qs_kind_type), DIMENSION(:), POINTER          :: qs_kind_set
      TYPE(section_vals_type), POINTER                   :: input

      CALL timeset(routineN, handle)

      CALL get_qs_env(qs_env, atomic_kind_set=atomic_kind_set, cell=cell, input=input, &
                      molecule_kind_set=molecule_kind_set, molecule_set=molecule_set, &
                      particle_set=particle_set, qs_kind_set=qs_kind_set)

      NULLIFY (distribution_2d)
      CALL distribute_molecules_2d(cell=cell, &
                                   atomic_kind_set=atomic_kind_set, &
                                   particle_set=particle_set, &
                                   qs_kind_set=qs_kind_set, &
                                   molecule_kind_set=molecule_kind_set, &
                                   molecule_set=molecule_set, &
                                   distribution_2d=distribution_2d, &
                                   blacs_env=blacs_env, &
                                   force_env_section=input)

      ALLOCATE (dbcsr_dist)
      CALL cp_dbcsr_dist2d_to_dist(distribution_2d, dbcsr_dist)

      CALL timestop(handle)
   END SUBROUTINE tddfpt_build_distribution_2d
! **************************************************************************************************
!> \brief Build task and neighbour lists for the given plane wave environment and basis set.
!> \param task_list          new task list (allocated and initialised on exit)
!> \param sab                new list of neighbours (allocated and initialised on exit)
!> \param basis_type         type of the basis set
!> \param distribution_2d    two-dimensional distribution of pairs of particles
!> \param pw_env             plane wave environment
!> \param qs_env             Quickstep environment
!> \param skip_load_balance  do not perform load balancing
!> \param reorder_grid_ranks re-optimise grid ranks and re-create the real-space grid descriptor
!>                           as well as the grids
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_build_tasklist(task_list, sab, basis_type, distribution_2d, pw_env, qs_env, &
                                    skip_load_balance, reorder_grid_ranks)
      TYPE(task_list_type), POINTER                      :: task_list
      TYPE(neighbor_list_set_p_type), DIMENSION(:), &
         POINTER                                         :: sab
      CHARACTER(len=*), INTENT(in)                       :: basis_type
      TYPE(distribution_2d_type), POINTER                :: distribution_2d
      TYPE(pw_env_type), POINTER                         :: pw_env
      TYPE(qs_environment_type), POINTER                 :: qs_env
      LOGICAL, INTENT(in)                                :: skip_load_balance, reorder_grid_ranks

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_build_tasklist', &
         routineP = moduleN//':'//routineN

      INTEGER                                            :: handle, ikind, nkinds
      LOGICAL, ALLOCATABLE, DIMENSION(:)                 :: orb_present
      REAL(kind=dp)                                      :: subcells
      REAL(kind=dp), ALLOCATABLE, DIMENSION(:)           :: orb_radius
      REAL(kind=dp), ALLOCATABLE, DIMENSION(:, :)        :: pair_radius
      TYPE(atomic_kind_type), DIMENSION(:), POINTER      :: atomic_kind_set
      TYPE(cell_type), POINTER                           :: cell
      TYPE(distribution_1d_type), POINTER                :: local_particles
      TYPE(gto_basis_set_type), POINTER                  :: orb_basis_set
      TYPE(local_atoms_type), ALLOCATABLE, DIMENSION(:)  :: atom2d
      TYPE(molecule_type), DIMENSION(:), POINTER         :: molecule_set
      TYPE(particle_type), DIMENSION(:), POINTER         :: particle_set
      TYPE(qs_kind_type), DIMENSION(:), POINTER          :: qs_kind_set
      TYPE(qs_ks_env_type), POINTER                      :: ks_env
      TYPE(section_vals_type), POINTER                   :: input

      CALL timeset(routineN, handle)

      CALL get_qs_env(qs_env, atomic_kind_set=atomic_kind_set, cell=cell, input=input, &
                      ks_env=ks_env, local_particles=local_particles, molecule_set=molecule_set, &
                      particle_set=particle_set, qs_kind_set=qs_kind_set)

      nkinds = SIZE(atomic_kind_set)

      ALLOCATE (atom2d(nkinds))
      CALL atom2d_build(atom2d, local_particles, distribution_2d, atomic_kind_set, &
                        molecule_set, molecule_only=.FALSE., particle_set=particle_set)

      ALLOCATE (orb_present(nkinds))
      ALLOCATE (orb_radius(nkinds))
      ALLOCATE (pair_radius(nkinds, nkinds))

      DO ikind = 1, nkinds
         CALL get_qs_kind(qs_kind_set(ikind), basis_set=orb_basis_set, basis_type=basis_type)
         IF (ASSOCIATED(orb_basis_set)) THEN
            orb_present(ikind) = .TRUE.
            CALL get_gto_basis_set(gto_basis_set=orb_basis_set, kind_radius=orb_radius(ikind))
         ELSE
            orb_present(ikind) = .FALSE.
            orb_radius(ikind) = 0.0_dp
         END IF
      END DO

      CALL pair_radius_setup(orb_present, orb_present, orb_radius, orb_radius, pair_radius)

      NULLIFY (sab)
      CALL section_vals_val_get(input, "DFT%SUBCELLS", r_val=subcells)
      CALL build_neighbor_lists(sab, particle_set, atom2d, cell, pair_radius, &
                                mic=.FALSE., subcells=subcells, molecular=.FALSE., nlname="sab_orb")

      CALL atom2d_cleanup(atom2d)
      DEALLOCATE (atom2d, orb_present, orb_radius, pair_radius)

      CALL allocate_task_list(task_list)
      CALL generate_qs_task_list(ks_env, task_list, &
                                 reorder_rs_grid_ranks=reorder_grid_ranks, soft_valid=.FALSE., &
                                 basis_type=basis_type, skip_load_balance_distributed=skip_load_balance, &
                                 pw_env_external=pw_env, sab_orb_external=sab)

      CALL timestop(handle)
   END SUBROUTINE tddfpt_build_tasklist
! **************************************************************************************************
!> \brief Create a DBCSR matrix based on a template matrix, a distribution object, and a list of
!>        neighbours.
!> \param matrix     matrix to create
!> \param template   template matrix
!> \param dbcsr_dist DBCSR distribution
!> \param sab        list of neighbours
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_dbcsr_create_by_dist(matrix, template, dbcsr_dist, sab)
      TYPE(dbcsr_type), POINTER                          :: matrix, template
      TYPE(dbcsr_distribution_type), POINTER             :: dbcsr_dist
      TYPE(neighbor_list_set_p_type), DIMENSION(:), &
         POINTER                                         :: sab

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_dbcsr_create_by_dist', &
         routineP = moduleN//':'//routineN

      CHARACTER                                          :: matrix_type
      CHARACTER(len=default_string_length)               :: matrix_name
      INTEGER                                            :: handle
      INTEGER, DIMENSION(:), POINTER                     :: col_blk_sizes, row_blk_sizes

      CALL timeset(routineN, handle)

      CPASSERT(ASSOCIATED(template))
      CALL dbcsr_get_info(template, row_blk_size=row_blk_sizes, col_blk_size=col_blk_sizes, &
                          name=matrix_name, matrix_type=matrix_type)

      IF (ASSOCIATED(matrix)) THEN
         CALL dbcsr_release(matrix)
      ELSE
         ALLOCATE (matrix)
      END IF

      CALL dbcsr_create(matrix, matrix_name, dbcsr_dist, matrix_type, row_blk_sizes, col_blk_sizes, nze=0)
      CALL cp_dbcsr_alloc_block_from_nbl(matrix, sab)

      CALL timestop(handle)

   END SUBROUTINE tddfpt_dbcsr_create_by_dist
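
! **************************************************************************************************
!> \note Illustrative usage sketch (the pointers 'matrix_s_sub' and 'matrix_s_global' are
!>       hypothetical and not part of this module): a subgroup-local matrix is created with the
!>       block structure of the global template and the subgroup's own distribution:
!>
!>          TYPE(dbcsr_type), POINTER :: matrix_s_sub
!>          NULLIFY (matrix_s_sub)
!>          CALL tddfpt_dbcsr_create_by_dist(matrix_s_sub, template=matrix_s_global, &
!>                                           dbcsr_dist=sub_env%dbcsr_dist, sab=sub_env%sab_orb)
! **************************************************************************************************
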
! **************************************************************************************************
!> \brief Replicate a globally distributed matrix across all sub-groups. At the end
!>        every sub-group holds a local copy of the original globally distributed matrix.
!>
!>                            |--------------------|
!>                  fm_src    |  0    1    2    3  |
!>                            |--------------------|
!>                                /  MPI ranks  \
!>                              |/_            _\|
!>          |--------------------|    |--------------------|
!>          |     0        1     |    |     2        3     |
!>          |--------------------|    |--------------------|
!>            fm_dest_subgroup0         fm_dest_subgroup1
!>                subgroup 0                subgroup 1
!>
!> \param fm_src      globally distributed matrix to replicate
!> \param fm_dest_sub subgroup-specific copy of the replicated matrix
!> \param sub_env     subgroup environment
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_fm_replicate_across_subgroups(fm_src, fm_dest_sub, sub_env)
      TYPE(cp_fm_type), POINTER                          :: fm_src, fm_dest_sub
      TYPE(tddfpt_subgroup_env_type), INTENT(in)         :: sub_env

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_fm_replicate_across_subgroups', &
         routineP = moduleN//':'//routineN

      INTEGER :: handle, igroup, igroup_local, ncols_global_dest, ncols_global_src, ngroups, &
                 nrows_global_dest, nrows_global_src
      TYPE(cp_blacs_env_type), POINTER                   :: blacs_env_global
      TYPE(cp_fm_type), POINTER                          :: fm_null
      TYPE(cp_para_env_type), POINTER                    :: para_env_global

      IF (sub_env%is_split) THEN
         CALL timeset(routineN, handle)

         CALL cp_fm_get_info(fm_src, nrow_global=nrows_global_src, ncol_global=ncols_global_src, &
                             context=blacs_env_global, para_env=para_env_global)
         CALL cp_fm_get_info(fm_dest_sub, nrow_global=nrows_global_dest, ncol_global=ncols_global_dest)

         IF (debug_this_module) THEN
            CPASSERT(nrows_global_src == nrows_global_dest)
            CPASSERT(ncols_global_src == ncols_global_dest)
         END IF

         NULLIFY (fm_null)
         igroup_local = sub_env%group_distribution(para_env_global%mepos)
         ngroups = sub_env%ngroups

         ! cp_fm_copy_general() is collective over the global parallel environment, so the
         ! subgroups take turns: ranks outside the currently addressed subgroup still have to
         ! participate in the copy and pass a null destination matrix
         DO igroup = 0, ngroups - 1
            IF (igroup == igroup_local) THEN
               CALL cp_fm_copy_general(fm_src, fm_dest_sub, para_env_global)
            ELSE
               CALL cp_fm_copy_general(fm_src, fm_null, para_env_global)
            END IF
         END DO

         CALL timestop(handle)
      END IF
   END SUBROUTINE tddfpt_fm_replicate_across_subgroups
END MODULE qs_tddfpt2_subgroups