!
!  Dalton, a molecular electronic structure program
!  Copyright (C) by the authors of Dalton.
!
!  This program is free software; you can redistribute it and/or
!  modify it under the terms of the GNU Lesser General Public
!  License version 2.1 as published by the Free Software Foundation.
!
!  This program is distributed in the hope that it will be useful,
!  but WITHOUT ANY WARRANTY; without even the implied warranty of
!  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
!  Lesser General Public License for more details.
!
!  If a copy of the GNU LGPL v2.1 was not distributed with this
!  code, you can obtain one at https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html.
!
!
!...  This file contains most of the host-program-specific subroutines for the Gen1Int interface.
!
!...  2013-05-02, Bin Gao
!...  * fixed a bug returning wrong partial geometric derivatives
!
!...  2012-05-09, Bin Gao
!...  * first version

#include "gen1int_host.h"

  ! public subroutines

  !> \brief initializes the Gen1Int interface, for instance, creates the AO sub-shells
  !>        of the host program (based on the \fn(ORBPRO) subroutine, which provides the
  !>        unnormalized contraction coefficients); should be called before any calculation
  !> \author Bin Gao
  !> \date 2010-12-06
  !> \param num_comp is the number of components
  !> \param num_atom_type is the number of atomic types
  !> \param num_sym_atom contains the number of symmetry independent centers of each atomic type
  !> \param ang_numbers contains the angular momenta (1=s, 2=p, 3=d, ...)
  !> \param num_cgto contains the number of CGTOs in the AO blocks for a given angular momentum
  !> \param num_prim contains the number of uncontracted functions
  !> \param num_contr contains the number of contracted functions
  !> \param exponents contains the exponents of the primitive shells
  !> \param ucontr_coefs contains the unnormalized contraction coefficients
  subroutine gen1int_host_init(num_comp, num_atom_type, KATOM, num_sym_atom, &
                               ang_numbers, NBLCK, KANG, num_cgto, KBLOCK,   &
                               num_prim, num_contr, KPRIM, exponents,        &
                               ucontr_coefs)
    use gen1int_api
    implicit none
    integer, intent(in) :: num_comp
    integer, intent(in) :: num_atom_type
    integer, intent(in) :: KATOM
    integer, intent(in) :: num_sym_atom(KATOM)
    integer, intent(in) :: ang_numbers(KATOM,num_comp)
    integer, intent(in) :: NBLCK(KATOM,num_comp)
    integer, intent(in) :: KANG
    integer, intent(in) :: num_cgto(KANG,KATOM,num_comp)
    integer, intent(in) :: KBLOCK
    integer, intent(in) :: num_prim(KBLOCK,num_comp)
    integer, intent(in) :: num_contr(KBLOCK,num_comp)
    integer, intent(in) :: KPRIM
    real(REALK), intent(in) :: exponents(KPRIM,KBLOCK,num_comp)
    real(REALK), intent(in) :: ucontr_coefs(KPRIM,KPRIM,KBLOCK,num_comp)
    logical :: mpi_sync
#if defined(VAR_MPI)
#include "mpif.h"
#include "iprtyp.h"
    integer(kind=MPI_INTEGER_KIND) :: count_mpi, ierr_mpi
    integer(kind=MPI_INTEGER_KIND), parameter :: manager_mpi = MANAGER
    integer(kind=MPI_INTEGER_KIND), parameter :: my_MPI_COMM_WORLD = MPI_COMM_WORLD
#endif
    ! do not wake up the workers again if the interface has already been initialized;
    ! assigned here rather than initialized in the declaration, since an initialized
    ! local variable would implicitly acquire the SAVE attribute
    mpi_sync = .not. Gen1IntAPIInited()
    ! initializes API of Gen1Int
    call Gen1IntAPICreate(num_comp, num_atom_type, KATOM, num_sym_atom, &
                          ang_numbers, NBLCK, KANG, num_cgto, KBLOCK,   &
                          num_prim, num_contr, KPRIM, exponents, ucontr_coefs)
#if defined(VAR_MPI)
    if (mpi_sync) then
      ! wakes up workers
      count_mpi = 1
      call MPI_Bcast(GEN1INT_INIT, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
      ! broadcasts level of print
      call MPI_Bcast(0, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
      ! broadcasts information of API of Gen1Int
      call Gen1IntAPIBcast(manager_mpi, my_MPI_COMM_WORLD)
    end if
#endif
    return
  end subroutine gen1int_host_init
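
  ! A minimal calling sketch (illustrative only, not part of the interface):
  ! the dimensions and basis-set arrays come from the host program, and the
  ! names used below are hypothetical placeholders.
  !
  !   call gen1int_host_init(num_comp=1, num_atom_type=n_types, KATOM=max_atom, &
  !                          num_sym_atom=n_sym_atom, ang_numbers=ang_num,      &
  !                          NBLCK=n_block, KANG=max_ang, num_cgto=n_cgto,      &
  !                          KBLOCK=max_block, num_prim=n_prim,                 &
  !                          num_contr=n_contr, KPRIM=max_prim,                 &
  !                          exponents=expnt, ucontr_coefs=u_coefs)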

#if defined(VAR_MPI)
  !> \brief initializes the Gen1Int interface on worker processors, using the
  !>        AO sub-shell information broadcast by \fn(gen1int_host_init) on the
  !>        manager processor; should be called before any calculation
  !> \author Bin Gao
  !> \date 2012-05-13
  subroutine gen1int_worker_init
    use gen1int_api
    implicit none
#include "mpif.h"
    integer(kind=MPI_INTEGER_KIND), parameter :: manager_mpi = MANAGER
    integer(kind=MPI_INTEGER_KIND), parameter :: my_MPI_COMM_WORLD = MPI_COMM_WORLD
    ! initializes the API of Gen1Int with the information broadcast from the manager processor
    call Gen1IntAPIBcast(manager_mpi, my_MPI_COMM_WORLD)
    return
  end subroutine gen1int_worker_init
#endif

  !> \brief evaluates the one-electron property integral matrices on the manager processor
  !> \author Bin Gao
  !> \date 2012-05-09
  !> \param gto_type specifies the type of GTOs, should be either NON_LAO (non-London atomic
  !>        orbital), LONDON (London atomic orbital, LAO), or ROT_LAO (rotational LAO); only
  !>        NON_LAO is implemented
  !> \param prop_name is the name of the property integrals
  !> \param order_mom is the order of multipole moments
  !> \param order_elec is the order of electronic derivatives
  !> \param order_mag_bra is the order of partial magnetic derivatives on the bra center, not implemented
  !> \param order_mag_ket is the order of partial magnetic derivatives on the ket center, not implemented
  !> \param order_mag_total is the order of total magnetic derivatives, not implemented
  !> \param order_ram_bra is the order of partial derivatives w.r.t. the total rotational
  !>        angular momentum on the bra center, not implemented
  !> \param order_ram_ket is the order of partial derivatives w.r.t. the total rotational
  !>        angular momentum on the ket center, not implemented
  !> \param order_ram_total is the order of total derivatives w.r.t. the total rotational
  !>        angular momentum, not implemented
  !> \param max_ncent_bra is the maximum number of differentiated centers on the bra center
  !> \param order_geo_bra is the order of partial geometric derivatives on the bra center
  !> \param num_atoms_bra is the number of atoms selected as differentiated centers
  !>        on the bra center, <1 means using all atoms
  !> \param idx_atoms_bra contains the indices of the selected atoms on the bra center,
  !>        not used if \var(num_atoms_bra) is <1
  !> \param max_ncent_ket is the maximum number of differentiated centers on the ket center
  !> \param order_geo_ket is the order of partial geometric derivatives on the ket center
  !> \param num_atoms_ket is the number of atoms selected as differentiated centers
  !>        on the ket center, <1 means using all atoms
  !> \param idx_atoms_ket contains the indices of the selected atoms on the ket center,
  !>        not used if \var(num_atoms_ket) is <1
  !> \param max_num_cent is the maximum number of differentiated centers for total
  !>        geometric derivatives
  !> \param order_geo_total is the order of total geometric derivatives
  !> \param num_geo_atoms is the number of atoms selected as differentiated
  !>        centers, <1 means using all atoms
  !> \param idx_geo_atoms contains the indices of the selected atoms, not used
  !>        if \var(num_geo_atoms) is <1
  !> \param add_sr requests the scalar-relativistic (SR) correction, not implemented
  !> \param add_so requests the spin-orbit (SO) correction, not implemented
  !> \param add_london transforms the operator by the LAO-type gauge-including projector, not implemented
  !> \param num_ints is the number of property integral matrices including the various derivatives
  !> \param write_ints indicates whether the integral matrices are written to file
  !> \param nr_active_blocks is the number of active blocks of component pairs
  !> \param active_component_pairs contains the bra and ket components of the active blocks
  !> \param io_viewer is the logical unit number of the viewer
  !> \param level_print is the level of print
  !> \return val_ints contains the property integral matrices
  !> \note the arrangement of \var(val_ints) is in the order of \var(order_mom),
  !>       \var(order_mag_bra), ..., \var(order_geo_total), and each of them is arranged
  !>       in the order of (xx,xy,yy,xz,yz,zz) or (xx,yx,zx,xy,yy,zy,xz,yz,zz); see the
  !>       Gen1Int library manual, for instance Section 2.2.
  !>       Moreover, the "triangular" total geometric derivatives can be obtained
  !>       from the unique total geometric derivatives by setting \var(max_num_cent)=\var(order_geo_total)
  subroutine gen1int_host_get_int(gto_type, prop_name, order_mom, &
                                  order_elec,                     &
                                  order_mag_bra, order_mag_ket,   &
                                  order_mag_total,                &
                                  order_ram_bra, order_ram_ket,   &
                                  order_ram_total,                &
                                  max_ncent_bra, order_geo_bra,   &
                                  num_atoms_bra, idx_atoms_bra,   &
                                  max_ncent_ket, order_geo_ket,   &
                                  num_atoms_ket, idx_atoms_ket,   &
                                  max_num_cent, order_geo_total,  &
                                  num_geo_atoms, idx_geo_atoms,   &
                                  add_sr, add_so, add_london,     &
                                  num_ints, val_ints, write_ints, &
                                  nr_active_blocks,               &
                                  active_component_pairs,         &
                                  io_viewer, level_print)
    use gen1int_api
    implicit none
    integer,       intent(in)    :: gto_type
    character*(*), intent(in)    :: prop_name
    integer,       intent(in)    :: order_mom
    integer,       intent(in)    :: order_elec
    integer,       intent(in)    :: order_mag_bra
    integer,       intent(in)    :: order_mag_ket
    integer,       intent(in)    :: order_mag_total
    integer,       intent(in)    :: order_ram_bra
    integer,       intent(in)    :: order_ram_ket
    integer,       intent(in)    :: order_ram_total
    integer,       intent(in)    :: max_ncent_bra
    integer,       intent(in)    :: order_geo_bra
    integer,       intent(in)    :: num_atoms_bra
    integer,       intent(in)    :: idx_atoms_bra(*)
    integer,       intent(in)    :: max_ncent_ket
    integer,       intent(in)    :: order_geo_ket
    integer,       intent(in)    :: num_atoms_ket
    integer,       intent(in)    :: idx_atoms_ket(*)
    integer,       intent(in)    :: max_num_cent
    integer,       intent(in)    :: order_geo_total
    integer,       intent(in)    :: num_geo_atoms
    integer,       intent(in)    :: idx_geo_atoms(*)
    logical,       intent(in)    :: add_sr
    logical,       intent(in)    :: add_so
    logical,       intent(in)    :: add_london
    integer,       intent(in)    :: num_ints
    type(matrix),  intent(inout) :: val_ints(*)
    logical,       intent(in)    :: write_ints
    integer,       intent(in)    :: nr_active_blocks
    integer,       intent(in)    :: active_component_pairs(*)
    integer,       intent(in)    :: io_viewer
    integer,       intent(in)    :: level_print

    real(REALK) start_time             !start time
    type(prop_comp_t) prop_comp        !operator of property integrals with non-zero components
    type(nary_tree_t) nary_tree_bra    !N-ary tree for partial geometric derivatives on bra center
    type(nary_tree_t) nary_tree_ket    !N-ary tree for partial geometric derivatives on ket center
    type(nary_tree_t) nary_tree_total  !N-ary tree for total geometric derivatives
#if defined(VAR_MPI)
#include "mpif.h"
#include "iprtyp.h"
    integer(kind=MPI_INTEGER_KIND) :: count_mpi, ierr_mpi
    integer(kind=MPI_INTEGER_KIND), parameter :: manager_mpi = MANAGER
    integer(kind=MPI_INTEGER_KIND), parameter :: my_MPI_COMM_WORLD = MPI_COMM_WORLD
#endif
    ! gets the start time
    call xtimer_set(start_time)
#if defined(VAR_MPI)
    count_mpi = 1
    ! wakes up workers
    call MPI_Bcast(GEN1INT_GET_INT, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    ! broadcasts level of print
    call MPI_Bcast(level_print,     count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    ! broadcasts number of property integral matrices including various derivatives
    call MPI_Bcast(num_ints,        count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
#endif
    ! creates the operator of property integrals and N-ary tree for total
    ! geometric derivatives on manager processor, and broadcasts other input
    ! arguments to worker processors
    call gen1int_host_prop_create(gto_type, prop_name, order_mom, &
                                  order_elec,                     &
                                  order_mag_bra, order_mag_ket,   &
                                  order_mag_total,                &
                                  order_ram_bra, order_ram_ket,   &
                                  order_ram_total,                &
                                  add_sr, add_so, add_london,     &
                                  io_viewer, level_print,         &
                                  nr_active_blocks,               &
                                  active_component_pairs,         &
                                  prop_comp)
    call gen1int_host_geom_create(max_ncent_bra, order_geo_bra,   &
                                  num_atoms_bra, idx_atoms_bra,   &
                                  max_ncent_ket, order_geo_ket,   &
                                  num_atoms_ket, idx_atoms_ket,   &
                                  max_num_cent, order_geo_total,  &
                                  num_geo_atoms, idx_geo_atoms,   &
                                  io_viewer, level_print,         &
                                  nary_tree_bra, nary_tree_ket, nary_tree_total)
    ! performs calculations
    call Gen1IntAPIPropGetIntExpt(prop_comp=prop_comp,             &
                                  nary_tree_bra=nary_tree_bra,     &
                                  nary_tree_ket=nary_tree_ket,     &
                                  nary_tree_total=nary_tree_total, &
#if defined(VAR_MPI)
                                  api_comm_mpi=my_MPI_COMM_WORLD,  &
#endif
                                  num_ints=num_ints,               &
                                  val_ints=val_ints,               &
                                  write_ints=write_ints,           &
                                  num_dens=0,                      &
                                  io_viewer=io_viewer,             &
                                  level_print=level_print)
    ! free spaces
    call Gen1IntAPIPropDestroy(prop_comp=prop_comp)
    call Gen1IntAPINaryTreeDestroy(nary_tree=nary_tree_bra)
    call Gen1IntAPINaryTreeDestroy(nary_tree=nary_tree_ket)
    call Gen1IntAPINaryTreeDestroy(nary_tree=nary_tree_total)
#if defined(VAR_MPI)
    ! blocks until all processors have finished
    call MPI_Barrier(my_MPI_COMM_WORLD, ierr_mpi)
#endif
    ! prints the CPU elapsed time
    call xtimer_view(start_time, trim(prop_name)//"@gen1int_host_get_int", io_viewer)
    return
  end subroutine gen1int_host_get_int
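
  ! A hedged usage sketch (values illustrative): requesting the first-order
  ! total geometric derivatives of the overlap integrals, for which val_ints
  ! must hold 3*num_atoms matrices; the trailing (/1, 1/) component pair is
  ! the same one hardcoded for Dalton elsewhere in this file.
  !
  !   call gen1int_host_get_int(NON_LAO, "INT_OVERLAP", 0,      &  !no multipole moments
  !                             0, 0, 0, 0, 0, 0, 0,            &  !no electronic/magnetic/RAM derivatives
  !                             0, 0, 0, (/0/),                 &  !no partial derivatives on bra
  !                             0, 0, 0, (/0/),                 &  !no partial derivatives on ket
  !                             1, 1, 0, (/0/),                 &  !first-order total geometric derivatives, all atoms
  !                             .false., .false., .false.,      &
  !                             3*num_atoms, val_ints, .false., &
  !                             1, (/1, 1/),                    &
  !                             io_viewer, level_print)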

#if defined(VAR_MPI)
  !> \brief evaluates the one-electron property integral matrices on worker processors
  !> \author Bin Gao
  !> \date 2012-05-09
  !> \param len_work is the length of Dalton/Dirac workspace
  !> \param wrk_space is the Dalton/Dirac workspace
  !> \param io_viewer is the logical unit number of the viewer
  !> \param level_print is the level of print
  subroutine gen1int_worker_get_int(len_work, wrk_space, io_viewer, level_print)
    use gen1int_api
    implicit none
    integer, intent(in) :: len_work
    real(REALK), intent(inout) :: wrk_space(len_work)
    integer, intent(in) :: io_viewer
    integer, intent(in) :: level_print
    integer order_geo_bra              !order of geometric derivatives on bra center
    integer order_geo_ket              !order of geometric derivatives on ket center
    integer order_geo_total            !order of total geometric derivatives
    integer num_ints                   !number of property integral matrices including various derivatives
    type(prop_comp_t) prop_comp        !operator of property integrals with non-zero components
    type(nary_tree_t) nary_tree_bra    !N-ary tree for partial geometric derivatives on bra center
    type(nary_tree_t) nary_tree_ket    !N-ary tree for partial geometric derivatives on ket center
    type(nary_tree_t) nary_tree_total  !N-ary tree for total geometric derivatives
#include "mpif.h"
    integer(kind=MPI_INTEGER_KIND) :: count_mpi
    integer(kind=MPI_INTEGER_KIND) :: ierr_mpi !MPI error information
    integer(kind=MPI_INTEGER_KIND), parameter :: manager_mpi = MANAGER
    integer(kind=MPI_INTEGER_KIND), parameter :: my_MPI_COMM_WORLD = MPI_COMM_WORLD
    count_mpi   = 1
    ! gets the number of property integral matrices including various derivatives
    call MPI_Bcast(num_ints, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    ! creates the operator of property integrals and N-ary trees for geometric
    ! derivatives on worker processors by the arguments from manager
    call gen1int_worker_prop_create(io_viewer, level_print, prop_comp)
    call gen1int_worker_geom_create(io_viewer, level_print, nary_tree_bra, nary_tree_ket, nary_tree_total)
    ! performs calculations
    call Gen1IntAPIPropGetIntExpt(prop_comp=prop_comp,             &
                                  nary_tree_bra=nary_tree_bra,     &
                                  nary_tree_ket=nary_tree_ket,     &
                                  nary_tree_total=nary_tree_total, &
                                  api_comm_mpi=my_MPI_COMM_WORLD,  &
                                  num_ints=num_ints,               &
                                  num_dens=0,                      &
                                  io_viewer=io_viewer,             &
                                  level_print=level_print)
    ! free spaces
    call Gen1IntAPIPropDestroy(prop_comp=prop_comp)
    call Gen1IntAPINaryTreeDestroy(nary_tree=nary_tree_bra)
    call Gen1IntAPINaryTreeDestroy(nary_tree=nary_tree_ket)
    call Gen1IntAPINaryTreeDestroy(nary_tree=nary_tree_total)
    ! blocks until all processors have finished
    call MPI_Barrier(my_MPI_COMM_WORLD, ierr_mpi)
    return
  end subroutine gen1int_worker_get_int
#endif
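
  ! For reference, the manager/worker handshake used above: the manager
  ! broadcasts a task code (GEN1INT_GET_INT etc., from iprtyp.h) followed by
  ! the print level, and the worker main loop dispatches accordingly; a
  ! hypothetical sketch of such a dispatch (names illustrative):
  !
  !   call MPI_Bcast(task_code,   1, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
  !   call MPI_Bcast(level_print, 1, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
  !   select case(task_code)
  !   case(GEN1INT_GET_INT)
  !     call gen1int_worker_get_int(len_work, wrk_space, io_viewer, level_print)
  !   case(GEN1INT_GET_EXPT)
  !     call gen1int_worker_get_expt(len_work, wrk_space, io_viewer, level_print)
  !   end select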

  !> \brief evaluates the one-electron property expectation values on the manager processor
  !> \author Bin Gao
  !> \date 2012-05-09
  !> \param num_dens is the number of AO density matrices
  !> \param ao_dens contains the AO density matrices
  !> \param write_expt indicates whether the expectation values are written to file
  !> \param io_viewer is the logical unit number of the viewer
  !> \param level_print is the level of print
  !> \return val_expt contains the expectation values
  !> \note please see the comments of \fn(gen1int_host_get_int) for the other arguments;
  !>       \var(val_expt) should be zeroed by the user before the calculation
  subroutine gen1int_host_get_expt(gto_type, prop_name, order_mom, &
                                   order_elec,                     &
                                   order_mag_bra, order_mag_ket,   &
                                   order_mag_total,                &
                                   order_ram_bra, order_ram_ket,   &
                                   order_ram_total,                &
                                   max_ncent_bra, order_geo_bra,   &
                                   num_atoms_bra, idx_atoms_bra,   &
                                   max_ncent_ket, order_geo_ket,   &
                                   num_atoms_ket, idx_atoms_ket,   &
                                   max_num_cent, order_geo_total,  &
                                   num_geo_atoms, idx_geo_atoms,   &
                                   add_sr, add_so, add_london,     &
                                   num_dens, ao_dens,              &
                                   num_ints, val_expt, write_expt, &
                                   nr_active_blocks,               &
                                   active_component_pairs,         &
                                   io_viewer, level_print)
    use gen1int_api
    implicit none
    integer,       intent(in)    :: gto_type
    character*(*), intent(in)    :: prop_name
    integer,       intent(in)    :: order_mom
    integer,       intent(in)    :: order_elec
    integer,       intent(in)    :: order_mag_bra
    integer,       intent(in)    :: order_mag_ket
    integer,       intent(in)    :: order_mag_total
    integer,       intent(in)    :: order_ram_bra
    integer,       intent(in)    :: order_ram_ket
    integer,       intent(in)    :: order_ram_total
    integer,       intent(in)    :: max_ncent_bra
    integer,       intent(in)    :: order_geo_bra
    integer,       intent(in)    :: num_atoms_bra
    integer,       intent(in)    :: idx_atoms_bra(*)
    integer,       intent(in)    :: max_ncent_ket
    integer,       intent(in)    :: order_geo_ket
    integer,       intent(in)    :: num_atoms_ket
    integer,       intent(in)    :: idx_atoms_ket(*)
    integer,       intent(in)    :: max_num_cent
    integer,       intent(in)    :: order_geo_total
    integer,       intent(in)    :: num_geo_atoms
    integer,       intent(in)    :: idx_geo_atoms(*)
    logical,       intent(in)    :: add_sr
    logical,       intent(in)    :: add_so
    logical,       intent(in)    :: add_london
    integer,       intent(in)    :: num_dens
    type(matrix),  intent(inout) :: ao_dens(num_dens)
    integer,       intent(in)    :: num_ints
    real(REALK),   intent(inout) :: val_expt(num_ints*num_dens)
    logical,       intent(in)    :: write_expt
    integer,       intent(in)    :: nr_active_blocks
    integer,       intent(in)    :: active_component_pairs(*)
    integer,       intent(in)    :: io_viewer
    integer,       intent(in)    :: level_print

    real(REALK) start_time             !start time
    type(prop_comp_t) prop_comp        !operator of property integrals with non-zero components
    type(nary_tree_t) nary_tree_bra    !N-ary tree for partial geometric derivatives on bra center
    type(nary_tree_t) nary_tree_ket    !N-ary tree for partial geometric derivatives on ket center
    type(nary_tree_t) nary_tree_total  !N-ary tree for total geometric derivatives
    integer idens                      !incremental recorder over AO density matrices
#if defined(VAR_MPI)
#include "mpif.h"
#include "iprtyp.h"
    integer(kind=MPI_INTEGER_KIND) :: count_mpi
    integer(kind=MPI_INTEGER_KIND) :: ierr_mpi !MPI error information
    integer(kind=MPI_INTEGER_KIND), parameter :: manager_mpi = MANAGER
    integer(kind=MPI_INTEGER_KIND), parameter :: my_MPI_COMM_WORLD = MPI_COMM_WORLD
#endif
    ! gets the start time
    call xtimer_set(start_time)
#if defined(VAR_MPI)
    count_mpi   = 1
    ! wakes up workers
    call MPI_Bcast(GEN1INT_GET_EXPT, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    ! broadcasts level of print
    call MPI_Bcast(level_print, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    ! broadcasts the number of AO density matrices
    call MPI_Bcast(num_dens, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    ! broadcasts AO density matrices
    do idens = 1, num_dens
      call MatBcast(ao_dens(idens), manager_mpi, my_MPI_COMM_WORLD)
    end do
    ! broadcasts the number of property integral matrices including various derivatives
    call MPI_Bcast(num_ints, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
#endif
    ! creates the operator of property integrals and N-ary tree for total
    ! geometric derivatives on manager processor, and broadcasts other input
    ! arguments to worker processors
    call gen1int_host_prop_create(gto_type, prop_name, order_mom, &
                                  order_elec,                     &
                                  order_mag_bra, order_mag_ket,   &
                                  order_mag_total,                &
                                  order_ram_bra, order_ram_ket,   &
                                  order_ram_total,                &
                                  add_sr, add_so, add_london,     &
                                  io_viewer, level_print,         &
                                  nr_active_blocks,               &
                                  active_component_pairs,         &
                                  prop_comp)
    call gen1int_host_geom_create(max_ncent_bra, order_geo_bra,   &
                                  num_atoms_bra, idx_atoms_bra,   &
                                  max_ncent_ket, order_geo_ket,   &
                                  num_atoms_ket, idx_atoms_ket,   &
                                  max_num_cent, order_geo_total,  &
                                  num_geo_atoms, idx_geo_atoms,   &
                                  io_viewer, level_print,         &
                                  nary_tree_bra, nary_tree_ket, nary_tree_total)
    ! performs calculations
    call Gen1IntAPIPropGetIntExpt(prop_comp=prop_comp,             &
                                  nary_tree_bra=nary_tree_bra,     &
                                  nary_tree_ket=nary_tree_ket,     &
                                  nary_tree_total=nary_tree_total, &
#if defined(VAR_MPI)
                                  api_comm_mpi=my_MPI_COMM_WORLD,  &
#endif
                                  num_dens=num_dens,               &
                                  ao_dens=ao_dens,                 &
                                  num_ints=num_ints,               &
                                  val_expt=val_expt,               &
                                  write_expt=write_expt,           &
                                  io_viewer=io_viewer,             &
                                  level_print=level_print)
    ! free spaces
    call Gen1IntAPIPropDestroy(prop_comp=prop_comp)
    call Gen1IntAPINaryTreeDestroy(nary_tree=nary_tree_bra)
    call Gen1IntAPINaryTreeDestroy(nary_tree=nary_tree_ket)
    call Gen1IntAPINaryTreeDestroy(nary_tree=nary_tree_total)
#if defined(VAR_MPI)
    ! blocks until all processors have finished
    call MPI_Barrier(my_MPI_COMM_WORLD, ierr_mpi)
#endif
    ! prints the CPU elapsed time
    call xtimer_view(start_time, trim(prop_name)//"@gen1int_host_get_expt", io_viewer)
    return
  end subroutine gen1int_host_get_expt
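
  ! Illustrative use (hypothetical values): the expectation values of the
  ! three first-order Cartesian multipole (dipole) integrals for one AO
  ! density matrix; val_expt must be zeroed by the caller, see the note above.
  !
  !   val_expt(1:3) = 0.0_8
  !   call gen1int_host_get_expt(NON_LAO, "INT_CART_MULTIPOLE", 1, &  !first-order multipole moments
  !                              0, 0, 0, 0, 0, 0, 0,              &
  !                              0, 0, 0, (/0/), 0, 0, 0, (/0/),   &
  !                              0, 0, 0, (/0/),                   &
  !                              .false., .false., .false.,        &
  !                              1, ao_dens, 3, val_expt, .false., &
  !                              1, (/1, 1/), io_viewer, level_print)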

#if defined(VAR_MPI)
  !> \brief evaluates the one-electron property expectation values on worker processors
  !> \author Bin Gao
  !> \date 2012-05-09
  !> \param len_work is the length of Dalton/Dirac workspace
  !> \param wrk_space is the Dalton/Dirac workspace
  !> \param io_viewer is the logical unit number of the viewer
  !> \param level_print is the level of print
  subroutine gen1int_worker_get_expt(len_work, wrk_space, io_viewer, level_print)
    use gen1int_api
    implicit none
    integer, intent(in) :: len_work
    real(REALK), intent(inout) :: wrk_space(len_work)
    integer, intent(in) :: io_viewer
    integer, intent(in) :: level_print
    integer order_geo_total                  !order of total geometric derivatives
    integer num_ints                         !number of property integral matrices including various derivatives
    integer num_dens                         !number of AO density matrices
    type(matrix), allocatable :: ao_dens(:)  !AO density matrices
    integer idens                            !incremental recorder over AO density matrices
    type(prop_comp_t) prop_comp              !operator of property integrals with non-zero components
    type(nary_tree_t) nary_tree_bra          !N-ary tree for partial geometric derivatives on bra center
    type(nary_tree_t) nary_tree_ket          !N-ary tree for partial geometric derivatives on ket center
    type(nary_tree_t) nary_tree_total        !N-ary tree for total geometric derivatives
#include "mpif.h"
    integer(kind=MPI_INTEGER_KIND) :: count_mpi, ierr_mpi
    integer(kind=MPI_INTEGER_KIND), parameter :: manager_mpi = MANAGER
    integer(kind=MPI_INTEGER_KIND), parameter :: my_MPI_COMM_WORLD = MPI_COMM_WORLD
    integer                        :: ierr
    count_mpi = 1
    ! gets the number of AO density matrices
    call MPI_Bcast(num_dens, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    ! allocates memory for AO density matrices
    allocate(ao_dens(num_dens), stat=ierr)
    if (ierr/=0) then
      call quit("gen1int_worker_get_expt>> failed to allocate ao_dens!")
    end if
    ! gets AO density matrices
    do idens = 1, num_dens
      call MatBcast(ao_dens(idens), manager_mpi, my_MPI_COMM_WORLD)
    end do
    ! gets the number of property integral matrices including various derivatives
    call MPI_Bcast(num_ints, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    ! creates the operator of property integrals and N-ary trees for geometric
    ! derivatives on worker processors by the arguments from manager
    call gen1int_worker_prop_create(io_viewer, level_print, prop_comp)
    call gen1int_worker_geom_create(io_viewer, level_print, nary_tree_bra, nary_tree_ket, nary_tree_total)
    ! performs calculations
    call Gen1IntAPIPropGetIntExpt(prop_comp=prop_comp,             &
                                  nary_tree_bra=nary_tree_bra,     &
                                  nary_tree_ket=nary_tree_ket,     &
                                  nary_tree_total=nary_tree_total, &
                                  api_comm_mpi=my_MPI_COMM_WORLD,  &
                                  num_dens=num_dens,               &
                                  ao_dens=ao_dens,                 &
                                  num_ints=num_ints,               &
                                  io_viewer=io_viewer,             &
                                  level_print=level_print)
    ! free spaces
    call Gen1IntAPIPropDestroy(prop_comp=prop_comp)
    call Gen1IntAPINaryTreeDestroy(nary_tree=nary_tree_bra)
    call Gen1IntAPINaryTreeDestroy(nary_tree=nary_tree_ket)
    call Gen1IntAPINaryTreeDestroy(nary_tree=nary_tree_total)
    do idens = 1, num_dens
      call MatDestroy(A=ao_dens(idens))
    end do
    deallocate(ao_dens)
    ! blocks until all processors have finished
    call MPI_Barrier(my_MPI_COMM_WORLD, ierr_mpi)
    return
  end subroutine gen1int_worker_get_expt
#endif

  !> \brief reads the *CUBE input and initializes the settings for cube files;
  !>        the following keywords should be added under **WAVE FUNCTIONS
  !>        in DALTON.INP/DIRAC.INP:
  !>        *CUBE
  !>        .DENSITY
  !>        .HOMO
  !>        .LUMO
  !>        .MO
  !>        1,5-9,12
  !>        .FORMAT
  !>        GAUSSIAN
  !>        .ORIGIN
  !>        -2.0  -4.0   -5.0
  !>        .INCREMENT
  !>        100    0.0    0.0    0.1  #of increments in the slowest running direction
  !>         80    0.0    0.1    0.0
  !>         40    0.1    0.0    0.0  #of increments in the fastest running direction
  !>        where .DENSITY, .HOMO, .LUMO, and .MO (followed by the indices of molecular orbitals)
  !>        are optional, and any subset of them may be given
  !> \author Bin Gao
  !> \date 2012-03-09
  !> \param io_input is the logical unit number of standard input
  !> \param io_viewer is the logical unit number of the viewer
  !> \param word is the keyword from input file
  subroutine gen1int_host_cube_init(io_input, io_viewer, word)
    ! module of generating cube files
    use gen1int_cube
    ! to decode the string of MOs (in Gen1Int library)
    use str_decode
    implicit none
    integer, intent(in) :: io_input
    integer, intent(in) :: io_viewer
    character*(*), intent(inout) :: word
! uses MXCORB
#include "maxorb.h"
! uses DO_CUBE
#include "infinp.h"
    character(MAX_LEN_STR) key_word  !key words read from standard input
    type(decode_str_t) str_idx_mo    !string of indices of MOs
    integer ixyz                     !incremental recorder along XYZ directions
    integer ierr                     !error information
    ! reads in the first line after *CUBE
    read(io_input,"(A)",err=999,end=999) key_word
    do while(key_word(1:1)/="*")
      select case(trim(key_word))
      ! electron density
      case(".DENSITY")
        do_density_cube = .true.
      ! HOMO
      case(".HOMO")
        do_homo_cube = .true.
      ! LUMO
      case(".LUMO")
        do_lumo_cube = .true.
      ! MOs
      case(".MO")
        read(io_input,"(A)",err=999,end=999) key_word
        call StrDecodeCreate(the_str=trim(key_word), &
                             conn_char="-",          &
                             sep_char=",",           &
                             num_ints=num_cube_mo,   &
                             decode_str=str_idx_mo)
        allocate(idx_cube_mo(num_cube_mo), stat=ierr)
        if (ierr/=0) then
          call quit("gen1int_host_cube_init>> failed to allocate idx_cube_mo!")
        end if
        call StrDecodeGetInts(decode_str=str_idx_mo, &
                              num_ints=num_cube_mo,  &
                              convert_ints=idx_cube_mo)
        call StrDecodeDestroy(decode_str=str_idx_mo)
        do_mo_cube = .true.
      ! format of cube file
      case(".FORMAT")
        read(io_input,"(A)",err=999,end=999) cube_format
      ! origin
      case(".ORIGIN")
        read(io_input,*,err=999,end=999) cube_origin
      ! increments
      case(".INCREMENT")
        do ixyz = 1, 3
          ! cube_increment(:,1) is the increment for X
          ! cube_increment(:,2) is the increment for Y
          ! cube_increment(:,3) is the increment for Z
          ! reads N, X, Y, Z
          read(io_input,*,err=999,end=999) cube_num_inc(ixyz), cube_increment(ixyz,:)
        end do
      ! comments or illegal keyword
      case default
        if (key_word(1:1)/="#" .and. key_word(1:1)/="!") then
          write(io_viewer,100) "keyword """//trim(key_word)// &
                               """ is not recognized in *CUBE!"
          call quit('unknown keyword in *CUBE')
        end if
      end select
      ! reads next line
      read(io_input,"(A)",err=999,end=999) key_word
    end do
    ! writes input information to check
    if (do_density_cube) &
      write(io_viewer,100) "generates cube file of electron density"
    if (do_homo_cube) &
      write(io_viewer,100) "generates cube file of HOMO"
    if (do_lumo_cube) &
      write(io_viewer,100) "generates cube file of LUMO"
    if (do_mo_cube) then
      write(io_viewer,100) "generates cube file of MOs"
      write(io_viewer,110) idx_cube_mo
    end if
    write(io_viewer,100) "format: "//trim(cube_format)
    write(io_viewer,120) "origin:", cube_origin
    do ixyz = 1, 3
      write(io_viewer,130) "increments:", cube_num_inc(ixyz), cube_increment(ixyz,:)
    end do
    ! returns the last read keyword back
    word = trim(key_word)
    ! generates cube files later on
    DO_CUBE = do_density_cube .or. do_homo_cube .or. do_lumo_cube .or. do_mo_cube
    return
999 write(io_viewer,100) "failed to process input after reading "// &
                         trim(key_word)//"!"
    call quit('failed to process input')
100 format("gen1int_host_cube_init>> ",A,I8)
110 format("gen1int_host_cube_init>> ",10I5)
120 format("gen1int_host_cube_init>> ",A,3F16.8)
130 format("gen1int_host_cube_init>> ",A,I8,3F16.8)
  end subroutine gen1int_host_cube_init

  !> \brief generates the cube file of the electron density and/or molecular orbitals
  !>        using Gen1Int library
  !> \author Bin Gao
  !> \date 2012-03-09
  !> \param len_work is the length of Dalton/Dirac workspace
  !> \param wrk_space is the Dalton/Dirac workspace
  !> \param io_viewer is the logical unit number of the viewer
  !> \param level_print is the level of print
  subroutine gen1int_host_get_cube(len_work, wrk_space, io_viewer, level_print)
    use london_ao
    use gen1int_matrix
    use gen1int_api
    use gen1int_cube
    implicit none
    integer, intent(in) :: len_work
    real(REALK), intent(inout) :: wrk_space(len_work)
    integer, intent(in) :: io_viewer
    integer, intent(in) :: level_print
! uses MXCENT, etc.
#include "mxcent.h"
! uses NUCDEP, CHARGE, CORD
#include "nuclei.h"
! uses NCMOT
#include "inforb.h"
! uses LUSIFC
#include "inftap.h"
    integer num_points                                !number of points in cube file
    real(REALK), allocatable :: grid_points(:,:)      !XYZ coordinates of points in cube file
    integer ipoint                                    !incremental recorder over points
    integer ixyz                                      !incremental recorder along XYZ directions
    integer ic, jc, kc                                !incremental recorder over cube increments
    type(matrix) ao_dens(1)                           !AO density matrix
    type(matrix) mo_coef                              !MO coefficients
    real(REALK) start_time                            !start time
    type(prop_comp_t) prop_comp                       !operator of property integrals with non-zero components
    type(nary_tree_t) nary_tree_bra                   !N-ary tree for partial geometric derivatives on bra center
    type(nary_tree_t) nary_tree_ket                   !N-ary tree for partial geometric derivatives on ket center
    type(nary_tree_t) nary_tree_total                 !N-ary tree for total geometric derivatives
    real(REALK), allocatable :: cube_values(:,:,:,:)  !values of points in cube file
    integer io_cube                                   !logical unit number of cube file
    logical close_sirifc                              !if closing SIRIFC afterwards
    integer ierr                                      !error information
    logical found                                     !if found required data from SIRIFC
    integer start_ao, end_ao                          !start and end addresses of AOs
    integer imo                                       !incremental recorder over MOs
#if defined(VAR_MPI)
#include "mpif.h"
#include "iprtyp.h"
    integer(kind=MPI_INTEGER_KIND) :: count_mpi, ierr_mpi
    integer(kind=MPI_INTEGER_KIND), parameter :: manager_mpi = MANAGER
    integer(kind=MPI_INTEGER_KIND), parameter :: my_MPI_COMM_WORLD = MPI_COMM_WORLD
    count_mpi = 1
    ! wakes up workers
    call MPI_Bcast(GEN1INT_GET_CUBE, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    ! broadcasts level of print
    call MPI_Bcast(level_print,      count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
#endif
    ! sets the XYZ coordinates of points in cube file
    num_points = product(cube_num_inc)
    allocate(grid_points(3,num_points), stat=ierr)
    if (ierr/=0) then
      call quit("gen1int_host_get_cube>> failed to allocate grid_points!")
    end if
    ipoint = 0
    do ic = 1, cube_num_inc(1)
      do jc = 1, cube_num_inc(2)
        do kc = 1, cube_num_inc(3)
          ! if the origin is (X0,Y0,Z0) and the three increment vectors are
          ! (X1,Y1,Z1), (X2,Y2,Z2), (X3,Y3,Z3), then point (I1,I2,I3) has the coordinates:
          ! X-coordinate: X0+(I1-1)*X1+(I2-1)*X2+(I3-1)*X3
          ! Y-coordinate: Y0+(I1-1)*Y1+(I2-1)*Y2+(I3-1)*Y3
          ! Z-coordinate: Z0+(I1-1)*Z1+(I2-1)*Z2+(I3-1)*Z3
          ipoint = ipoint+1
          do ixyz = 1, 3
            grid_points(ixyz,ipoint) = cube_origin(ixyz) &
              + real(ic-1,REALK)*cube_increment(1,ixyz)  &
              + real(jc-1,REALK)*cube_increment(2,ixyz)  &
              + real(kc-1,REALK)*cube_increment(3,ixyz)
          end do
        end do
      end do
    end do
    ! gets the AO density matrix
    if (do_density_cube) then
      call gen1int_host_get_dens(ao_dens(1), len_work, wrk_space)
      ! writes matrix to check
      if (level_print>10) then
        write(io_viewer,"()")
        write(io_viewer,100) "AO density matrix"
        call MatView(A=ao_dens(1), io_viewer=io_viewer)
      end if
      ! creates the operator of overlap integrals
      call gen1int_host_prop_create(NON_LAO, INT_OVERLAP,      &
                                    0, 0,                      &
                                    0, 0, 0,                   &
                                    0, 0, 0,                   &
                                    .false., .false., .false., &
                                    io_viewer, level_print,    &
                                    1, (/1, 1/),               &   !hardcoded for Dalton
                                    prop_comp)
      call gen1int_host_geom_create(0, 0, 0, (/0/),         &
                                    0, 0, 0, (/0/),         &
                                    0, 0, 0, (/0/),         &
                                    io_viewer, level_print, &
                                    nary_tree_bra, nary_tree_ket, nary_tree_total)
      ! evaluates the electron density at points of cube file
      allocate(cube_values(cube_num_inc(3),cube_num_inc(2),cube_num_inc(1),1), &
               stat=ierr)
      if (ierr/=0) then
        call quit("gen1int_host_get_cube>> failed to allocate cube_values!")
      end if
      cube_values = 0.0_8  !necessary to zero
      call Gen1IntAPIPropGetFunExpt(prop_comp=prop_comp,             &
                                    nary_tree_bra=nary_tree_bra,     &
                                    nary_tree_ket=nary_tree_ket,     &
                                    nary_tree_total=nary_tree_total, &
!FIXME: be parallel
                                   !api_comm_mpi=my_MPI_COMM_WORLD,  &
                                    num_points=num_points,           &
                                    grid_points=grid_points,         &
                                    num_dens=1,                      &
                                    ao_dens=ao_dens,                 &
                                    num_ints=1,                      &
                                    val_expt=cube_values,            &
                                    io_viewer=io_viewer,             &
                                    level_print=level_print)
      ! free spaces
      call MatDestroy(A=ao_dens(1))
      call Gen1IntAPIPropDestroy(prop_comp=prop_comp)
      call Gen1IntAPINaryTreeDestroy(nary_tree=nary_tree_bra)
      call Gen1IntAPINaryTreeDestroy(nary_tree=nary_tree_ket)
      call Gen1IntAPINaryTreeDestroy(nary_tree=nary_tree_total)
      ! writes cube file
      write(io_viewer,100) "writes cube file of electron density"
      io_cube = -1
      call GPOPEN(io_cube, "density.cube", "unknown", " ", "formatted", &
                  ierr, .false.)
      write(io_cube,"(1X,A)") "molecule density=scf"
      write(io_cube,"(1X,A)") "electron density from total SCF density"
      write(io_cube,"(I5,3F12.6)") NUCDEP, cube_origin
      do ixyz = 1, 3
        write(io_cube,"(I5,3F12.6)") cube_num_inc(ixyz), cube_increment(ixyz,:)
      end do
      do ipoint = 1, NUCDEP
        write(io_cube,"(I5,4F12.6)") int(CHARGE(ipoint)), CHARGE(ipoint), &
                                     CORD(:,ipoint)
      end do
      do ic = 1, cube_num_inc(1)
        do jc = 1, cube_num_inc(2)
          write(io_cube,"(6Es13.5)") cube_values(:,jc,ic,1)
        end do
      end do
      call GPCLOSE(io_cube, "KEEP")
      ! free spaces
      deallocate(cube_values)
    end if
    ! gets the MO coefficients
    if (do_homo_cube .or. do_lumo_cube .or. do_mo_cube) then
      ! gets the molecular orbital coefficients from the SIRIFC file
      close_sirifc = LUSIFC<=0
      if (close_sirifc) &
        call GPOPEN(LUSIFC, "SIRIFC", "OLD", " ", "UNFORMATTED", ierr, .false.)
      rewind(LUSIFC)
      ! reads the molecular orbital coefficients
      wrk_space(1:NCMOT) = 0.0_8
#ifdef PRG_DIRAC
      print *
      call quit('error: RD_SIRIFC not available in DIRAC')
#else
      call RD_SIRIFC("CMO", found, wrk_space(1), wrk_space(NCMOT+1), &
                     len_work-NCMOT)
#endif
      if (.not.found) then
        call quit("gen1int_host_get_cube>> CMO is not found on SIRIFC!")
      end if
      if (close_sirifc) call GPCLOSE(LUSIFC, "KEEP")
      ! gets the number of required MOs
      if (do_homo_cube) num_cube_mo = num_cube_mo+1
      if (do_lumo_cube) num_cube_mo = num_cube_mo+1
      ! sets MO coefficients
      call MatCreate(A=mo_coef, num_row=NBAST, num_col=num_cube_mo, info_mat=ierr)
      if (ierr/=0) then
        call quit("gen1int_host_get_cube>> failed to create mo_coef!")
      end if
      if (do_mo_cube) then
        do imo = 1, size(idx_cube_mo)
          if (idx_cube_mo(imo)>NCMOT .or. idx_cube_mo(imo)<1) then
            call quit("gen1int_host_get_cube>> incorrect MOs!")
          end if
          start_ao = (idx_cube_mo(imo)-1)*NBAST
          end_ao = start_ao+NBAST
          start_ao = start_ao+1
          call MatSetValues(mo_coef, 1, NBAST, imo, imo, &
                            wrk_space(start_ao:end_ao), trans=.false.)
        end do
        imo = size(idx_cube_mo)
      else
        imo = 0
      end if
      ! MO coefficients of HOMO
      if (do_homo_cube) then
        start_ao = (NOCCT-1)*NBAST
        end_ao = start_ao+NBAST
        start_ao = start_ao+1
        imo = imo+1
        call MatSetValues(mo_coef, 1, NBAST, imo, imo, &
                          wrk_space(start_ao:end_ao), trans=.false.)
      end if
      ! MO coefficients of LUMO
      if (do_lumo_cube) then
        start_ao = NOCCT*NBAST
        end_ao = start_ao+NBAST
        start_ao = start_ao+1
        imo = imo+1
        call MatSetValues(mo_coef, 1, NBAST, imo, imo, &
                          values=wrk_space(start_ao:end_ao), trans=.false.)
      end if
      ! evaluates MOs at points in cube file
      allocate(cube_values(cube_num_inc(3),cube_num_inc(2), &
                           cube_num_inc(1),num_cube_mo),    &
               stat=ierr)
      if (ierr/=0) then
        call quit("gen1int_host_get_cube>> failed to allocate cube_values!")
      end if
#ifdef PRG_DIRAC
      call Gen1IntAPIGetMO(comp_shell=(/1,2/),      &
#else
      call Gen1IntAPIGetMO(comp_shell=(/1/),        &
#endif
                           mo_coef=mo_coef,         &
                           num_points=num_points,   &
                           grid_points=grid_points, &
                           num_derv=1,              &
                           num_mo=num_cube_mo,      &
                           val_mo=cube_values,      &
                          !api_comm_mpi=my_MPI_COMM_WORLD, &
                           gto_type=NON_LAO,        &
                           order_mag=0,             &
                           order_ram=0,             &
                           order_geo=0)
      ! free spaces
      call MatDestroy(A=mo_coef)
      ! resets the number of indices of MOs in cube file
      if (do_mo_cube) then
        num_cube_mo = size(idx_cube_mo)
      else
        num_cube_mo = 0
      end if
      ! writes cube file of MOs
      write(io_viewer,100) "writes cube file of HOMO, LUMO and/or MOs"
      if (do_mo_cube) then
        io_cube = -1
        call GPOPEN(io_cube, "mo.cube", "unknown", " ", "formatted", &
                    ierr, .false.)
        write(io_cube,"(1X,A)") "molecule mo=selected"
        write(io_cube,"(1X,A)") "MO coefficients"
        write(io_cube,"(I5,3F12.6)") -NUCDEP, cube_origin
        do ixyz = 1, 3
          write(io_cube,"(I5,3F12.6)") cube_num_inc(ixyz), cube_increment(ixyz,:)
        end do
        do ipoint = 1, NUCDEP
          write(io_cube,"(I5,4F12.6)") int(CHARGE(ipoint)), CHARGE(ipoint), &
                                       CORD(:,ipoint)
        end do
        write(io_cube,"(10I5)") num_cube_mo, idx_cube_mo
        do ic = 1, cube_num_inc(1)
          do jc = 1, cube_num_inc(2)
!FIXME: the memory access here is awful, might need to change
            write(io_cube,"(6Es13.5)") &
              ((cube_values(kc,jc,ic,imo), imo=1,num_cube_mo), kc=1,cube_num_inc(3))
          end do
        end do
        call GPCLOSE(io_cube, "KEEP")
        imo = num_cube_mo
      else
        imo = 0
      end if
      ! writes cube file of HOMO
      if (do_homo_cube) then
        io_cube = -1
        call GPOPEN(io_cube, "homo.cube", "unknown", " ", "formatted", &
                    ierr, .false.)
        write(io_cube,"(1X,A)") "molecule mo=HOMO"
        write(io_cube,"(1X,A)") "MO coefficients"
        write(io_cube,"(I5,3F12.6)") -NUCDEP, cube_origin
        do ixyz = 1, 3
          write(io_cube,"(I5,3F12.6)") cube_num_inc(ixyz), cube_increment(ixyz,:)
        end do
        do ipoint = 1, NUCDEP
          write(io_cube,"(I5,4F12.6)") int(CHARGE(ipoint)), CHARGE(ipoint), &
                                       CORD(:,ipoint)
        end do
        write(io_cube,"(10I5)") 1, NOCCT
        imo = imo+1
        do ic = 1, cube_num_inc(1)
          do jc = 1, cube_num_inc(2)
            write(io_cube,"(6Es13.5)") cube_values(:,jc,ic,imo)
          end do
        end do
        call GPCLOSE(io_cube, "KEEP")
      end if
      ! writes cube file of LUMO
      if (do_lumo_cube) then
        io_cube = -1
        call GPOPEN(io_cube, "lumo.cube", "unknown", " ", "formatted", &
                    ierr, .false.)
        write(io_cube,"(1X,A)") "molecule mo=LUMO"
        write(io_cube,"(1X,A)") "MO coefficients"
        write(io_cube,"(I5,3F12.6)") -NUCDEP, cube_origin
        do ixyz = 1, 3
          write(io_cube,"(I5,3F12.6)") cube_num_inc(ixyz), cube_increment(ixyz,:)
        end do
        do ipoint = 1, NUCDEP
          write(io_cube,"(I5,4F12.6)") int(CHARGE(ipoint)), CHARGE(ipoint), &
                                       CORD(:,ipoint)
        end do
        write(io_cube,"(10I5)") 1, NOCCT+1
        imo = imo+1
        do ic = 1, cube_num_inc(1)
          do jc = 1, cube_num_inc(2)
            write(io_cube,"(6Es13.5)") cube_values(:,jc,ic,imo)
          end do
        end do
        call GPCLOSE(io_cube, "KEEP")
      end if
      ! free spaces
      deallocate(cube_values)
    end if
    ! frees spaces
    deallocate(grid_points)
    return
100 format("gen1int_host_get_cube>> ",A,I8)
  end subroutine gen1int_host_get_cube

#if defined(VAR_MPI)
  !> \brief placeholder for evaluating cube files on worker processors; the cube
  !>        evaluation is not parallelized yet (see the FIXME in
  !>        \fn(gen1int_host_get_cube)), so this routine currently does nothing
  subroutine gen1int_worker_get_cube(len_work, wrk_space, io_viewer, level_print)
    use gen1int_matrix
    use gen1int_api
    use gen1int_cube
    implicit none
    integer, intent(in) :: len_work
    real(REALK), intent(inout) :: wrk_space(len_work)
    integer, intent(in) :: io_viewer
    integer, intent(in) :: level_print
#include "mpif.h"
    return
  end subroutine gen1int_worker_get_cube
#endif

  !> \brief frees the space taken by the cube files
  !> \author Bin Gao
  !> \date 2012-05-14
  subroutine gen1int_host_cube_finalize()
    use gen1int_cube
    implicit none
    do_density_cube = .false.
    do_homo_cube = .false.
    do_lumo_cube = .false.
    do_mo_cube = .false.
    num_cube_mo = 0
    if (allocated(idx_cube_mo)) deallocate(idx_cube_mo)
    cube_format = "GAUSSIAN"
    cube_origin = 0.0_8
    cube_increment = 0.0_8
    cube_num_inc = 0
    return
  end subroutine gen1int_host_cube_finalize

  !> \brief terminates Gen1Int interface after all calculations
  !> \author Bin Gao
  !> \date 2011-10-02
  subroutine gen1int_host_finalize
    use gen1int_api
    implicit none
    call Gen1IntAPIDestroy()
    return
  end subroutine gen1int_host_finalize
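
  ! Typical lifecycle on the manager processor (sketch):
  !
  !   call gen1int_host_init(...)      !once, before any integral evaluation
  !   call gen1int_host_get_int(...)   !and/or gen1int_host_get_expt(...)
  !   call gen1int_host_finalize()     !after all calculations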

  !> \brief test suite of the Gen1Int interface, enabled by adding the following lines
  !>        in DALTON.INP/DIRAC.INP
  !>        **INTEGRAL
  !>        .GENINT
  !> \author Bin Gao
  !> \date 2011-10-04
  !> \param len_work is the length of Dalton/Dirac workspace
  !> \param wrk_space is the Dalton/Dirac workspace
  !> \param io_viewer is the logical unit number of the viewer
  !> \param level_print is the level of print
  subroutine gen1int_host_test(len_work, wrk_space, io_viewer, level_print)
    use london_ao
    use gen1int_api
    implicit none
    integer, intent(in) :: len_work
    real(REALK), intent(inout) :: wrk_space(len_work)
    integer, intent(in) :: io_viewer
    integer, intent(in) :: level_print
    real(REALK), parameter :: ERR_THRSH = 10.0_8**(-8)    !threshold of error
    real(REALK), parameter :: RATIO_THRSH = 10.0_8**(-6)  !threshold of the ratio to the reference result
    logical test_failed                                   !indicator if the test failed
    integer num_ao                                        !number of atomic orbitals
    integer, parameter :: NUM_TEST = 11                   !number of tests
    character*20, parameter :: PROP_NAME(NUM_TEST) = &    !names of the property integrals to test,
      (/"INT_KIN_ENERGY    ", "INT_OVERLAP       ",  &    !see Gen1int library src/gen1int.F90
        "INT_POT_ENERGY    ", "INT_CART_MULTIPOLE",  &
        "INT_ANGMOM        ", "INT_ONE_HAMIL     ",  &
        "INT_ONE_HAMIL     ", "INT_OVERLAP       ",  &
        "INT_OVERLAP       ", "INT_OVERLAP       ",  &
        "INT_CART_MULTIPOLE"/)
    character*8, parameter :: HERM_PROP(NUM_TEST) = &     !HERMIT names of the property integrals,
      (/"KINENERG", "SQHDOR  ", "POTENERG",         &     !see \fn(PR1IN1) in abacus/her1pro.F
        "CARMOM  ", "ANGMOM  ", "MAGMOM  ",         &
        "ANGMOM  ", "S1MAG   ", "S1MAGL  ",         &
        "S1MAGR  ", "CM1     "/)
    integer, parameter :: GTO_TYPE(NUM_TEST) = &          !type of GTOs
      (/NON_LAO, NON_LAO, NON_LAO, NON_LAO,    &
        NON_LAO, LONDON, NON_LAO, LONDON,      &
        LONDON, LONDON, LONDON/)
    integer, parameter :: ORDER_MOM(NUM_TEST) = &         !order of Cartesian multipole moments
      (/0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1/)
    integer, parameter :: ORDER_MAG_BRA(NUM_TEST) = &     !order of partial magnetic derivatives on bra center
      (/0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0/)
    integer, parameter :: ORDER_MAG_KET(NUM_TEST) = &     !order of partial magnetic derivatives on ket center
      (/0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0/)
    integer, parameter :: ORDER_MAG_TOTAL(NUM_TEST) = &   !order of total magnetic derivatives
      (/0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1/)
    integer, parameter :: ORDER_RAM_BRA(NUM_TEST) = &     !order of partial rotational-angular-momentum derivatives on bra center
      (/0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0/)
    integer, parameter :: ORDER_RAM_KET(NUM_TEST) = &     !order of partial rotational-angular-momentum derivatives on ket center
      (/0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0/)
    integer, parameter :: ORDER_RAM_TOTAL(NUM_TEST) = &   !order of total rotational-angular-momentum derivatives
      (/0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0/)
    integer, parameter :: ORDER_GEO_BRA(NUM_TEST) = &     !order of partial geometric derivatives on bra center
      (/0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0/)
    integer, parameter :: ORDER_GEO_KET(NUM_TEST) = &     !order of partial geometric derivatives on ket center
      (/0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0/)
    integer, parameter :: MAX_NUM_CENT(NUM_TEST) = &      !maximum number of differentiated centers
      (/0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0/)
    integer, parameter :: ORDER_GEO_TOTAL(NUM_TEST) = &   !order of total geometric derivatives
      (/0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0/)
    logical, parameter :: ADD_SR(NUM_TEST) = &            !if adding the scalar-relativistic correction
      (/.false., .false., .false., .false.,  &
        .false., .false., .false., .false.,  &
        .false., .false., .false./)
    logical, parameter :: ADD_SO(NUM_TEST) = &            !if adding the spin-orbit correction
      (/.false., .false., .false., .false.,  &
        .false., .false., .false., .false.,  &
        .false., .false., .false./)
    logical, parameter :: ADD_LONDON(NUM_TEST) = &        !if applying the LAO gauge-including projector
      (/.false., .false., .false., .false.,      &
        .false., .false., .false., .false.,      &
        .false., .false., .false./)
    logical, parameter :: TRIANG(NUM_TEST) = &            !if the integral matrices are triangular (else square)
      (/.true., .false., .true., .true.,     &
        .true., .true., .true., .true.,      &
        .false., .false., .true./)
    logical, parameter :: SYMMETRIC(NUM_TEST) = &         !if the integral matrices are symmetric (else anti-symmetric)
      (/.true., .false., .true., .true.,        &
        .false., .false., .false., .false.,     &
        .false., .false., .false./)
1148    integer NUM_INTS(NUM_TEST)                            !number of integral matrices
1149    type(matrix), allocatable :: val_ints(:)              !integral matrices
1150    logical, parameter :: WRITE_INTS = .false.            !if writing integrals on file
1151    logical, parameter :: WRITE_EXPT = .false.            !if writing expectation values on file
1152    integer itest                                         !incremental recorder over different tests
1153    integer imat, jmat                                    !incremental recorders over matrices
1154    integer ierr                                          !error information
1155    ! variables related to \fn(PR1IN1) ...
1156! uses \var(MXCENT)
1157#include "mxcent.h"
1158! number of atomic centers
1159#include "nuclei.h"
1160! uses \var(MXCORB)
1161#include "maxorb.h"
1162! uses \var(MXQN)
1163#include "maxaqn.h"
1164! uses \var(MAXREP)
1165#include "symmet.h"
1166! uses FIELD1
1167#include "efield.h"
1168!FIXME: having problem of calling \fn(PR1IN1) with \var(GET_EXPT)=.true., which gives wrong integrals
1169    integer size_int                                      !size of property integrals
1170    integer start_herm_int, end_herm_int                  !addresses for integrals from \fn(PR1IN1)
1171    logical, parameter :: GET_EXPT = .false.              !if getting expectation values back
1172    integer max_typ
1173    integer, allocatable :: int_rep(:)
1174    integer lint_ad
1175    integer, allocatable :: int_adr(:)
1176    character*8, allocatable :: lb_int(:)
1177    integer NCOMP
1178    logical, parameter :: TOFILE = .false.                !if writing integrals on file
1179    character*6, parameter :: MTFORM = "TRIANG"
1180    logical, parameter :: DOINT(4) = (/.true., .false., .false., .false./)
1181    logical, parameter :: PROP_PRINT = .false.            !if printing referenced property integrals
1182    integer, parameter :: NUM_PQUAD = 40                  !number of integration points for DSO integrals
1183    integer len_free                                      !length of free Dalton/Dirac workspace
1184#if !defined (PRG_DIRAC)
1185    integer base_free                                     !base address of free Dalton/Dirac workspace
1186#endif
1187    logical almost_equal                                  !indicates if the results from Gen1Int are
1188                                                          !almost equal to those from \fn(PR1IN1)
1189    ! gets the number of atomic orbitals
1190    call Gen1IntAPIGetNumAO(num_ao=num_ao)
1191    write(io_viewer,100) "number of orbitals", num_ao
1192    write(io_viewer,110) "threshold of error", ERR_THRSH
1193    write(io_viewer,110) "threshold of ratio to the referenced result", RATIO_THRSH
1194    ! sets the number of integral matrices
1195    NUM_INTS(1) = 1
1196    NUM_INTS(2) = 3*NUCDEP
1197    NUM_INTS(3) = 1
1198    NUM_INTS(4) = 3
1199    NUM_INTS(5) = 3
1200    NUM_INTS(6) = 3
1201    NUM_INTS(7) = 3
1202    NUM_INTS(8) = 3
1203    NUM_INTS(9) = 3
1204    NUM_INTS(10) = 3
1205    NUM_INTS(11) = 9
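    ! (the counts follow from the operators and derivative orders above:
    !  test 2 differentiates the overlap once w.r.t. each nuclear coordinate,
    !  giving 3*NUCDEP matrices; tests 4-10 each carry one Cartesian triplet,
    !  giving 3; test 11 combines a first-order multipole with a first-order
    !  magnetic derivative, giving 3*3 = 9)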
    ! loops over different tests
    do itest = 1, NUM_TEST
      test_failed = .false.
#if defined(PRG_DIRAC)
      if (HERM_PROP(itest)=="POTENERG") cycle
#endif
      ! allocates integral matrices
      allocate(val_ints(NUM_INTS(itest)), stat=ierr)
      if (ierr/=0) then
        call quit("gen1int_host_test>> failed to allocate val_ints!")
      end if
      do imat = 1, NUM_INTS(itest)
        call MatCreate(A=val_ints(imat), num_row=num_ao, info_mat=ierr, &
                       triangular=TRIANG(itest), symmetric=SYMMETRIC(itest))
        if (ierr/=0) then
          call quit("gen1int_host_test>> failed to create integral matrices!")
        end if
      end do
      ! calculates integrals using Gen1Int
      call gen1int_host_get_int(GTO_TYPE(itest),                                       &
                                trim(PROP_NAME(itest)), ORDER_MOM(itest),              &
                                0,                                                     &
                                ORDER_MAG_BRA(itest), ORDER_MAG_KET(itest),            &
                                ORDER_MAG_TOTAL(itest),                                &
                                ORDER_RAM_BRA(itest), ORDER_RAM_KET(itest),            &
                                ORDER_RAM_TOTAL(itest),                                &
                                MAX_NUM_CENT(itest), ORDER_GEO_BRA(itest), 0, (/0/),   &
                                MAX_NUM_CENT(itest), ORDER_GEO_KET(itest), 0, (/0/),   &
                                MAX_NUM_CENT(itest), ORDER_GEO_TOTAL(itest), 0, (/0/), &
                                ADD_SR(itest), ADD_SO(itest), ADD_LONDON(itest),       &
                                NUM_INTS(itest), val_ints, WRITE_INTS,                 &
                                1, (/1, 1/),                                           & !hardcoded for Dalton
                                io_viewer, level_print)
      ! gets the reference results from HERMIT
!FIXME: \var(FORQM3)
      if (HERM_PROP(itest)=="DSO") then
        max_typ = (3*NUCDEP)**2
      else
        max_typ = 3*MXCOOR
      end if
      allocate(int_rep(max_typ), stat=ierr)
      if (ierr/=0) then
        call quit("gen1int_host_test>> failed to allocate int_rep!")
      end if
      int_rep = 0
      if (HERM_PROP(itest)=="ELFGRDC" .or. HERM_PROP(itest)=="ELFGRDS") then
        lint_ad = 9*NUCIND*(MAXREP+1)
      else
        lint_ad = max_typ
      end if
      allocate(int_adr(lint_ad), stat=ierr)
      if (ierr/=0) then
        call quit("gen1int_host_test>> failed to allocate int_adr!")
      end if
      int_adr = 0
      allocate(lb_int(max_typ), stat=ierr)
      if (ierr/=0) then
        call quit("gen1int_host_test>> failed to allocate lb_int!")
      end if
#if !defined(PRG_DIRAC)
      base_free = 1
#endif
      if (TRIANG(itest)) then
        size_int = num_ao*(num_ao+1)/2
      else
        size_int = num_ao*num_ao
      end if
      end_herm_int = size_int*NUM_INTS(itest)
      len_free = len_work-end_herm_int
      write(io_viewer,100) "gets the reference results from HERMIT ..."
      ! nonzero so that \fn(PR1IN1) copies the results back on its first call
      NCOMP = -1
      if (TRIANG(itest)) then
#if !defined(PRG_DIRAC)
!FIXME: the last argument is symmetric AO density matrix
        if (HERM_PROP(itest)(1:7)=="CM1    ") then
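          ! the CM1 reference integrals are generated one external-field
          ! component at a time: \var(FIELD1) selects the field direction and
          ! each call writes its three matrices into successive thirds of the
          ! integral buffer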
          FIELD1 = 'X-FIELD'
          call PR1IN1(wrk_space(end_herm_int+1:), base_free, len_free, int_rep, &
                      int_adr, lb_int, HERM_PROP(itest)(1:7), ORDER_MOM(itest), &
                      NUM_PQUAD, TRIANG(itest), PROP_PRINT, level_print,        &
                      wrk_space, NCOMP, TOFILE, MTFORM, DOINT,                  &
                      wrk_space(end_herm_int+1:), GET_EXPT, wrk_space(end_herm_int+1:))
          FIELD1 = 'Y-FIELD'
          call PR1IN1(wrk_space(end_herm_int+1:), base_free, len_free, int_rep,  &
                      int_adr, lb_int, HERM_PROP(itest)(1:7), ORDER_MOM(itest),  &
                      NUM_PQUAD, TRIANG(itest), PROP_PRINT, level_print,         &
                      wrk_space(end_herm_int/3+1), NCOMP, TOFILE, MTFORM, DOINT, &
                      wrk_space(end_herm_int+1:), GET_EXPT, wrk_space(end_herm_int+1:))
          FIELD1 = 'Z-FIELD'
          call PR1IN1(wrk_space(end_herm_int+1:), base_free, len_free, int_rep,    &
                      int_adr, lb_int, HERM_PROP(itest)(1:7), ORDER_MOM(itest),    &
                      NUM_PQUAD, TRIANG(itest), PROP_PRINT, level_print,           &
                      wrk_space(end_herm_int/3*2+1), NCOMP, TOFILE, MTFORM, DOINT, &
                      wrk_space(end_herm_int+1:), GET_EXPT, wrk_space(end_herm_int+1:))
        else
          call PR1IN1(wrk_space(end_herm_int+1:), base_free, len_free, int_rep, &
                      int_adr, lb_int, HERM_PROP(itest)(1:7), ORDER_MOM(itest), &
                      NUM_PQUAD, TRIANG(itest), PROP_PRINT, level_print,        &
                      wrk_space(1:end_herm_int), NCOMP, TOFILE, MTFORM, DOINT,  &
                      wrk_space(end_herm_int+1:), GET_EXPT, wrk_space(end_herm_int+1:))
        end if
#else
        call PR1INT_1(wrk_space(end_herm_int+1:), len_free, int_rep,            &
                      int_adr, lb_int, HERM_PROP(itest)(1:7), ORDER_MOM(itest), &
                      NUM_PQUAD, TRIANG(itest), PROP_PRINT, level_print,        &
                      wrk_space(1:end_herm_int), NCOMP, TOFILE, MTFORM, DOINT)
#endif
      else
#if !defined(PRG_DIRAC)
!FIXME: the last argument is square AO density matrix
        call PR1IN1(wrk_space(end_herm_int+1:), base_free, len_free, int_rep, &
                    int_adr, lb_int, HERM_PROP(itest)(1:7), ORDER_MOM(itest), &
                    NUM_PQUAD, TRIANG(itest), PROP_PRINT, level_print,        &
                    wrk_space(1:end_herm_int), NCOMP, TOFILE, MTFORM, DOINT,  &
                    wrk_space(end_herm_int+1:), GET_EXPT, wrk_space(end_herm_int+1:))
#else
        call PR1INT_1(wrk_space(end_herm_int+1:), len_free, int_rep,            &
                      int_adr, lb_int, HERM_PROP(itest)(1:7), ORDER_MOM(itest), &
                      NUM_PQUAD, TRIANG(itest), PROP_PRINT, level_print,        &
                      wrk_space(1:end_herm_int), NCOMP, TOFILE, MTFORM, DOINT)
#endif
      end if
      deallocate(int_rep)
      deallocate(int_adr)
      deallocate(lb_int)
      ! magnetic derivatives of one-electron Hamiltonian using non-LAOs
      if (PROP_NAME(itest)=="INT_ONE_HAMIL     " .and. &
          GTO_TYPE(itest)==NON_LAO .and.               &
          ORDER_MAG_TOTAL(itest)==1) then
        wrk_space(1:end_herm_int) = -0.5_8*wrk_space(1:end_herm_int)
      ! HERMIT uses the opposite sign convention for the following integrals
      else if (HERM_PROP(itest)=="DPLGRA  " .or. HERM_PROP(itest)=="POTENERG" .or. &
               HERM_PROP(itest)=="NUCSLO  " .or. HERM_PROP(itest)=="PSO     " .or. &
               HERM_PROP(itest)=="ANGMOM  " .or. HERM_PROP(itest)=="SQHDOR  " .or. &
               HERM_PROP(itest)=="MAGMOM  " .or. HERM_PROP(itest)=="S1MAG   " .or. &
               HERM_PROP(itest)=="CM1     ") then
        wrk_space(1:end_herm_int) = -wrk_space(1:end_herm_int)
      end if
      ! checks the results
      write(io_viewer,100) "checks the results of "//trim(PROP_NAME(itest))
      start_herm_int = 0
      do imat = 1, NUM_INTS(itest)
        end_herm_int = start_herm_int+size_int
        start_herm_int = start_herm_int+1
        ! DALTON stores component (i,j) at (i-1)*3+j:
        ! 1 2 3
        ! 4 5 6
        ! 7 8 9
        ! while Gen1Int stores it at (j-1)*3+i:
        ! 1 4 7
        ! 2 5 8
        ! 3 6 9
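        ! hence, for the 9-component CM1 matrices, the remapping below sends
        ! imat = 1..9 to jmat = 1, 4, 7, 2, 5, 8, 3, 6, 9, i.e. it transposes
        ! the 3x3 component ordering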
        if (HERM_PROP(itest)=="CM1     ") then
          jmat = mod(imat,3)
          if (jmat==0) then
            jmat = 6+imat/3
          else
            jmat = (jmat-1)*3+imat/3+1
          end if
          call MatArrayAlmostEqual(A=val_ints(jmat),                              &
                                   values=wrk_space(start_herm_int:end_herm_int), &
                                   io_viewer=io_viewer,                           &
                                   almost_equal=almost_equal,                     &
                                   triangular=TRIANG(itest),                      &
                                   symmetric=SYMMETRIC(itest),                    &
                                   threshold=ERR_THRSH,                           &
                                   ratio_thrsh=RATIO_THRSH)
          call MatDestroy(A=val_ints(jmat))
        else
          call MatArrayAlmostEqual(A=val_ints(imat),                              &
                                   values=wrk_space(start_herm_int:end_herm_int), &
                                   io_viewer=io_viewer,                           &
                                   almost_equal=almost_equal,                     &
                                   triangular=TRIANG(itest),                      &
                                   symmetric=SYMMETRIC(itest),                    &
                                   threshold=ERR_THRSH,                           &
                                   ratio_thrsh=RATIO_THRSH)
          call MatDestroy(A=val_ints(imat))
        end if
        if (.not. test_failed) test_failed = .not.almost_equal
        start_herm_int = end_herm_int
      end do
      deallocate(val_ints)
      if (test_failed) then
        write(io_viewer,100) "test of "//trim(PROP_NAME(itest))//".Gen1Int failed!"
      else
        write(io_viewer,100) "test of "//trim(PROP_NAME(itest))//".Gen1Int passed!"
      end if
    end do
    return
100 format("gen1int_host_test>> ",A,I8)
110 format("gen1int_host_test>> ",A,Es16.6)
  end subroutine gen1int_host_test

  ! private subroutines

  !> \brief creates the operator of property integrals, and broadcasts input
  !>        arguments to worker processors
  !> \author Bin Gao
  !> \date 2012-05-09
  !> \return prop_comp is the operator of property integrals with non-zero components
  !> \note see \fn(gen1int_host_get_int) for the explanation of other arguments
  subroutine gen1int_host_prop_create(gto_type, prop_name, order_mom, &
                                      order_elec,                     &
                                      order_mag_bra, order_mag_ket,   &
                                      order_mag_total,                &
                                      order_ram_bra, order_ram_ket,   &
                                      order_ram_total,                &
                                      add_sr, add_so, add_london,     &
                                      io_viewer, level_print,         &
                                      nr_active_blocks,               &
                                      active_component_pairs,         &
                                      prop_comp)
    use gen1int_api
    implicit none
    integer,           intent(in)    :: gto_type
    character*(*),     intent(in)    :: prop_name
    integer,           intent(in)    :: order_mom
    integer,           intent(in)    :: order_elec
    integer,           intent(in)    :: order_mag_bra
    integer,           intent(in)    :: order_mag_ket
    integer,           intent(in)    :: order_mag_total
    integer,           intent(in)    :: order_ram_bra
    integer,           intent(in)    :: order_ram_ket
    integer,           intent(in)    :: order_ram_total
    logical,           intent(in)    :: add_sr
    logical,           intent(in)    :: add_so
    logical,           intent(in)    :: add_london
    integer,           intent(in)    :: io_viewer
    integer,           intent(in)    :: level_print
    integer,           intent(in)    :: nr_active_blocks
    integer,           intent(in)    :: active_component_pairs(*)
    type(prop_comp_t), intent(inout) :: prop_comp
    integer len_name  !length of property name
#if defined(VAR_MPI)
#include "mpif.h"
    integer(kind=MPI_INTEGER_KIND) :: count_mpi, ierr_mpi
    integer(kind=MPI_INTEGER_KIND), parameter :: manager_mpi = MANAGER
    integer(kind=MPI_INTEGER_KIND), parameter :: my_MPI_COMM_WORLD = MPI_COMM_WORLD
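    ! the broadcasts below must stay in the exact order of the matching
    ! receives in \fn(gen1int_worker_prop_create)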
    count_mpi = 1
    ! broadcasts input arguments
    call MPI_Bcast(gto_type, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    len_name = len_trim(prop_name)
    if (len_name>MAX_LEN_STR) then
      call quit("gen1int_host_prop_create>> property name is too long!")
    end if
    call MPI_Bcast(len_name, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    count_mpi = len_name
    call MPI_Bcast(prop_name(1:len_name), count_mpi, MPI_CHARACTER, &
                   manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    count_mpi = 1
    call MPI_Bcast(order_mom,       count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_elec,      count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_mag_bra,   count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_mag_ket,   count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_mag_total, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_ram_bra,   count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_ram_ket,   count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_ram_total, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(nr_active_blocks,       count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    count_mpi = nr_active_blocks*2
    call MPI_Bcast(active_component_pairs, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    count_mpi = 1
    call MPI_Bcast(add_sr,     count_mpi, MPI_LOGICALK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(add_so,     count_mpi, MPI_LOGICALK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(add_london, count_mpi, MPI_LOGICALK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
#endif
    ! creates the operator of property integrals
    call Gen1IntAPIPropCreate(gto_type, prop_name, order_mom, &
                              order_elec,                     &
                              order_mag_bra, order_mag_ket,   &
                              order_mag_total,                &
                              order_ram_bra, order_ram_ket,   &
                              order_ram_total,                &
                              add_sr, add_so, add_london,     &
                              nr_active_blocks,               &
                              active_component_pairs,         &
                              prop_comp)
    if (level_print>=5) then
      call Gen1IntAPIPropView(prop_comp=prop_comp, io_viewer=io_viewer)
    end if
    return
  end subroutine gen1int_host_prop_create

#if defined(VAR_MPI)
  !> \brief creates the operator of property integrals on worker processors using
  !>        the arguments broadcast from the manager
  !> \author Bin Gao
  !> \date 2012-05-09
  !> \param io_viewer is the logical unit number of the viewer
  !> \param level_print is the level of print
  !> \return prop_comp is the operator of property integrals with non-zero components
  subroutine gen1int_worker_prop_create(io_viewer, level_print, prop_comp)
    use gen1int_api
    implicit none
    integer,           intent(in)    :: io_viewer
    integer,           intent(in)    :: level_print
    type(prop_comp_t), intent(inout) :: prop_comp
    ! local variables, see the explanation of input arguments in \fn(gen1int_host_get_int)
    integer gto_type
    character(MAX_LEN_STR) prop_name
    integer order_mom
    integer order_elec
    integer order_mag_bra
    integer order_mag_ket
    integer order_mag_total
    integer order_ram_bra
    integer order_ram_ket
    integer order_ram_total
    integer              :: nr_active_blocks
    integer, allocatable :: active_component_pairs(:)
    logical add_sr
    logical add_so
    logical add_london
    integer len_name  !length of property name
    integer ierr      !error information
#include "mpif.h"
    integer(kind=MPI_INTEGER_KIND) :: count_mpi, ierr_mpi
    integer(kind=MPI_INTEGER_KIND), parameter :: manager_mpi = MANAGER
    integer(kind=MPI_INTEGER_KIND), parameter :: my_MPI_COMM_WORLD = MPI_COMM_WORLD
    count_mpi = 1
    ! gets input arguments from the manager
    call MPI_Bcast(gto_type, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(len_name, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    if (len_name>MAX_LEN_STR) then
      call quit("gen1int_worker_prop_create>> property name is too long!")
    end if
    prop_name = ""
    count_mpi = len_name
    call MPI_Bcast(prop_name(1:len_name), count_mpi, MPI_CHARACTER, &
                   manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    count_mpi = 1
    call MPI_Bcast(order_mom,       count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_elec,      count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_mag_bra,   count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_mag_ket,   count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_mag_total, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_ram_bra,   count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_ram_ket,   count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_ram_total, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(nr_active_blocks, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    allocate(active_component_pairs(nr_active_blocks*2), stat=ierr)
    if (ierr /= 0) then
      call quit("gen1int_worker_prop_create>> failed to allocate active_component_pairs!")
    end if
    count_mpi = nr_active_blocks*2
    call MPI_Bcast(active_component_pairs, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    count_mpi = 1
    call MPI_Bcast(add_sr,     count_mpi, MPI_LOGICALK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(add_so,     count_mpi, MPI_LOGICALK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(add_london, count_mpi, MPI_LOGICALK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    ! creates the operator of property integrals
    call Gen1IntAPIPropCreate(gto_type, prop_name, order_mom, &
                              order_elec,                     &
                              order_mag_bra, order_mag_ket,   &
                              order_mag_total,                &
                              order_ram_bra, order_ram_ket,   &
                              order_ram_total,                &
                              add_sr, add_so, add_london,     &
                              nr_active_blocks,               &
                              active_component_pairs,         &
                              prop_comp)
    if (level_print>=5) then
      call Gen1IntAPIPropView(prop_comp=prop_comp, io_viewer=io_viewer)
    end if
    return
  end subroutine gen1int_worker_prop_create
#endif

  !> \brief creates N-ary tree for geometric derivatives on manager processor, and broadcasts
  !>        input arguments to worker processors
  !> \author Bin Gao
  !> \date 2012-05-09
  !> \return nary_tree_bra is the N-ary tree for geometric derivatives on bra center
  !> \return nary_tree_ket is the N-ary tree for geometric derivatives on ket center
  !> \return nary_tree_total is the N-ary tree for total geometric derivatives
  !> \note see \fn(gen1int_host_get_int) for the explanation of other arguments
  subroutine gen1int_host_geom_create(max_ncent_bra, order_geo_bra,  &
                                      num_atoms_bra, idx_atoms_bra,  &
                                      max_ncent_ket, order_geo_ket,  &
                                      num_atoms_ket, idx_atoms_ket,  &
                                      max_num_cent, order_geo_total, &
                                      num_geo_atoms, idx_geo_atoms,  &
                                      io_viewer, level_print,        &
                                      nary_tree_bra, nary_tree_ket, nary_tree_total)
    use gen1int_api
    implicit none
    integer,           intent(in)    :: max_ncent_bra
    integer,           intent(in)    :: order_geo_bra
    integer,           intent(in)    :: num_atoms_bra
    integer,           intent(in)    :: idx_atoms_bra(*)
    integer,           intent(in)    :: max_ncent_ket
    integer,           intent(in)    :: order_geo_ket
    integer,           intent(in)    :: num_atoms_ket
    integer,           intent(in)    :: idx_atoms_ket(*)
    integer,           intent(in)    :: max_num_cent
    integer,           intent(in)    :: order_geo_total
    integer,           intent(in)    :: num_geo_atoms
    integer,           intent(in)    :: idx_geo_atoms(*)
    integer,           intent(in)    :: io_viewer
    integer,           intent(in)    :: level_print
    type(nary_tree_t), intent(inout) :: nary_tree_bra
    type(nary_tree_t), intent(inout) :: nary_tree_ket
    type(nary_tree_t), intent(inout) :: nary_tree_total
#if defined(VAR_MPI)
#include "mpif.h"
    integer(kind=MPI_INTEGER_KIND) :: count_mpi, ierr_mpi
    integer(kind=MPI_INTEGER_KIND), parameter :: manager_mpi = MANAGER
    integer(kind=MPI_INTEGER_KIND), parameter :: my_MPI_COMM_WORLD = MPI_COMM_WORLD
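    ! as in \fn(gen1int_host_prop_create), the broadcast order here mirrors the
    ! receives in \fn(gen1int_worker_geom_create)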
    count_mpi = 1
    ! broadcasts information of N-ary trees
    call MPI_Bcast(max_ncent_bra, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_geo_bra, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(num_atoms_bra, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    if (num_atoms_bra>0) then
      count_mpi = num_atoms_bra
      call MPI_Bcast(idx_atoms_bra, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
      count_mpi = 1
    end if
    call MPI_Bcast(max_ncent_ket, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_geo_ket, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(num_atoms_ket, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    if (num_atoms_ket>0) then
      count_mpi = num_atoms_ket
      call MPI_Bcast(idx_atoms_ket, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
      count_mpi = 1
    end if
    call MPI_Bcast(max_num_cent,    count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_geo_total, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(num_geo_atoms,   count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    if (num_geo_atoms>0) then
      count_mpi = num_geo_atoms
      call MPI_Bcast(idx_geo_atoms, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    end if
#endif
    ! creates N-ary trees for geometric derivatives
    call Gen1IntAPINaryTreeCreate(max_num_cent=max_ncent_bra,  &
                                  order_geo=order_geo_bra,     &
                                  num_geo_atoms=num_atoms_bra, &
                                  idx_geo_atoms=idx_atoms_bra, &
                                  nary_tree=nary_tree_bra)
    call Gen1IntAPINaryTreeCreate(max_num_cent=max_ncent_ket,  &
                                  order_geo=order_geo_ket,     &
                                  num_geo_atoms=num_atoms_ket, &
                                  idx_geo_atoms=idx_atoms_ket, &
                                  nary_tree=nary_tree_ket)
    call Gen1IntAPINaryTreeCreate(max_num_cent=max_num_cent,   &
                                  order_geo=order_geo_total,   &
                                  num_geo_atoms=num_geo_atoms, &
                                  idx_geo_atoms=idx_geo_atoms, &
                                  nary_tree=nary_tree_total)
    if (level_print>=5) then
      call Gen1IntAPINaryTreeView(nary_tree=nary_tree_bra, io_viewer=io_viewer)
      call Gen1IntAPINaryTreeView(nary_tree=nary_tree_ket, io_viewer=io_viewer)
      call Gen1IntAPINaryTreeView(nary_tree=nary_tree_total, io_viewer=io_viewer)
    end if
    return
  end subroutine gen1int_host_geom_create

#if defined(VAR_MPI)
  !> \brief creates N-ary tree for geometric derivatives on worker processors using
  !>        the arguments broadcast from the manager
  !> \author Bin Gao
  !> \date 2012-05-09
  !> \param io_viewer is the logical unit number of the viewer
  !> \param level_print is the level of print
  !> \return nary_tree_bra is the N-ary tree for geometric derivatives on bra center
  !> \return nary_tree_ket is the N-ary tree for geometric derivatives on ket center
  !> \return nary_tree_total is the N-ary tree for total geometric derivatives
  subroutine gen1int_worker_geom_create(io_viewer, level_print, nary_tree_bra, nary_tree_ket, nary_tree_total)
    use gen1int_api
    implicit none
    integer,           intent(in)    :: io_viewer
    integer,           intent(in)    :: level_print
    type(nary_tree_t), intent(inout) :: nary_tree_bra
    type(nary_tree_t), intent(inout) :: nary_tree_ket
    type(nary_tree_t), intent(inout) :: nary_tree_total
    ! local variables, see the explanation of input arguments in \fn(gen1int_host_get_int)
    integer max_ncent_bra
    integer order_geo_bra
    integer num_atoms_bra
    integer, allocatable :: idx_atoms_bra(:)
    integer max_ncent_ket
    integer order_geo_ket
    integer num_atoms_ket
    integer, allocatable :: idx_atoms_ket(:)
    integer max_num_cent
    integer order_geo_total
    integer num_geo_atoms
    integer, allocatable :: idx_geo_atoms(:)
    integer ierr  !error information
#include "mpif.h"
    integer(kind=MPI_INTEGER_KIND) :: count_mpi, ierr_mpi
    integer(kind=MPI_INTEGER_KIND), parameter :: manager_mpi = MANAGER
    integer(kind=MPI_INTEGER_KIND), parameter :: my_MPI_COMM_WORLD = MPI_COMM_WORLD
    count_mpi = 1
    ! gets information of N-ary trees from the manager node
    call MPI_Bcast(max_ncent_bra, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_geo_bra, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(num_atoms_bra, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    if (num_atoms_bra>0) then
      allocate(idx_atoms_bra(num_atoms_bra), stat=ierr)
      if (ierr/=0) then
        call quit("gen1int_worker_geom_create>> failed to allocate idx_atoms_bra!")
      end if
      count_mpi = num_atoms_bra
      call MPI_Bcast(idx_atoms_bra, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
      count_mpi = 1
    end if
    call MPI_Bcast(max_ncent_ket, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_geo_ket, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(num_atoms_ket, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    if (num_atoms_ket>0) then
      allocate(idx_atoms_ket(num_atoms_ket), stat=ierr)
      if (ierr/=0) then
        call quit("gen1int_worker_geom_create>> failed to allocate idx_atoms_ket!")
      end if
      count_mpi = num_atoms_ket
      call MPI_Bcast(idx_atoms_ket, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
      count_mpi = 1
    end if
    call MPI_Bcast(max_num_cent,    count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(order_geo_total, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    call MPI_Bcast(num_geo_atoms,   count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
    if (num_geo_atoms>0) then
      allocate(idx_geo_atoms(num_geo_atoms), stat=ierr)
      if (ierr/=0) then
        call quit("gen1int_worker_geom_create>> failed to allocate idx_geo_atoms!")
      end if
      count_mpi = num_geo_atoms
      call MPI_Bcast(idx_geo_atoms, count_mpi, MPI_INTEGERK, manager_mpi, my_MPI_COMM_WORLD, ierr_mpi)
      count_mpi = 1
    end if
    ! creates N-ary trees for geometric derivatives
    call Gen1IntAPINaryTreeCreate(max_num_cent=max_ncent_bra,  &
                                  order_geo=order_geo_bra,     &
                                  num_geo_atoms=num_atoms_bra, &
                                  idx_geo_atoms=idx_atoms_bra, &
                                  nary_tree=nary_tree_bra)
    call Gen1IntAPINaryTreeCreate(max_num_cent=max_ncent_ket,  &
                                  order_geo=order_geo_ket,     &
                                  num_geo_atoms=num_atoms_ket, &
                                  idx_geo_atoms=idx_atoms_ket, &
                                  nary_tree=nary_tree_ket)
    call Gen1IntAPINaryTreeCreate(max_num_cent=max_num_cent,   &
                                  order_geo=order_geo_total,   &
                                  num_geo_atoms=num_geo_atoms, &
                                  idx_geo_atoms=idx_geo_atoms, &
                                  nary_tree=nary_tree_total)
    if (level_print>=5) then
      call Gen1IntAPINaryTreeView(nary_tree=nary_tree_bra, io_viewer=io_viewer)
      call Gen1IntAPINaryTreeView(nary_tree=nary_tree_ket, io_viewer=io_viewer)
      call Gen1IntAPINaryTreeView(nary_tree=nary_tree_total, io_viewer=io_viewer)
    end if
    return
  end subroutine gen1int_worker_geom_create
#endif

  !> \brief gets the atomic orbital (AO) density matrix
  !> \author Bin Gao
  !> \date 2012-03-09
  !> \param len_work is the length of Dalton/Dirac workspace
  !> \param wrk_space is the Dalton/Dirac workspace
  !> \return ao_dens is the AO density matrix
  subroutine gen1int_host_get_dens(ao_dens, len_work, wrk_space)
    use gen1int_matrix
    implicit none
    type(matrix), intent(inout) :: ao_dens
    integer, intent(in) :: len_work
    real(REALK), intent(inout) :: wrk_space(len_work)
! uses NCMOT, NBAST, NNASHX, N2BASX
#include "inforb.h"
! uses LUSIFC
#include "inftap.h"
    integer start_dv_mo     !start of active part of one-electron density matrix (MO) in the workspace
    integer start_dv_ao     !start of active part of one-electron density matrix (AO) in the workspace
    integer start_ao_dens   !start of AO density matrix
    integer start_left_wrk  !start of remaining workspace
    integer len_left_wrk    !length of remaining workspace
    logical get_dc          !if calculating DCAO
    logical get_dv          !if calculating DVAO
    logical close_sirifc    !if closing SIRIFC afterwards
    integer ierr            !error information
    logical found           !if the required data are found on SIRIFC
    ! start of active part of one-electron density matrix (MO)
    start_dv_mo = 1+NCMOT
    ! start of active part of one-electron density matrix (AO)
    start_dv_ao = start_dv_mo+1
    ! start of AO density matrix
    start_ao_dens = start_dv_ao+1
    ! start of remaining workspace
    start_left_wrk = start_ao_dens+N2BASX+1
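    ! workspace layout (1-based offsets):
    !   wrk_space(1 : NCMOT)                               molecular orbital coefficients
    !   wrk_space(start_dv_mo ...)                         DV (MO basis), unused here since get_dv = .false.
    !   wrk_space(start_dv_ao ...)                         DV (AO basis), unused here since get_dv = .false.
    !   wrk_space(start_ao_dens : start_ao_dens+N2BASX-1)  AO density matrix
    !   wrk_space(start_left_wrk : len_work)               free workspace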
    ! only calculates DCAO
    get_dc = .true.
    get_dv = .false.
    ! length of the remaining workspace
    len_left_wrk = len_work-start_left_wrk+1
    ! checks if the workspace is large enough
    if (len_left_wrk<0) &
      call STOPIT("GEN1INT", "gen1int_host_get_dens", start_left_wrk-1, len_work)
    ! opens file SIRIFC
    close_sirifc = LUSIFC<=0
    if (close_sirifc) &
      call GPOPEN(LUSIFC, "SIRIFC", "OLD", " ", "UNFORMATTED", ierr, .false.)
    rewind(LUSIFC)
    ! reads the molecular orbital coefficients
    wrk_space(1:NCMOT) = 0.0_8
#ifdef PRG_DIRAC
    print *, 'error: RD_SIRIFC not available in DIRAC'
    call quit('error: RD_SIRIFC not available in DIRAC')
#else
    call RD_SIRIFC("CMO", found, wrk_space(1), wrk_space(start_left_wrk), &
                   len_left_wrk)
#endif
    if (.not.found) then
      call quit("gen1int_host_get_dens>> CMO is not found on SIRIFC!")
    end if
    ! reads active part of one-electron density matrix (MO)
    if (get_dv) then
      wrk_space(start_dv_mo:start_dv_mo+NNASHX-1) = 0.0_8
#ifdef PRG_DIRAC
      print *, 'error: RD_SIRIFC not available in DIRAC'
      call quit('error: RD_SIRIFC not available in DIRAC')
#else
      call RD_SIRIFC("DV", found, wrk_space(start_dv_mo), &
                     wrk_space(start_left_wrk), len_left_wrk)
#endif
      if (.not.found) then
        call quit("gen1int_host_get_dens>> DV is not found on SIRIFC!")
      end if
      wrk_space(start_dv_ao:start_dv_ao+N2BASX-1) = 0.0_8
    end if
    if (close_sirifc) call GPCLOSE(LUSIFC, "KEEP")
    ! gets the AO density matrix, using
    !
    ! FCKDEN(GETDC,GETDV,DCAO,DVAO,CMO,DV,WRK,LFRSAV)
    ! Input:
    !   GETDC   if true calculate DCAO
    !   GETDV   if true calculate DVAO
    !   CMO(*)  molecular orbital coefficients
    !   DV(*)   active part of one-electron density matrix (over MO's)
    ! Scratch:
    !   WRK(LFRSAV)
    call FCKDEN(get_dc, get_dv, wrk_space(start_ao_dens), wrk_space(start_dv_ao), &
                wrk_space(1), wrk_space(start_dv_mo), wrk_space(start_left_wrk),  &
                len_left_wrk)
    ! sums DCAO and DVAO
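    ! (note that ierr is reused below as a plain loop index, not as an error flag)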
    if (get_dv) then
      start_dv_ao = start_dv_ao-1
      do ierr = 0, N2BASX-1
        wrk_space(start_ao_dens+ierr) = wrk_space(start_ao_dens+ierr) &
                                      + wrk_space(start_dv_ao+ierr+1)
      end do
    end if
    ! sets AO density matrix
    call MatCreate(A=ao_dens, num_row=NBAST, info_mat=ierr)
    if (ierr/=0) then
      call quit("gen1int_host_get_dens>> failed to create ao_dens!")
    end if
    call MatSetValues(ao_dens, 1, NBAST, 1, NBAST, &
                      values=wrk_space(start_ao_dens), trans=.false.)
    return
  end subroutine gen1int_host_get_dens
