# flake8: noqa
import os
import tempfile

from monty.termcolor import cprint
from pymatgen.io.abinit.abiobjects import *
from pymatgen.io.abinit.pseudos import Pseudo, PseudoTable, PseudoParser
from pymatgen.io.abinit.netcdf import NetcdfReader
from .launcher import PyFlowScheduler, PyLauncher
from .qadapters import show_qparams, all_qtypes
from .wrappers import Mrgscr, Mrgddb, Cut3D, Fold2Bloch
from .nodes import Status
from .tasks import *
from .tasks import EphTask, ElasticTask
from .works import *
from .flows import (Flow, G0W0WithQptdmFlow, bandstructure_flow, PhononFlow, phonon_conv_flow,
    g0w0_flow, NonLinearCoeffFlow)
from .abitimer import AbinitTimerParser, AbinitTimerSection
from pymatgen.io.abinit.abiinspect import GroundStateScfCycle, D2DEScfCycle, yaml_read_kpoints, yaml_read_irred_perts

from .events import EventsParser, autodoc_event_handlers
#from abipy.flowtk.works import *
#from abipy.flowtk.gs_works import EosWork
from abipy.flowtk.dfpt_works import ElasticWork, NscfDdksWork


def flow_main(main):  # pragma: no cover
    """
    This decorator is used to decorate main functions producing `Flows`.
    It adds the initialization of the logger and an argument parser that allows one to select
    the loglevel, the workdir of the flow, as well as the YAML file with the parameters of the `TaskManager`.
    The main function shall have the signature:

        main(options)

    where `options` is the container with the command-line options generated by `ArgumentParser`.

    Args:
        main: main function.
    """
    from functools import wraps

    @wraps(main)
    def wrapper(*args, **kwargs):
        # Build the parser and parse the input arguments.
        parser = build_flow_main_parser()
        options = parser.parse_args()

        # loglevel is bound to the string value obtained from the command line argument.
        # Convert to upper case to allow the user to specify --loglevel=DEBUG or --loglevel=debug.
        import logging
        numeric_level = getattr(logging, options.loglevel.upper(), None)
        if not isinstance(numeric_level, int):
            raise ValueError('Invalid log level: %s' % options.loglevel)
        logging.basicConfig(level=numeric_level)

        # Instantiate the manager.
        options.manager = TaskManager.as_manager(options.manager)

        if options.tempdir:
            options.workdir = tempfile.mkdtemp()
            print("Working in temporary directory", options.workdir)

        def execute():
            """This is the function that performs the work depending on options."""
            flow = main(options)

            if options.plot:
                flow.plot_networkx(tight_layout=True, with_edge_labels=True)

            if options.graphviz:
                graph = flow.get_graphviz()  # engine=options.engine
                directory = tempfile.mkdtemp()
                print("Producing source files in:", directory)
                graph.view(directory=directory, cleanup=False)

            if options.abivalidate:
                print("Validating flow input files...")
                isok, errors = flow.abivalidate_inputs()
                if not isok:
                    for e in errors:
                        if e.retcode == 0: continue
                        lines = e.log_file.readlines()
                        i = len(lines) - 50 if len(lines) >= 50 else 0
                        print("Last 50 lines from log file:")
                        print("".join(lines[i:]))
                    raise RuntimeError("flow.abivalidate_inputs failed. See messages above.")
                else:
                    print("Validation succeeded")

            if options.remove and os.path.isdir(flow.workdir):
                print("Removing old directory:", flow.workdir)
                import shutil
                shutil.rmtree(flow.workdir)

            if options.dry_run:
                print("Dry-run mode.")
                retcode = 0
            elif options.scheduler:
                retcode = flow.make_scheduler().start()
                if retcode == 0:
                    retcode = 0 if flow.all_ok else 1
            elif options.batch:
                retcode = flow.batch()
            else:
                # Default behaviour.
                retcode = flow.build_and_pickle_dump()

            cprint("Return code: %d" % retcode, "red" if retcode != 0 else "green")
            return retcode

        if options.prof:
            # Profile execute
            import pstats, cProfile
            cProfile.runctx("execute()", globals(), locals(), "Profile.prof")
            s = pstats.Stats("Profile.prof")
            s.strip_dirs().sort_stats("time").print_stats()
            return 0
        else:
            return execute()

    return wrapper
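
# A minimal usage sketch of `flow_main` (kept as a comment on purpose; `build_my_flow`
# is a hypothetical helper used only for illustration, not part of abipy):
#
#   @flow_main
#   def main(options):
#       # options carries the attributes defined by build_flow_main_parser
#       # (workdir, manager, scheduler, ...); return the Flow to be executed.
#       return build_my_flow(workdir=options.workdir, manager=options.manager)
#
#   if __name__ == "__main__":
#       import sys
#       sys.exit(main())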


def build_flow_main_parser():
    """
    Build and return the parser used in the abipy/data/runs scripts.
    """
    import argparse
    parser = argparse.ArgumentParser()

    parser.add_argument('--loglevel', default="ERROR", type=str,
                        help="Set the loglevel. Possible values: CRITICAL, ERROR (default), WARNING, INFO, DEBUG.")
    parser.add_argument("-w", '--workdir', default="", type=str, help="Working directory of the flow.")
    parser.add_argument("-m", '--manager', default=None,
                        help="YAML file with the parameters of the task manager. "
                             "Default is None, i.e. the manager is read from standard locations: "
                             "the working directory first, then ~/.abinit/abipy/manager.yml.")
    parser.add_argument("-s", '--scheduler', action="store_true", default=False,
                        help="Run the flow with the scheduler.")
    parser.add_argument("-b", '--batch', action="store_true", default=False, help="Run the flow in batch mode.")
    parser.add_argument("-r", "--remove", default=False, action="store_true", help="Remove old flow workdir.")
    parser.add_argument("-p", "--plot", default=False, action="store_true", help="Plot flow with networkx.")
    parser.add_argument("-g", "--graphviz", default=False, action="store_true", help="Plot flow with graphviz.")
    parser.add_argument("-d", "--dry-run", default=False, action="store_true", help="Don't write directory with flow.")
    parser.add_argument("-a", "--abivalidate", default=False, action="store_true", help="Call Abinit to validate input files.")
    parser.add_argument("-t", "--tempdir", default=False, action="store_true", help="Execute flow in temporary directory.")
    parser.add_argument("--prof", action="store_true", default=False, help="Profile code with cProfile.")

    return parser
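
# Typical command lines for a script decorated with `flow_main` (illustrative only;
# `run_si_ebands.py` is a hypothetical script name, the flags are those defined above):
#
#   python run_si_ebands.py --workdir=/tmp/flow_si --abivalidate   # validate the Abinit input files
#   python run_si_ebands.py -w /tmp/flow_si -s --loglevel=INFO     # build the flow and run it with the scheduler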