1#!/usr/bin/env python3
2# Copyright (c) Facebook, Inc. and its affiliates.
3#
4# This source code is licensed under the MIT license found in the
5# LICENSE file in the root directory of this source tree.
6
7from __future__ import absolute_import, division, print_function, unicode_literals
8
9import json
10import os
11import shutil
12import stat
13import subprocess
14import sys
15
16from .dyndeps import create_dyn_dep_munger
17from .envfuncs import Env, add_path_entry, path_search
18from .fetcher import copy_if_different
19from .runcmd import run_cmd
20
21
22class BuilderBase(object):
23    def __init__(
24        self,
25        build_opts,
26        ctx,
27        manifest,
28        src_dir,
29        build_dir,
30        inst_dir,
31        env=None,
32        final_install_prefix=None,
33    ):
34        self.env = Env()
35        if env:
36            self.env.update(env)
37
38        subdir = manifest.get("build", "subdir", ctx=ctx)
39        if subdir:
40            src_dir = os.path.join(src_dir, subdir)
41
42        self.ctx = ctx
43        self.src_dir = src_dir
44        self.build_dir = build_dir or src_dir
45        self.inst_dir = inst_dir
46        self.build_opts = build_opts
47        self.manifest = manifest
48        self.final_install_prefix = final_install_prefix
49
50    def _get_cmd_prefix(self):
51        if self.build_opts.is_windows():
52            vcvarsall = self.build_opts.get_vcvars_path()
53            if vcvarsall is not None:
                # Since vcvarsall.bat sets rather a large number of variables,
                # we mildly abuse the cmd quoting rules to assemble a command
                # that calls the script to prep the environment and then
                # triggers the actual command that we wanted to run.
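                # As a concrete illustration (the vcvarsall path is whatever
                # get_vcvars_path() returned), a command such as
                #   ["cmake", "--build", "."]
                # becomes
                #   [vcvarsall, "amd64", "&&", "cmake", "--build", "."]
                # so cmd.exe sources the MSVC environment before running it.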
58                return [vcvarsall, "amd64", "&&"]
59        return []
60
61    def _run_cmd(self, cmd, cwd=None, env=None, use_cmd_prefix=True, allow_fail=False):
62        if env:
63            e = self.env.copy()
64            e.update(env)
65            env = e
66        else:
67            env = self.env
68
69        if use_cmd_prefix:
70            cmd_prefix = self._get_cmd_prefix()
71            if cmd_prefix:
72                cmd = cmd_prefix + cmd
73
74        log_file = os.path.join(self.build_dir, "getdeps_build.log")
75        return run_cmd(
76            cmd=cmd,
77            env=env,
78            cwd=cwd or self.build_dir,
79            log_file=log_file,
80            allow_fail=allow_fail,
81        )
82
83    def build(self, install_dirs, reconfigure):
84        print("Building %s..." % self.manifest.name)
85
86        if self.build_dir is not None:
87            if not os.path.isdir(self.build_dir):
88                os.makedirs(self.build_dir)
89                reconfigure = True
90
91        self._build(install_dirs=install_dirs, reconfigure=reconfigure)
92
        # On Windows, emit a wrapper script that can be used to run build artifacts
        # directly from the build directory without installing them.  $PATH needs to
        # be updated to include all of the directories containing the runtime
        # library dependencies in order to run the binaries.
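        # The script written below (run.ps1 in the build directory) prepends those
        # directories to $env:PATH before invoking the requested command; the exact
        # wrapper contents come from the dyndeps munger's emit_dev_run_script.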
97        if self.build_opts.is_windows():
98            script_path = self.get_dev_run_script_path()
99            dep_munger = create_dyn_dep_munger(self.build_opts, install_dirs)
100            dep_dirs = self.get_dev_run_extra_path_dirs(install_dirs, dep_munger)
101            dep_munger.emit_dev_run_script(script_path, dep_dirs)
102
103    def run_tests(
104        self, install_dirs, schedule_type, owner, test_filter, retry, no_testpilot
105    ):
106        """Execute any tests that we know how to run.  If they fail,
107        raise an exception."""
108        pass
109
110    def _build(self, install_dirs, reconfigure):
111        """Perform the build.
112        install_dirs contains the list of installation directories for
113        the dependencies of this project.
114        reconfigure will be set to true if the fetcher determined
115        that the sources have changed in such a way that the build
116        system needs to regenerate its rules."""
117        pass
118
119    def _compute_env(self, install_dirs):
120        # CMAKE_PREFIX_PATH is only respected when passed through the
121        # environment, so we construct an appropriate path to pass down
122        return self.build_opts.compute_env_for_install_dirs(
123            install_dirs, env=self.env, manifest=self.manifest
124        )
125
126    def get_dev_run_script_path(self):
127        assert self.build_opts.is_windows()
128        return os.path.join(self.build_dir, "run.ps1")
129
130    def get_dev_run_extra_path_dirs(self, install_dirs, dep_munger=None):
131        assert self.build_opts.is_windows()
132        if dep_munger is None:
133            dep_munger = create_dyn_dep_munger(self.build_opts, install_dirs)
134        return dep_munger.compute_dependency_paths(self.build_dir)
135
136
137class MakeBuilder(BuilderBase):
138    def __init__(
139        self,
140        build_opts,
141        ctx,
142        manifest,
143        src_dir,
144        build_dir,
145        inst_dir,
146        build_args,
147        install_args,
148        test_args,
149    ):
150        super(MakeBuilder, self).__init__(
151            build_opts, ctx, manifest, src_dir, build_dir, inst_dir
152        )
153        self.build_args = build_args or []
154        self.install_args = install_args or []
155        self.test_args = test_args
156
157    def _get_prefix(self):
158        return ["PREFIX=" + self.inst_dir, "prefix=" + self.inst_dir]
159
160    def _build(self, install_dirs, reconfigure):
161        env = self._compute_env(install_dirs)
162
163        # Need to ensure that PREFIX is set prior to install because
164        # libbpf uses it when generating its pkg-config file.
165        # The lowercase prefix is used by some projects.
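        # The resulting invocation looks roughly like (job count and path illustrative):
        #   make -j8 <build_args...> PREFIX=/inst/dir prefix=/inst/dir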
166        cmd = (
167            ["make", "-j%s" % self.build_opts.num_jobs]
168            + self.build_args
169            + self._get_prefix()
170        )
171        self._run_cmd(cmd, env=env)
172
173        install_cmd = ["make"] + self.install_args + self._get_prefix()
174        self._run_cmd(install_cmd, env=env)
175
176    def run_tests(
177        self, install_dirs, schedule_type, owner, test_filter, retry, no_testpilot
178    ):
179        if not self.test_args:
180            return
181
182        env = self._compute_env(install_dirs)
183
184        cmd = ["make"] + self.test_args + self._get_prefix()
185        self._run_cmd(cmd, env=env)
186
187
188class CMakeBootStrapBuilder(MakeBuilder):
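    # Used to build CMake itself from source: run its ./bootstrap script to
    # configure, then fall through to MakeBuilder's make / make install steps.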
189    def _build(self, install_dirs, reconfigure):
190        self._run_cmd(
191            [
192                "./bootstrap",
193                "--prefix=" + self.inst_dir,
194                f"--parallel={self.build_opts.num_jobs}",
195            ]
196        )
197        super(CMakeBootStrapBuilder, self)._build(install_dirs, reconfigure)
198
199
200class AutoconfBuilder(BuilderBase):
201    def __init__(
202        self,
203        build_opts,
204        ctx,
205        manifest,
206        src_dir,
207        build_dir,
208        inst_dir,
209        args,
210        conf_env_args,
211    ):
212        super(AutoconfBuilder, self).__init__(
213            build_opts, ctx, manifest, src_dir, build_dir, inst_dir
214        )
215        self.args = args or []
216        self.conf_env_args = conf_env_args or {}
217
218    def _build(self, install_dirs, reconfigure):
219        configure_path = os.path.join(self.src_dir, "configure")
220        autogen_path = os.path.join(self.src_dir, "autogen.sh")
221
222        env = self._compute_env(install_dirs)
223
        # Some configure scripts need additional env values derived from running commands
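        # For example, a hypothetical manifest entry such as
        #   SODIUM_PREFIX = ["pkg-config", "--variable=prefix", "libsodium"]
        # would run that command and export its stdout as $SODIUM_PREFIX below.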
225        for (k, cmd_args) in self.conf_env_args.items():
226            out = (
227                subprocess.check_output(cmd_args, env=dict(env.items()))
228                .decode("utf-8")
229                .strip()
230            )
231            if out:
232                env.set(k, out)
233
234        if not os.path.exists(configure_path):
235            print("%s doesn't exist, so reconfiguring" % configure_path)
            # This libtoolize call is a bit gross; the issue is that
            # `autoreconf` as invoked by libsodium's `autogen.sh` doesn't
            # seem to realize that it should invoke libtoolize, and it then
            # errors out when the configure script references a libtool
            # related symbol.
241            self._run_cmd(["libtoolize"], cwd=self.src_dir, env=env)
242
243            # We generally prefer to call the `autogen.sh` script provided
244            # by the project on the basis that it may know more than plain
245            # autoreconf does.
246            if os.path.exists(autogen_path):
247                self._run_cmd(["bash", autogen_path], cwd=self.src_dir, env=env)
248            else:
249                self._run_cmd(["autoreconf", "-ivf"], cwd=self.src_dir, env=env)
250        configure_cmd = [configure_path, "--prefix=" + self.inst_dir] + self.args
251        self._run_cmd(configure_cmd, env=env)
252        self._run_cmd(["make", "-j%s" % self.build_opts.num_jobs], env=env)
253        self._run_cmd(["make", "install"], env=env)
254
255
256class Iproute2Builder(BuilderBase):
257    # ./configure --prefix does not work for iproute2.
    # Thus, explicitly copy sources from src_dir to build_dir, build,
    # and then install to inst_dir using DESTDIR.
    # Lastly, also copy include and lib from build_dir to inst_dir.
261    def __init__(self, build_opts, ctx, manifest, src_dir, build_dir, inst_dir):
262        super(Iproute2Builder, self).__init__(
263            build_opts, ctx, manifest, src_dir, build_dir, inst_dir
264        )
265
266    def _patch(self):
267        # FBOSS build currently depends on an old version of iproute2 (commit
268        # 7ca63aef7d1b0c808da0040c6b366ef7a61f38c1). This is missing a commit
269        # (ae717baf15fb4d30749ada3948d9445892bac239) needed to build iproute2
        # successfully. Apply the equivalent fix here by including stdint.h.
271        # Reference: https://fburl.com/ilx9g5xm
272        with open(self.build_dir + "/tc/tc_core.c", "r") as f:
273            data = f.read()
274
275        with open(self.build_dir + "/tc/tc_core.c", "w") as f:
276            f.write("#include <stdint.h>\n")
277            f.write(data)
278
279    def _build(self, install_dirs, reconfigure):
280        configure_path = os.path.join(self.src_dir, "configure")
281
282        env = self.env.copy()
283        self._run_cmd([configure_path], env=env)
284        shutil.rmtree(self.build_dir)
285        shutil.copytree(self.src_dir, self.build_dir)
286        self._patch()
287        self._run_cmd(["make", "-j%s" % self.build_opts.num_jobs], env=env)
288        install_cmd = ["make", "install", "DESTDIR=" + self.inst_dir]
289
290        for d in ["include", "lib"]:
291            if not os.path.isdir(os.path.join(self.inst_dir, d)):
292                shutil.copytree(
293                    os.path.join(self.build_dir, d), os.path.join(self.inst_dir, d)
294                )
295
296        self._run_cmd(install_cmd, env=env)
297
298
299class BistroBuilder(BuilderBase):
300    def _build(self, install_dirs, reconfigure):
301        p = os.path.join(self.src_dir, "bistro", "bistro")
302        env = self._compute_env(install_dirs)
303        env["PATH"] = env["PATH"] + ":" + os.path.join(p, "bin")
304        env["TEMPLATES_PATH"] = os.path.join(p, "include", "thrift", "templates")
305        self._run_cmd(
306            [
307                os.path.join(".", "cmake", "run-cmake.sh"),
308                "Release",
309                "-DCMAKE_INSTALL_PREFIX=" + self.inst_dir,
310            ],
311            cwd=p,
312            env=env,
313        )
314        self._run_cmd(
315            [
316                "make",
317                "install",
318                "-j",
319                str(self.build_opts.num_jobs),
320            ],
321            cwd=os.path.join(p, "cmake", "Release"),
322            env=env,
323        )
324
325    def run_tests(
326        self, install_dirs, schedule_type, owner, test_filter, retry, no_testpilot
327    ):
328        env = self._compute_env(install_dirs)
329        build_dir = os.path.join(self.src_dir, "bistro", "bistro", "cmake", "Release")
330        NUM_RETRIES = 5
331        for i in range(NUM_RETRIES):
332            cmd = ["ctest", "--output-on-failure"]
333            if i > 0:
334                cmd.append("--rerun-failed")
335            cmd.append(build_dir)
336            try:
337                self._run_cmd(
338                    cmd,
339                    cwd=build_dir,
340                    env=env,
341                )
342            except Exception:
343                print(f"Tests failed... retrying ({i+1}/{NUM_RETRIES})")
344            else:
345                return
346        raise Exception(f"Tests failed even after {NUM_RETRIES} retries")
347
348
349class CMakeBuilder(BuilderBase):
350    MANUAL_BUILD_SCRIPT = """\
351#!{sys.executable}
352
353from __future__ import absolute_import, division, print_function, unicode_literals
354
355import argparse
356import subprocess
357import sys
358
359CMAKE = {cmake!r}
360CTEST = {ctest!r}
361SRC_DIR = {src_dir!r}
362BUILD_DIR = {build_dir!r}
363INSTALL_DIR = {install_dir!r}
364CMD_PREFIX = {cmd_prefix!r}
365CMAKE_ENV = {env_str}
366CMAKE_DEFINE_ARGS = {define_args_str}
367
368
369def get_jobs_argument(num_jobs_arg: int) -> str:
370    if num_jobs_arg > 0:
371        return "-j" + str(num_jobs_arg)
372
373    import multiprocessing
374    num_jobs = multiprocessing.cpu_count() // 2
375    return "-j" + str(num_jobs)
376
377
378def main():
379    ap = argparse.ArgumentParser()
380    ap.add_argument(
381      "cmake_args",
382      nargs=argparse.REMAINDER,
383      help='Any extra arguments after an "--" argument will be passed '
384      "directly to CMake."
385    )
386    ap.add_argument(
387      "--mode",
388      choices=["configure", "build", "install", "test"],
389      default="configure",
      help="The mode to run: configure, build, install, or test.  "
      "Defaults to configure",
392    )
393    ap.add_argument(
394      "--build",
395      action="store_const",
396      const="build",
397      dest="mode",
398      help="An alias for --mode=build",
399    )
400    ap.add_argument(
401      "-j",
402      "--num-jobs",
403      action="store",
404      type=int,
405      default=0,
406      help="Run the build or tests with the specified number of parallel jobs",
407    )
408    ap.add_argument(
409      "--install",
410      action="store_const",
411      const="install",
412      dest="mode",
413      help="An alias for --mode=install",
414    )
415    ap.add_argument(
416      "--test",
417      action="store_const",
418      const="test",
419      dest="mode",
420      help="An alias for --mode=test",
421    )
422    args = ap.parse_args()
423
424    # Strip off a leading "--" from the additional CMake arguments
425    if args.cmake_args and args.cmake_args[0] == "--":
426        args.cmake_args = args.cmake_args[1:]
427
428    env = CMAKE_ENV
429
430    if args.mode == "configure":
431        full_cmd = CMD_PREFIX + [CMAKE, SRC_DIR] + CMAKE_DEFINE_ARGS + args.cmake_args
432    elif args.mode in ("build", "install"):
433        target = "all" if args.mode == "build" else "install"
434        full_cmd = CMD_PREFIX + [
435                CMAKE,
436                "--build",
437                BUILD_DIR,
438                "--target",
439                target,
440                "--config",
441                "Release",
442                get_jobs_argument(args.num_jobs),
443        ] + args.cmake_args
444    elif args.mode == "test":
445        full_cmd = CMD_PREFIX + [
446            {dev_run_script}CTEST,
447            "--output-on-failure",
448            get_jobs_argument(args.num_jobs),
449        ] + args.cmake_args
450    else:
451        ap.error("unknown invocation mode: %s" % (args.mode,))
452
453    cmd_str = " ".join(full_cmd)
454    print("Running: %r" % (cmd_str,))
455    proc = subprocess.run(full_cmd, env=env, cwd=BUILD_DIR)
456    sys.exit(proc.returncode)
457
458
459if __name__ == "__main__":
460    main()
461"""
462
463    def __init__(
464        self,
465        build_opts,
466        ctx,
467        manifest,
468        src_dir,
469        build_dir,
470        inst_dir,
471        defines,
472        loader=None,
473        final_install_prefix=None,
474        extra_cmake_defines=None,
475    ):
476        super(CMakeBuilder, self).__init__(
477            build_opts,
478            ctx,
479            manifest,
480            src_dir,
481            build_dir,
482            inst_dir,
483            final_install_prefix=final_install_prefix,
484        )
485        self.defines = defines or {}
486        if extra_cmake_defines:
487            self.defines.update(extra_cmake_defines)
488        self.loader = loader
489
490    def _invalidate_cache(self):
491        for name in [
492            "CMakeCache.txt",
493            "CMakeFiles/CMakeError.log",
494            "CMakeFiles/CMakeOutput.log",
495        ]:
496            name = os.path.join(self.build_dir, name)
497            if os.path.isdir(name):
498                shutil.rmtree(name)
499            elif os.path.exists(name):
500                os.unlink(name)
501
502    def _needs_reconfigure(self):
503        for name in ["CMakeCache.txt", "build.ninja"]:
504            name = os.path.join(self.build_dir, name)
505            if not os.path.exists(name):
506                return True
507        return False
508
509    def _write_build_script(self, **kwargs):
510        env_lines = ["    {!r}: {!r},".format(k, v) for k, v in kwargs["env"].items()]
511        kwargs["env_str"] = "\n".join(["{"] + env_lines + ["}"])
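        # env_str renders as a Python dict literal that can be pasted verbatim
        # into the generated script, e.g. (values illustrative):
        #   {
        #       'PATH': '/usr/bin:/bin',
        #   }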
512
513        if self.build_opts.is_windows():
514            kwargs["dev_run_script"] = '"powershell.exe", {!r}, '.format(
515                self.get_dev_run_script_path()
516            )
517        else:
518            kwargs["dev_run_script"] = ""
519
520        define_arg_lines = ["["]
521        for arg in kwargs["define_args"]:
522            # Replace the CMAKE_INSTALL_PREFIX argument to use the INSTALL_DIR
523            # variable that we define in the MANUAL_BUILD_SCRIPT code.
524            if arg.startswith("-DCMAKE_INSTALL_PREFIX="):
525                value = "    {!r}.format(INSTALL_DIR),".format(
526                    "-DCMAKE_INSTALL_PREFIX={}"
527                )
528            else:
529                value = "    {!r},".format(arg)
530            define_arg_lines.append(value)
531        define_arg_lines.append("]")
532        kwargs["define_args_str"] = "\n".join(define_arg_lines)
533
534        # In order to make it easier for developers to manually run builds for
535        # CMake-based projects, write out some build scripts that can be used to invoke
536        # CMake manually.
537        build_script_path = os.path.join(self.build_dir, "run_cmake.py")
538        script_contents = self.MANUAL_BUILD_SCRIPT.format(**kwargs)
539        with open(build_script_path, "wb") as f:
540            f.write(script_contents.encode())
541        os.chmod(build_script_path, 0o755)
542
543    def _compute_cmake_define_args(self, env):
544        defines = {
545            "CMAKE_INSTALL_PREFIX": self.final_install_prefix or self.inst_dir,
546            "BUILD_SHARED_LIBS": "OFF",
547            # Some of the deps (rsocket) default to UBSAN enabled if left
548            # unspecified.  Some of the deps fail to compile in release mode
549            # due to warning->error promotion.  RelWithDebInfo is the happy
550            # medium.
551            "CMAKE_BUILD_TYPE": "RelWithDebInfo",
552        }
553        if "SANDCASTLE" not in os.environ:
            # We sometimes see intermittent ccache related breakages on some
            # of the FB internal CI hosts, so we only enable ccache when we
            # are not running in that environment.
557            ccache = path_search(env, "ccache")
558            if ccache:
559                defines["CMAKE_CXX_COMPILER_LAUNCHER"] = ccache
560        else:
561            # rocksdb does its own probing for ccache.
562            # Ensure that it is disabled on sandcastle
563            env["CCACHE_DISABLE"] = "1"
564            # Some sandcastle hosts have broken ccache related dirs, and
565            # even though we've asked for it to be disabled ccache is
566            # still invoked by rocksdb's cmake.
567            # Redirect its config directory to somewhere that is guaranteed
568            # fresh to us, and that won't have any ccache data inside.
569            env["CCACHE_DIR"] = f"{self.build_opts.scratch_dir}/ccache"
570
571        if "GITHUB_ACTIONS" in os.environ and self.build_opts.is_windows():
572            # GitHub actions: the host has both gcc and msvc installed, and
573            # the default behavior of cmake is to prefer gcc.
574            # Instruct cmake that we want it to use cl.exe; this is important
575            # because Boost prefers cl.exe and the mismatch results in cmake
576            # with gcc not being able to find boost built with cl.exe.
577            defines["CMAKE_C_COMPILER"] = "cl.exe"
578            defines["CMAKE_CXX_COMPILER"] = "cl.exe"
579
580        if self.build_opts.is_darwin():
581            # Try to persuade cmake to set the rpath to match the lib
582            # dirs of the dependencies.  This isn't automatic, and to
583            # make things more interesting, cmake uses `;` as the path
584            # separator, so translate the runtime path to something
585            # that cmake will parse
586            defines["CMAKE_INSTALL_RPATH"] = ";".join(
587                env.get("DYLD_LIBRARY_PATH", "").split(":")
588            )
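            # e.g. DYLD_LIBRARY_PATH="/deps/foo/lib:/deps/bar/lib" (illustrative)
            # becomes CMAKE_INSTALL_RPATH="/deps/foo/lib;/deps/bar/lib"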
589            # Tell cmake that we want to set the rpath in the tree
590            # at build time.  Without this the rpath is only set
591            # at the moment that the binaries are installed.  That
592            # default is problematic for example when using the
593            # gtest integration in cmake which runs the built test
594            # executables during the build to discover the set of
595            # tests.
596            defines["CMAKE_BUILD_WITH_INSTALL_RPATH"] = "ON"
597
598        boost_169_is_required = False
599        if self.loader:
600            for m in self.loader.manifests_in_dependency_order():
601                preinstalled = m.get_section_as_dict("preinstalled.env", self.ctx)
602                boost_169_is_required = "BOOST_ROOT_1_69_0" in preinstalled.keys()
603                if boost_169_is_required:
604                    break
605
606        if (
607            boost_169_is_required
608            and self.build_opts.allow_system_packages
609            and self.build_opts.host_type.get_package_manager()
610            and self.build_opts.host_type.get_package_manager() == "rpm"
611        ):
            # Boost 1.69 rpms don't install cmake config to the system, so point
            # to the headers and libraries explicitly
613            defines["BOOST_INCLUDEDIR"] = "/usr/include/boost169"
614            defines["BOOST_LIBRARYDIR"] = "/usr/lib64/boost169"
615
616        defines.update(self.defines)
617        define_args = ["-D%s=%s" % (k, v) for (k, v) in defines.items()]
618
619        # if self.build_opts.is_windows():
620        #    define_args += ["-G", "Visual Studio 15 2017 Win64"]
621        define_args += ["-G", "Ninja"]
622
623        return define_args
624
625    def _build(self, install_dirs, reconfigure):
626        reconfigure = reconfigure or self._needs_reconfigure()
627
628        env = self._compute_env(install_dirs)
629        if not self.build_opts.is_windows() and self.final_install_prefix:
630            env["DESTDIR"] = self.inst_dir
631
632        # Resolve the cmake that we installed
633        cmake = path_search(env, "cmake")
634        if cmake is None:
635            raise Exception("Failed to find CMake")
636
637        if reconfigure:
638            define_args = self._compute_cmake_define_args(env)
639            self._write_build_script(
640                cmd_prefix=self._get_cmd_prefix(),
641                cmake=cmake,
642                ctest=path_search(env, "ctest"),
643                env=env,
644                define_args=define_args,
645                src_dir=self.src_dir,
646                build_dir=self.build_dir,
647                install_dir=self.inst_dir,
648                sys=sys,
649            )
650
651            self._invalidate_cache()
652            self._run_cmd([cmake, self.src_dir] + define_args, env=env)
653
654        self._run_cmd(
655            [
656                cmake,
657                "--build",
658                self.build_dir,
659                "--target",
660                "install",
661                "--config",
662                "Release",
663                "-j",
664                str(self.build_opts.num_jobs),
665            ],
666            env=env,
667        )
668
669    def run_tests(
670        self, install_dirs, schedule_type, owner, test_filter, retry, no_testpilot
671    ):
672        env = self._compute_env(install_dirs)
673        ctest = path_search(env, "ctest")
674        cmake = path_search(env, "cmake")
675
676        # On Windows, we also need to update $PATH to include the directories that
677        # contain runtime library dependencies.  This is not needed on other platforms
678        # since CMake will emit RPATH properly in the binary so they can find these
679        # dependencies.
680        if self.build_opts.is_windows():
681            path_entries = self.get_dev_run_extra_path_dirs(install_dirs)
682            path = env.get("PATH")
683            if path:
684                path_entries.insert(0, path)
685            env["PATH"] = ";".join(path_entries)
686
687        # Don't use the cmd_prefix when running tests.  This is vcvarsall.bat on
688        # Windows.  vcvarsall.bat is only needed for the build, not tests.  It
689        # unfortunately fails if invoked with a long PATH environment variable when
690        # running the tests.
691        use_cmd_prefix = False
692
693        def get_property(test, propname, defval=None):
            """Extracts a named property from a cmake test info json blob.
            The properties look like:
            [{"name": "WORKING_DIRECTORY", "value": "something"}]
            We assume that it is invalid for the same named property to be
            listed more than once.
            """
701            props = test.get("properties", [])
702            for p in props:
703                if p.get("name", None) == propname:
704                    return p.get("value", defval)
705            return defval
706
707        def list_tests():
708            output = subprocess.check_output(
709                [ctest, "--show-only=json-v1"], env=env, cwd=self.build_dir
710            )
711            try:
712                data = json.loads(output.decode("utf-8"))
713            except ValueError as exc:
714                raise Exception(
715                    "Failed to decode cmake test info using %s: %s.  Output was: %r"
716                    % (ctest, str(exc), output)
717                )
718
719            tests = []
720            machine_suffix = self.build_opts.host_type.as_tuple_string()
721            for test in data["tests"]:
722                working_dir = get_property(test, "WORKING_DIRECTORY")
                labels = []
725                labels.append("tpx_test_config::buildsystem=getdeps")
726                labels.append("tpx_test_config::platform={}".format(machine_suffix))
727
728                if get_property(test, "DISABLED"):
729                    labels.append("disabled")
730                command = test["command"]
731                if working_dir:
732                    command = [cmake, "-E", "chdir", working_dir] + command
733
736                tests.append(
737                    {
738                        "type": "custom",
739                        "target": "%s-%s-getdeps-%s"
740                        % (self.manifest.name, test["name"], machine_suffix),
741                        "command": command,
742                        "labels": labels,
743                        "env": {},
744                        "required_paths": [],
745                        "contacts": [],
746                        "cwd": os.getcwd(),
747                    }
748                )
749            return tests
750
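        # Each entry returned by list_tests() follows the buck test info schema
        # that testpilot/tpx consume; it is serialized to .buck-test-info.json
        # below when one of those runners is available.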
751        if schedule_type == "continuous" or schedule_type == "testwarden":
            # For continuous and testwarden runs, disabling retry can give
            # better signals for flaky tests.
754            retry = 0
755
756        testpilot = path_search(env, "testpilot")
757        tpx = path_search(env, "tpx")
758        if (tpx or testpilot) and not no_testpilot:
759            buck_test_info = list_tests()
761
762            buck_test_info_name = os.path.join(self.build_dir, ".buck-test-info.json")
763            with open(buck_test_info_name, "w") as f:
764                json.dump(buck_test_info, f)
765
766            env.set("http_proxy", "")
767            env.set("https_proxy", "")
768            runs = []
769            from sys import platform
770
771            if platform == "win32":
772                machine_suffix = self.build_opts.host_type.as_tuple_string()
773                testpilot_args = [
774                    "parexec-testinfra.exe",
775                    "C:/tools/testpilot/sc_testpilot.par",
776                    # Need to force the repo type otherwise testpilot on windows
777                    # can be confused (presumably sparse profile related)
778                    "--force-repo",
779                    "fbcode",
780                    "--force-repo-root",
781                    self.build_opts.fbsource_dir,
782                    "--buck-test-info",
783                    buck_test_info_name,
784                    "--retry=%d" % retry,
785                    "-j=%s" % str(self.build_opts.num_jobs),
786                    "--test-config",
787                    "platform=%s" % machine_suffix,
788                    "buildsystem=getdeps",
789                    "--return-nonzero-on-failures",
790                ]
791            else:
792                testpilot_args = [
793                    tpx,
794                    "--buck-test-info",
795                    buck_test_info_name,
796                    "--retry=%d" % retry,
797                    "-j=%s" % str(self.build_opts.num_jobs),
798                    "--print-long-results",
799                ]
800
801            if owner:
802                testpilot_args += ["--contacts", owner]
803
804            if tpx and env:
805                testpilot_args.append("--env")
806                testpilot_args.extend(f"{key}={val}" for key, val in env.items())
807
808            if test_filter:
809                testpilot_args += ["--", test_filter]
810
811            if schedule_type == "continuous":
812                runs.append(
813                    [
814                        "--tag-new-tests",
815                        "--collection",
816                        "oss-continuous",
817                        "--purpose",
818                        "continuous",
819                    ]
820                )
821            elif schedule_type == "testwarden":
822                # One run to assess new tests
823                runs.append(
824                    [
825                        "--tag-new-tests",
826                        "--collection",
827                        "oss-new-test-stress",
828                        "--stress-runs",
829                        "10",
830                        "--purpose",
831                        "stress-run-new-test",
832                    ]
833                )
834                # And another for existing tests
835                runs.append(
836                    [
837                        "--tag-new-tests",
838                        "--collection",
839                        "oss-existing-test-stress",
840                        "--stress-runs",
841                        "10",
842                        "--purpose",
843                        "stress-run",
844                    ]
845                )
846            else:
847                runs.append(["--collection", "oss-diff", "--purpose", "diff"])
848
849            for run in runs:
850                self._run_cmd(
851                    testpilot_args + run,
852                    cwd=self.build_opts.fbcode_builder_dir,
853                    env=env,
854                    use_cmd_prefix=use_cmd_prefix,
855                )
856        else:
857            args = [ctest, "--output-on-failure", "-j", str(self.build_opts.num_jobs)]
858            if test_filter:
859                args += ["-R", test_filter]
860
861            count = 0
862            while count <= retry:
863                retcode = self._run_cmd(
864                    args, env=env, use_cmd_prefix=use_cmd_prefix, allow_fail=True
865                )
866
867                if retcode == 0:
868                    break
869                if count == 0:
                    # Only add this option starting from the second attempt.
871                    args += ["--rerun-failed"]
872                count += 1
873            if retcode != 0:
                # Allow the except clause in getdeps.main to catch this and exit
                # gracefully.  This routes non-testpilot runs through the same failure
                # logic as failed testpilot runs, which may come in handy if post-test
                # processing is needed in the future.
876                raise subprocess.CalledProcessError(retcode, args)
877
878
879class NinjaBootstrap(BuilderBase):
880    def __init__(self, build_opts, ctx, manifest, build_dir, src_dir, inst_dir):
881        super(NinjaBootstrap, self).__init__(
882            build_opts, ctx, manifest, src_dir, build_dir, inst_dir
883        )
884
885    def _build(self, install_dirs, reconfigure):
886        self._run_cmd([sys.executable, "configure.py", "--bootstrap"], cwd=self.src_dir)
887        src_ninja = os.path.join(self.src_dir, "ninja")
888        dest_ninja = os.path.join(self.inst_dir, "bin/ninja")
889        bin_dir = os.path.dirname(dest_ninja)
890        if not os.path.exists(bin_dir):
891            os.makedirs(bin_dir)
892        shutil.copyfile(src_ninja, dest_ninja)
893        shutil.copymode(src_ninja, dest_ninja)
894
895
896class OpenSSLBuilder(BuilderBase):
897    def __init__(self, build_opts, ctx, manifest, build_dir, src_dir, inst_dir):
898        super(OpenSSLBuilder, self).__init__(
899            build_opts, ctx, manifest, src_dir, build_dir, inst_dir
900        )
901
902    def _build(self, install_dirs, reconfigure):
903        configure = os.path.join(self.src_dir, "Configure")
904
        # Prefer to resolve the perl that we installed from
        # our manifest on Windows, but fall back to the system
        # path on e.g. Darwin.
908        env = self.env.copy()
909        for d in install_dirs:
910            bindir = os.path.join(d, "bin")
911            add_path_entry(env, "PATH", bindir, append=False)
912
913        perl = path_search(env, "perl", "perl")
914
915        make_j_args = []
916        if self.build_opts.is_windows():
917            make = "nmake.exe"
918            args = ["VC-WIN64A-masm", "-utf-8"]
919        elif self.build_opts.is_darwin():
920            make = "make"
921            make_j_args = ["-j%s" % self.build_opts.num_jobs]
922            args = ["darwin64-x86_64-cc"]
923        elif self.build_opts.is_linux():
924            make = "make"
925            make_j_args = ["-j%s" % self.build_opts.num_jobs]
926            args = (
927                ["linux-x86_64"] if not self.build_opts.is_arm() else ["linux-aarch64"]
928            )
929        else:
930            raise Exception("don't know how to build openssl for %r" % self.ctx)
931
932        self._run_cmd(
933            [
934                perl,
935                configure,
936                "--prefix=%s" % self.inst_dir,
937                "--openssldir=%s" % self.inst_dir,
938            ]
939            + args
940            + [
941                "enable-static-engine",
942                "enable-capieng",
943                "no-makedepend",
944                "no-unit-test",
945                "no-tests",
946            ]
947        )
948        make_build = [make] + make_j_args
949        self._run_cmd(make_build)
950        make_install = [make, "install_sw", "install_ssldirs"]
951        self._run_cmd(make_install)
952
953
954class Boost(BuilderBase):
955    def __init__(
956        self, build_opts, ctx, manifest, src_dir, build_dir, inst_dir, b2_args
957    ):
958        children = os.listdir(src_dir)
959        assert len(children) == 1, "expected a single directory entry: %r" % (children,)
960        boost_src = children[0]
961        assert boost_src.startswith("boost")
962        src_dir = os.path.join(src_dir, children[0])
963        super(Boost, self).__init__(
964            build_opts, ctx, manifest, src_dir, build_dir, inst_dir
965        )
966        self.b2_args = b2_args
967
968    def _build(self, install_dirs, reconfigure):
969        env = self._compute_env(install_dirs)
970        linkage = ["static"]
971        if self.build_opts.is_windows():
972            linkage.append("shared")
973
974        args = []
975        if self.build_opts.is_darwin():
976            clang = subprocess.check_output(["xcrun", "--find", "clang"])
977            user_config = os.path.join(self.build_dir, "project-config.jam")
978            with open(user_config, "w") as jamfile:
979                jamfile.write("using clang : : %s ;\n" % clang.decode().strip())
980            args.append("--user-config=%s" % user_config)
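            # project-config.jam now contains a single line such as
            #   using clang : : /usr/bin/clang ;
            # where the path is whatever `xcrun --find clang` reported.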
981
982        for link in linkage:
983            if self.build_opts.is_windows():
984                bootstrap = os.path.join(self.src_dir, "bootstrap.bat")
985                self._run_cmd([bootstrap], cwd=self.src_dir, env=env)
986                args += ["address-model=64"]
987            else:
988                bootstrap = os.path.join(self.src_dir, "bootstrap.sh")
989                self._run_cmd(
990                    [bootstrap, "--prefix=%s" % self.inst_dir],
991                    cwd=self.src_dir,
992                    env=env,
993                )
994
995            b2 = os.path.join(self.src_dir, "b2")
996            self._run_cmd(
997                [
998                    b2,
999                    "-j%s" % self.build_opts.num_jobs,
1000                    "--prefix=%s" % self.inst_dir,
1001                    "--builddir=%s" % self.build_dir,
1002                ]
1003                + args
1004                + self.b2_args
1005                + [
1006                    "link=%s" % link,
1007                    "runtime-link=shared",
1008                    "variant=release",
1009                    "threading=multi",
1010                    "debug-symbols=on",
1011                    "visibility=global",
1012                    "-d2",
1013                    "install",
1014                ],
1015                cwd=self.src_dir,
1016                env=env,
1017            )
1018
1019
1020class NopBuilder(BuilderBase):
1021    def __init__(self, build_opts, ctx, manifest, src_dir, inst_dir):
1022        super(NopBuilder, self).__init__(
1023            build_opts, ctx, manifest, src_dir, None, inst_dir
1024        )
1025
1026    def build(self, install_dirs, reconfigure):
1027        print("Installing %s -> %s" % (self.src_dir, self.inst_dir))
1028        parent = os.path.dirname(self.inst_dir)
1029        if not os.path.exists(parent):
1030            os.makedirs(parent)
1031
1032        install_files = self.manifest.get_section_as_ordered_pairs(
1033            "install.files", self.ctx
1034        )
1035        if install_files:
1036            for src_name, dest_name in self.manifest.get_section_as_ordered_pairs(
1037                "install.files", self.ctx
1038            ):
1039                full_dest = os.path.join(self.inst_dir, dest_name)
1040                full_src = os.path.join(self.src_dir, src_name)
1041
1042                dest_parent = os.path.dirname(full_dest)
1043                if not os.path.exists(dest_parent):
1044                    os.makedirs(dest_parent)
1045                if os.path.isdir(full_src):
1046                    if not os.path.exists(full_dest):
1047                        shutil.copytree(full_src, full_dest)
1048                else:
1049                    shutil.copyfile(full_src, full_dest)
1050                    shutil.copymode(full_src, full_dest)
1051                    # This is a bit gross, but the mac ninja.zip doesn't
1052                    # give ninja execute permissions, so force them on
1053                    # for things that look like they live in a bin dir
1054                    if os.path.dirname(dest_name) == "bin":
1055                        st = os.lstat(full_dest)
1056                        os.chmod(full_dest, st.st_mode | stat.S_IXUSR)
1057        else:
1058            if not os.path.exists(self.inst_dir):
1059                shutil.copytree(self.src_dir, self.inst_dir)
1060
1061
1062class OpenNSABuilder(NopBuilder):
    # OpenNSA libraries are stored with git LFS. As a result, the fetcher fetches
    # LFS pointers and not the contents. Use git-lfs to pull the real contents
    # before copying to the install dir via NopBuilder.
    # In the future, if more builders require git-lfs, we would consider installing
    # git-lfs as part of the sandcastle infra instead of repeating similar
    # logic in each builder that requires git-lfs.
1069    def __init__(self, build_opts, ctx, manifest, src_dir, inst_dir):
1070        super(OpenNSABuilder, self).__init__(
1071            build_opts, ctx, manifest, src_dir, inst_dir
1072        )
1073
1074    def build(self, install_dirs, reconfigure):
1075        env = self._compute_env(install_dirs)
1076        self._run_cmd(["git", "lfs", "install", "--local"], cwd=self.src_dir, env=env)
1077        self._run_cmd(["git", "lfs", "pull"], cwd=self.src_dir, env=env)
1078
1079        super(OpenNSABuilder, self).build(install_dirs, reconfigure)
1080
1081
1082class SqliteBuilder(BuilderBase):
1083    def __init__(self, build_opts, ctx, manifest, src_dir, build_dir, inst_dir):
1084        super(SqliteBuilder, self).__init__(
1085            build_opts, ctx, manifest, src_dir, build_dir, inst_dir
1086        )
1087
1088    def _build(self, install_dirs, reconfigure):
1089        for f in ["sqlite3.c", "sqlite3.h", "sqlite3ext.h"]:
1090            src = os.path.join(self.src_dir, f)
1091            dest = os.path.join(self.build_dir, f)
1092            copy_if_different(src, dest)
1093
1094        cmake_lists = """
1095cmake_minimum_required(VERSION 3.1.3 FATAL_ERROR)
1096project(sqlite3 C)
1097add_library(sqlite3 STATIC sqlite3.c)
1098# These options are taken from the defaults in Makefile.msc in
1099# the sqlite distribution
1100target_compile_definitions(sqlite3 PRIVATE
1101    -DSQLITE_ENABLE_COLUMN_METADATA=1
1102    -DSQLITE_ENABLE_FTS3=1
1103    -DSQLITE_ENABLE_RTREE=1
1104    -DSQLITE_ENABLE_GEOPOLY=1
1105    -DSQLITE_ENABLE_JSON1=1
1106    -DSQLITE_ENABLE_STMTVTAB=1
1107    -DSQLITE_ENABLE_DBPAGE_VTAB=1
1108    -DSQLITE_ENABLE_DBSTAT_VTAB=1
1109    -DSQLITE_INTROSPECTION_PRAGMAS=1
1110    -DSQLITE_ENABLE_DESERIALIZE=1
1111)
1112install(TARGETS sqlite3)
1113install(FILES sqlite3.h sqlite3ext.h DESTINATION include)
1114            """
1115
1116        with open(os.path.join(self.build_dir, "CMakeLists.txt"), "w") as f:
1117            f.write(cmake_lists)
1118
1119        defines = {
1120            "CMAKE_INSTALL_PREFIX": self.inst_dir,
1121            "BUILD_SHARED_LIBS": "OFF",
1122            "CMAKE_BUILD_TYPE": "RelWithDebInfo",
1123        }
1124        define_args = ["-D%s=%s" % (k, v) for (k, v) in defines.items()]
1125        define_args += ["-G", "Ninja"]
1126
1127        env = self._compute_env(install_dirs)
1128
1129        # Resolve the cmake that we installed
1130        cmake = path_search(env, "cmake")
1131
1132        self._run_cmd([cmake, self.build_dir] + define_args, env=env)
1133        self._run_cmd(
1134            [
1135                cmake,
1136                "--build",
1137                self.build_dir,
1138                "--target",
1139                "install",
1140                "--config",
1141                "Release",
1142                "-j",
1143                str(self.build_opts.num_jobs),
1144            ],
1145            env=env,
1146        )
1147
1148
1149class CargoBuilder(BuilderBase):
1150    def __init__(
1151        self,
1152        build_opts,
1153        ctx,
1154        manifest,
1155        src_dir,
1156        build_dir,
1157        inst_dir,
1158        build_doc,
1159        workspace_dir,
1160        manifests_to_build,
1161        loader,
1162    ):
1163        super(CargoBuilder, self).__init__(
1164            build_opts, ctx, manifest, src_dir, build_dir, inst_dir
1165        )
1166        self.build_doc = build_doc
1167        self.ws_dir = workspace_dir
1168        self.manifests_to_build = manifests_to_build and manifests_to_build.split(",")
1169        self.loader = loader
1170
1171    def run_cargo(self, install_dirs, operation, args=None):
1172        args = args or []
1173        env = self._compute_env(install_dirs)
1174        # Enable using nightly features with stable compiler
1175        env["RUSTC_BOOTSTRAP"] = "1"
1176        env["LIBZ_SYS_STATIC"] = "1"
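        # LIBZ_SYS_STATIC is honored by the libz-sys crate's build script and makes
        # it link zlib statically rather than depending on a shared system zlib.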
1177        cmd = [
1178            "cargo",
1179            operation,
1180            "--workspace",
1181            "-j%s" % self.build_opts.num_jobs,
1182        ] + args
1183        self._run_cmd(cmd, cwd=self.workspace_dir(), env=env)
1184
1185    def build_source_dir(self):
1186        return os.path.join(self.build_dir, "source")
1187
1188    def workspace_dir(self):
1189        return os.path.join(self.build_source_dir(), self.ws_dir or "")
1190
1191    def manifest_dir(self, manifest):
1192        return os.path.join(self.build_source_dir(), manifest)
1193
1194    def recreate_dir(self, src, dst):
1195        if os.path.isdir(dst):
1196            shutil.rmtree(dst)
1197        shutil.copytree(src, dst)
1198
1199    def _build(self, install_dirs, reconfigure):
1200        build_source_dir = self.build_source_dir()
1201        self.recreate_dir(self.src_dir, build_source_dir)
1202
1203        dot_cargo_dir = os.path.join(build_source_dir, ".cargo")
1204        if not os.path.isdir(dot_cargo_dir):
1205            os.mkdir(dot_cargo_dir)
1206
1207        with open(os.path.join(dot_cargo_dir, "config"), "w+") as f:
1208            f.write(
1209                """\
1210[build]
1211target-dir = '''{}'''
1212
1213[net]
1214git-fetch-with-cli = true
1215
1216[profile.dev]
1217debug = false
1218incremental = false
1219""".format(
1220                    self.build_dir.replace("\\", "\\\\")
1221                )
1222            )
1223
1224        if self.ws_dir is not None:
1225            self._patchup_workspace()
1226
1227        try:
1228            from getdeps.facebook.rust import vendored_crates
1229
1230            vendored_crates(self.build_opts, build_source_dir)
1231        except ImportError:
            # This FB internal module isn't shipped to github,
            # so just rely on cargo downloading crates on its own
1234            pass
1235
1236        if self.manifests_to_build is None:
1237            self.run_cargo(
1238                install_dirs,
1239                "build",
1240                ["--out-dir", os.path.join(self.inst_dir, "bin"), "-Zunstable-options"],
1241            )
1242        else:
1243            for manifest in self.manifests_to_build:
1244                self.run_cargo(
1245                    install_dirs,
1246                    "build",
1247                    [
1248                        "--out-dir",
1249                        os.path.join(self.inst_dir, "bin"),
1250                        "-Zunstable-options",
1251                        "--manifest-path",
1252                        self.manifest_dir(manifest),
1253                    ],
1254                )
1255
1256        self.recreate_dir(build_source_dir, os.path.join(self.inst_dir, "source"))
1257
1258    def run_tests(
1259        self, install_dirs, schedule_type, owner, test_filter, retry, no_testpilot
1260    ):
1261        if test_filter:
1262            args = ["--", test_filter]
1263        else:
1264            args = []
1265
1266        if self.manifests_to_build is None:
1267            self.run_cargo(install_dirs, "test", args)
1268            if self.build_doc:
1269                self.run_cargo(install_dirs, "doc", ["--no-deps"])
1270        else:
1271            for manifest in self.manifests_to_build:
1272                margs = ["--manifest-path", self.manifest_dir(manifest)]
1273                self.run_cargo(install_dirs, "test", args + margs)
1274                if self.build_doc:
1275                    self.run_cargo(install_dirs, "doc", ["--no-deps"] + margs)
1276
1277    def _patchup_workspace(self):
        """
        This method makes some assumptions about the state of the project and
        its cargo dependencies:
        1. Crates from cargo dependencies can be extracted from Cargo.toml files
           using the _extract_crates function. It uses a heuristic, so check its
           code to understand how it is done.
        2. The extracted cargo dependency crates can be found in the
           dependency's install dir using the _resolve_crate_to_path function,
           which again uses a heuristic.

        Notice that many things might go wrong here. E.g. if someone depends
        on another getdeps crate by writing in their Cargo.toml file:

            my-rename-of-crate = { package = "crate", git = "..." }

        they can count themselves lucky because the code will raise an
        Exception. There might be more cases where the code will silently pass,
        producing bad results.
        """
1297        workspace_dir = self.workspace_dir()
1298        config = self._resolve_config()
1299        if config:
1300            with open(os.path.join(workspace_dir, "Cargo.toml"), "r+") as f:
1301                manifest_content = f.read()
1302                if "[package]" not in manifest_content:
                    # A fake manifest has to be created to change the virtual
                    # manifest into a non-virtual one. Virtual manifests are limited
                    # in many ways, and the inability to define patches on them is
                    # one of them. Check https://github.com/rust-lang/cargo/issues/4934
                    # to see if it is resolved.
1308                    f.write(
1309                        """
1310    [package]
1311    name = "fake_manifest_of_{}"
1312    version = "0.0.0"
1313    [lib]
1314    path = "/dev/null"
1315    """.format(
1316                            self.manifest.name
1317                        )
1318                    )
1319                else:
1320                    f.write("\n")
1321                f.write(config)
1322
1323    def _resolve_config(self):
1324        """
        Returns a configuration to be put inside the root Cargo.toml file which
        patches the dependencies' git code with local getdeps versions.
1327        See https://doc.rust-lang.org/cargo/reference/manifest.html#the-patch-section
1328        """
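        # The generated snippet looks like (names and paths illustrative):
        #   [patch."https://github.com/example/dep.git"]
        #   some-crate = { path = "/scratch/install/dep/source/some-crate" }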
1329        dep_to_git = self._resolve_dep_to_git()
1330        dep_to_crates = CargoBuilder._resolve_dep_to_crates(
1331            self.build_source_dir(), dep_to_git
1332        )
1333
1334        config = []
1335        for name in sorted(dep_to_git.keys()):
1336            git_conf = dep_to_git[name]
1337            crates = sorted(dep_to_crates.get(name, []))
1338            if not crates:
1339                continue  # nothing to patch, move along
1340            crates_patches = [
1341                '{} = {{ path = "{}" }}'.format(
1342                    crate,
1343                    CargoBuilder._resolve_crate_to_path(crate, git_conf).replace(
1344                        "\\", "\\\\"
1345                    ),
1346                )
1347                for crate in crates
1348            ]
1349
1350            config.append(
1351                '[patch."{0}"]\n'.format(git_conf["repo_url"])
1352                + "\n".join(crates_patches)
1353            )
1354        return "\n".join(config)
1355
1356    def _resolve_dep_to_git(self):
1357        """
        For each direct dependency of the currently built manifest, check if it
        is also built with cargo, and if so extract its git config and
        install dir.
1361        """
1362        dependencies = self.manifest.get_section_as_dict("dependencies", ctx=self.ctx)
1363        if not dependencies:
            return {}
1365
1366        dep_to_git = {}
1367        for dep in dependencies.keys():
1368            dep_manifest = self.loader.load_manifest(dep)
1369            dep_builder = dep_manifest.get("build", "builder", ctx=self.ctx)
1370            if dep_builder not in ["cargo", "nop"] or dep == "rust":
                # This is a direct dependency, but it is not built with cargo
1372                # and it is not simply copying files with nop, so ignore it.
1373                # The "rust" dependency is an exception since it contains the
1374                # toolchain.
1375                continue
1376
1377            git_conf = dep_manifest.get_section_as_dict("git", ctx=self.ctx)
1378            if "repo_url" not in git_conf:
1379                raise Exception(
1380                    "A cargo dependency requires git.repo_url to be defined."
1381                )
1382            source_dir = self.loader.get_project_install_dir(dep_manifest)
1383            if dep_builder == "cargo":
1384                source_dir = os.path.join(source_dir, "source")
1385            git_conf["source_dir"] = source_dir
1386            dep_to_git[dep] = git_conf
1387        return dep_to_git
1388
1389    @staticmethod
1390    def _resolve_dep_to_crates(build_source_dir, dep_to_git):
1391        """
        This function traverses the build_source_dir in search of Cargo.toml
        files, extracts the crate names from them using the _extract_crates
        function, and returns a merged result containing crate names per
        dependency name from all Cargo.toml files in the project.
1396        """
1397        if not dep_to_git:
1398            return {}  # no deps, so don't waste time traversing files
1399
1400        dep_to_crates = {}
1401        for root, _, files in os.walk(build_source_dir):
1402            for f in files:
1403                if f == "Cargo.toml":
1404                    more_dep_to_crates = CargoBuilder._extract_crates(
1405                        os.path.join(root, f), dep_to_git
1406                    )
1407                    for name, crates in more_dep_to_crates.items():
1408                        dep_to_crates.setdefault(name, set()).update(crates)
1409        return dep_to_crates
1410
1411    @staticmethod
1412    def _extract_crates(cargo_toml_file, dep_to_git):
1413        """
        This function reads the content of the provided Cargo.toml file and
        extracts the crate names for each dependency. The extraction is done
        with a heuristic, so it might be incorrect.
1417        """
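        # Lines that this heuristic matches look like (illustrative):
        #   tokio = { git = "https://github.com/tokio-rs/tokio.git" }
        #   my-alias = { package = "tokio", git = "https://github.com/tokio-rs/tokio.git" }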
1418        deps_to_crates = {}
1419        with open(cargo_toml_file, "r") as f:
1420            for line in f.readlines():
1421                if line.startswith("#") or "git = " not in line:
1422                    continue  # filter out commented lines and ones without git deps
1423                for name, conf in dep_to_git.items():
1424                    if 'git = "{}"'.format(conf["repo_url"]) in line:
1425                        pkg_template = ' package = "'
1426                        if pkg_template in line:
1427                            crate_name, _, _ = line.partition(pkg_template)[
1428                                2
1429                            ].partition('"')
1430                        else:
1431                            crate_name, _, _ = line.partition("=")
1432                        deps_to_crates.setdefault(name, set()).add(crate_name.strip())
1433        return deps_to_crates
1434
1435    @staticmethod
1436    def _resolve_crate_to_path(crate, git_conf):
1437        """
        Tries to find <crate> in git_conf["source_dir"] by searching for a [package]
        section followed by name = "<crate>".
1440        """
1441        source_dir = git_conf["source_dir"]
1442        search_pattern = '[package]\nname = "{}"'.format(crate)
1443
1444        for root, _, files in os.walk(source_dir):
1445            for fname in files:
1446                if fname == "Cargo.toml":
1447                    with open(os.path.join(root, fname), "r") as f:
1448                        if search_pattern in f.read():
1449                            return root
1450
        raise Exception("Failed to find crate {} in path {}".format(crate, source_dir))
1452