#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from __future__ import absolute_import, division, print_function, unicode_literals

import argparse
import json
import os
import shutil
import subprocess
import sys
import tarfile
import tempfile

# We don't import cache.create_cache directly as the facebook
# specific import below may monkey patch it, and we want to
# observe the patched version of this function!
import getdeps.cache as cache_module
from getdeps.buildopts import setup_build_options
from getdeps.dyndeps import create_dyn_dep_munger
from getdeps.errors import TransientFailure
from getdeps.fetcher import (
    SystemPackageFetcher,
    file_name_is_cmake_file,
    list_files_under_dir_newer_than_timestamp,
)
from getdeps.load import ManifestLoader
from getdeps.manifest import ManifestParser
from getdeps.platform import HostType
from getdeps.runcmd import run_cmd
from getdeps.subcmd import SubCmd, add_subcommands, cmd


try:
    import getdeps.facebook  # noqa: F401
except ImportError:
    # we don't ship the facebook specific subdir,
    # so allow that to fail silently
    pass


sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "getdeps"))
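
# A couple of typical invocations, for orientation (the project names are
# illustrative; the flags used here are defined on the subcommands below):
#
#   python3 getdeps.py build --no-tests folly
#   python3 getdeps.py test --src-dir=. fbthrift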


class UsageError(Exception):
    pass


@cmd("validate-manifest", "parse a manifest and validate that it is correct")
class ValidateManifest(SubCmd):
    def run(self, args):
        try:
            ManifestParser(file_name=args.file_name)
            print("OK", file=sys.stderr)
            return 0
        except Exception as exc:
            print("ERROR: %s" % str(exc), file=sys.stderr)
            return 1

    def setup_parser(self, parser):
        parser.add_argument("file_name", help="path to the manifest file")


@cmd("show-host-type", "outputs the host type tuple for the host machine")
class ShowHostType(SubCmd):
    def run(self, args):
        host = HostType()
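        # Prints an os-distro-version style tuple, e.g. something like
        # "linux-ubuntu-18.04" (illustrative; the exact string is whatever
        # HostType.as_tuple_string() produces for this machine).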
        print("%s" % host.as_tuple_string())
        return 0


class ProjectCmdBase(SubCmd):
    def run(self, args):
        opts = setup_build_options(args)

        if args.current_project is not None:
            opts.repo_project = args.current_project
        if args.project is None:
            if opts.repo_project is None:
                raise UsageError(
                    "no project name specified, and no .projectid file found"
                )
            if opts.repo_project == "fbsource":
                # The fbsource repository is a little special.  There is no project
                # manifest file for it.  A specific project must always be explicitly
                # specified when building from fbsource.
                raise UsageError(
                    "no project name specified (required when building in fbsource)"
                )
            args.project = opts.repo_project

        ctx_gen = opts.get_context_generator(facebook_internal=args.facebook_internal)
        if args.test_dependencies:
            ctx_gen.set_value_for_all_projects("test", "on")
        if args.enable_tests:
            ctx_gen.set_value_for_project(args.project, "test", "on")
        else:
            ctx_gen.set_value_for_project(args.project, "test", "off")

        loader = ManifestLoader(opts, ctx_gen)
        self.process_project_dir_arguments(args, loader)

        manifest = loader.load_manifest(args.project)

        self.run_project_cmd(args, loader, manifest)

    def process_project_dir_arguments(self, args, loader):
        def parse_project_arg(arg, arg_type):
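            # Arguments are either "project:path" or a bare "path" that is
            # applied to the project named on the command line, e.g.
            # (illustrative values):
            #   --src-dir folly:/data/users/me/folly
            #   --src-dir .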
            parts = arg.split(":")
            if len(parts) == 2:
                project, path = parts
            elif len(parts) == 1:
                project = args.project
                path = parts[0]
            # On Windows path contains colon, e.g. C:\open
            elif os.name == "nt" and len(parts) == 3:
                project = parts[0]
                path = parts[1] + ":" + parts[2]
            else:
                raise UsageError(
                    "invalid %s argument; too many ':' characters: %s" % (arg_type, arg)
                )

            return project, os.path.abspath(path)

        # If we are currently running from a project repository,
        # use the current repository for the project sources.
        build_opts = loader.build_opts
        if build_opts.repo_project is not None and build_opts.repo_root is not None:
            loader.set_project_src_dir(build_opts.repo_project, build_opts.repo_root)

        for arg in args.src_dir:
            project, path = parse_project_arg(arg, "--src-dir")
            loader.set_project_src_dir(project, path)

        for arg in args.build_dir:
            project, path = parse_project_arg(arg, "--build-dir")
            loader.set_project_build_dir(project, path)

        for arg in args.install_dir:
            project, path = parse_project_arg(arg, "--install-dir")
            loader.set_project_install_dir(project, path)

        for arg in args.project_install_prefix:
            project, path = parse_project_arg(arg, "--install-prefix")
            loader.set_project_install_prefix(project, path)

    def setup_parser(self, parser):
        parser.add_argument(
            "project",
            nargs="?",
            help=(
                "name of the project or path to a manifest "
                "file describing the project"
            ),
        )
        parser.add_argument(
            "--no-tests",
            action="store_false",
            dest="enable_tests",
            default=True,
            help="Disable building tests for this project.",
        )
        parser.add_argument(
            "--test-dependencies",
            action="store_true",
            help="Enable building tests for dependencies as well.",
        )
        parser.add_argument(
            "--current-project",
            help="Specify the name of the fbcode_builder manifest file for the "
            "current repository.  If not specified, the code will attempt to find "
            "this in a .projectid file in the repository root.",
        )
        parser.add_argument(
            "--src-dir",
            default=[],
            action="append",
            help="Specify a local directory to use for the project source, "
            "rather than fetching it.",
        )
        parser.add_argument(
            "--build-dir",
            default=[],
            action="append",
            help="Explicitly specify the build directory to use for the "
            "project, instead of the default location in the scratch path. "
            "This only affects the project specified, and not its dependencies.",
        )
        parser.add_argument(
            "--install-dir",
            default=[],
            action="append",
            help="Explicitly specify the install directory to use for the "
            "project, instead of the default location in the scratch path. "
            "This only affects the project specified, and not its dependencies.",
        )
        parser.add_argument(
            "--project-install-prefix",
            default=[],
            action="append",
            help="Specify the final deployment installation path for a project",
        )

        self.setup_project_cmd_parser(parser)

    def setup_project_cmd_parser(self, parser):
        pass


class CachedProject(object):
    """A helper that allows calling the cache logic for a project
    from both the build and the fetch code"""

    def __init__(self, cache, loader, m):
        self.m = m
        self.inst_dir = loader.get_project_install_dir(m)
        self.project_hash = loader.get_project_hash(m)
        self.ctx = loader.ctx_gen.get_context(m.name)
        self.loader = loader
        self.cache = cache
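        # The resulting name looks something like
        # "zstd-linux-ubuntu-18.04-<project_hash>-buildcache.tgz"
        # (illustrative; the pieces come from the manifest name, the platform
        # context and the computed project hash).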
        self.cache_file_name = "-".join(
            (
                m.name,
                self.ctx.get("os"),
                self.ctx.get("distro") or "none",
                self.ctx.get("distro_vers") or "none",
                self.project_hash,
                "buildcache.tgz",
            )
        )

    def is_cacheable(self):
        """We only cache third party projects"""
        return self.cache and self.m.shipit_project is None

    def was_cached(self):
        cached_marker = os.path.join(self.inst_dir, ".getdeps-cached-build")
        return os.path.exists(cached_marker)

    def download(self):
        if self.is_cacheable() and not os.path.exists(self.inst_dir):
            print("check cache for %s" % self.cache_file_name)
            dl_dir = os.path.join(self.loader.build_opts.scratch_dir, "downloads")
            if not os.path.exists(dl_dir):
                os.makedirs(dl_dir)
            try:
                target_file_name = os.path.join(dl_dir, self.cache_file_name)
                if self.cache.download_to_file(self.cache_file_name, target_file_name):
                    print(
                        "Extracting %s -> %s..." % (self.cache_file_name, self.inst_dir)
                    )
                    # Use a context manager so the archive handle is closed
                    # even if extraction fails.
                    with tarfile.open(target_file_name, "r") as tf:
                        tf.extractall(self.inst_dir)

                    cached_marker = os.path.join(self.inst_dir, ".getdeps-cached-build")
                    with open(cached_marker, "w") as f:
                        f.write("\n")

                    return True
            except Exception as exc:
                print("%s" % str(exc))

        return False

    def upload(self):
        if self.is_cacheable():
            # We can prepare an archive and stick it in LFS
            tempdir = tempfile.mkdtemp()
            tarfilename = os.path.join(tempdir, self.cache_file_name)
            print("Archiving for cache: %s..." % tarfilename)
            with tarfile.open(tarfilename, "w:gz") as tf:
                tf.add(self.inst_dir, arcname=".")
            try:
                self.cache.upload_from_file(self.cache_file_name, tarfilename)
            except Exception as exc:
                print(
                    "Failed to upload to cache (%s), continue anyway" % str(exc),
                    file=sys.stderr,
                )
            shutil.rmtree(tempdir)


@cmd("fetch", "fetch the code for a given project")
class FetchCmd(ProjectCmdBase):
    def setup_project_cmd_parser(self, parser):
        parser.add_argument(
            "--recursive",
            help="fetch the transitive deps also",
            action="store_true",
            default=False,
        )
        parser.add_argument(
            "--host-type",
            help=(
                "When recursively fetching, fetch deps for "
                "this host type rather than the current system"
            ),
        )

    def run_project_cmd(self, args, loader, manifest):
        if args.recursive:
            projects = loader.manifests_in_dependency_order()
        else:
            projects = [manifest]

        cache = cache_module.create_cache()
        for m in projects:
            cached_project = CachedProject(cache, loader, m)
            if cached_project.download():
                continue

            inst_dir = loader.get_project_install_dir(m)
            built_marker = os.path.join(inst_dir, ".built-by-getdeps")
            if os.path.exists(built_marker):
                with open(built_marker, "r") as f:
                    built_hash = f.read().strip()

                project_hash = loader.get_project_hash(m)
                if built_hash == project_hash:
                    continue

            # We need to fetch the sources
            fetcher = loader.create_fetcher(m)
            fetcher.update()


@cmd("install-system-deps", "Install system packages to satisfy the deps for a project")
class InstallSysDepsCmd(ProjectCmdBase):
    def setup_project_cmd_parser(self, parser):
        parser.add_argument(
            "--recursive",
            help="install the transitive deps also",
            action="store_true",
            default=False,
        )
        parser.add_argument(
            "--dry-run",
            action="store_true",
            default=False,
            help="Don't install, just print the commands we would run",
        )
        parser.add_argument(
            "--package-type",
            choices=["rpm", "deb"],
            default=None,
            help="Allow overriding the package type, e.g. to see the deb packages while on a centos host",
        )

    def run_project_cmd(self, args, loader, manifest):
        if args.recursive:
            projects = loader.manifests_in_dependency_order()
        else:
            projects = [manifest]

        all_packages = {}
        for m in projects:
            ctx = loader.ctx_gen.get_context(m.name)
            packages = m.get_required_system_packages(ctx)
            for k, v in packages.items():
                merged = all_packages.get(k, [])
                merged += v
                all_packages[k] = merged

        if args.package_type:
            manager = args.package_type
        else:
            manager = loader.build_opts.host_type.get_package_manager()

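        # Build the installer command for the detected (or overridden) package
        # manager; with --dry-run the command is printed rather than executed,
        # e.g. "dnf install -y <packages>" or "apt install -y <packages>".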
        cmd_args = None
        if manager == "rpm":
            packages = sorted(list(set(all_packages["rpm"])))
            if packages:
                cmd_args = ["dnf", "install", "-y"] + packages
        elif manager == "deb":
            packages = sorted(list(set(all_packages["deb"])))
            if packages:
                cmd_args = ["apt", "install", "-y"] + packages
        else:
            host_tuple = loader.build_opts.host_type.as_tuple_string()
            print(
                f"I don't know how to install any packages on this system {host_tuple}"
            )
            return

        if cmd_args:
            if args.dry_run:
                print(" ".join(cmd_args))
            else:
                run_cmd(cmd_args)
        else:
            print("no packages to install")


@cmd("list-deps", "lists the transitive deps for a given project")
class ListDepsCmd(ProjectCmdBase):
    def run_project_cmd(self, args, loader, manifest):
        for m in loader.manifests_in_dependency_order():
            print(m.name)
        return 0

    def setup_project_cmd_parser(self, parser):
        parser.add_argument(
            "--host-type",
            help=(
                "Produce the list for the specified host type, "
                "rather than that of the current system"
            ),
        )


def clean_dirs(opts):
    for d in ["build", "installed", "extracted", "shipit"]:
        d = os.path.join(opts.scratch_dir, d)
        print("Cleaning %s..." % d)
        if os.path.exists(d):
            shutil.rmtree(d)


@cmd("clean", "clean up the scratch dir")
class CleanCmd(SubCmd):
    def run(self, args):
        opts = setup_build_options(args)
        clean_dirs(opts)


@cmd("show-build-dir", "print the build dir for a given project")
class ShowBuildDirCmd(ProjectCmdBase):
    def run_project_cmd(self, args, loader, manifest):
        if args.recursive:
            manifests = loader.manifests_in_dependency_order()
        else:
            manifests = [manifest]

        for m in manifests:
            build_dir = loader.get_project_build_dir(m)
            print(build_dir)

    def setup_project_cmd_parser(self, parser):
        parser.add_argument(
            "--recursive",
            help="print the transitive deps also",
            action="store_true",
            default=False,
        )


@cmd("show-inst-dir", "print the installation dir for a given project")
class ShowInstDirCmd(ProjectCmdBase):
    def run_project_cmd(self, args, loader, manifest):
        if args.recursive:
            manifests = loader.manifests_in_dependency_order()
        else:
            manifests = [manifest]

        for m in manifests:
            inst_dir = loader.get_project_install_dir_respecting_install_prefix(m)
            print(inst_dir)

    def setup_project_cmd_parser(self, parser):
        parser.add_argument(
            "--recursive",
            help="print the transitive deps also",
            action="store_true",
            default=False,
        )


@cmd("show-source-dir", "print the source dir for a given project")
class ShowSourceDirCmd(ProjectCmdBase):
    def run_project_cmd(self, args, loader, manifest):
        if args.recursive:
            manifests = loader.manifests_in_dependency_order()
        else:
            manifests = [manifest]

        for m in manifests:
            fetcher = loader.create_fetcher(m)
            print(fetcher.get_src_dir())

    def setup_project_cmd_parser(self, parser):
        parser.add_argument(
            "--recursive",
            help="print the transitive deps also",
            action="store_true",
            default=False,
        )


@cmd("build", "build a given project")
class BuildCmd(ProjectCmdBase):
    def run_project_cmd(self, args, loader, manifest):
        if args.clean:
            clean_dirs(loader.build_opts)

        print("Building on %s" % loader.ctx_gen.get_context(args.project))
        projects = loader.manifests_in_dependency_order()

        cache = cache_module.create_cache() if args.use_build_cache else None

        # Accumulate the install directories so that the build steps
        # can find their dep installation
        install_dirs = []

        for m in projects:
            fetcher = loader.create_fetcher(m)

            if isinstance(fetcher, SystemPackageFetcher):
                # We are guaranteed that if the fetcher is set to
                # SystemPackageFetcher then this item is completely
                # satisfied by the appropriate system packages
                continue

            if args.clean:
                fetcher.clean()

            build_dir = loader.get_project_build_dir(m)
            inst_dir = loader.get_project_install_dir(m)

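            # Build the named project unless --only-deps was given, and build
            # each dependency unless --no-deps was given.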
            if (m == manifest and not args.only_deps) or (
                m != manifest and not args.no_deps
            ):
                print("Assessing %s..." % m.name)
                project_hash = loader.get_project_hash(m)
                ctx = loader.ctx_gen.get_context(m.name)
                built_marker = os.path.join(inst_dir, ".built-by-getdeps")

                cached_project = CachedProject(cache, loader, m)

                reconfigure, sources_changed = self.compute_source_change_status(
                    cached_project, fetcher, m, built_marker, project_hash
                )

                if os.path.exists(built_marker) and not cached_project.was_cached():
                    # We've previously built this. We may need to reconfigure if
                    # our deps have changed, so let's check them.
                    dep_reconfigure, dep_build = self.compute_dep_change_status(
                        m, built_marker, loader
                    )
                    if dep_reconfigure:
                        reconfigure = True
                    if dep_build:
                        sources_changed = True

                extra_cmake_defines = (
                    json.loads(args.extra_cmake_defines)
                    if args.extra_cmake_defines
                    else {}
                )

                if sources_changed or reconfigure or not os.path.exists(built_marker):
                    if os.path.exists(built_marker):
                        os.unlink(built_marker)
                    src_dir = fetcher.get_src_dir()
                    builder = m.create_builder(
                        loader.build_opts,
                        src_dir,
                        build_dir,
                        inst_dir,
                        ctx,
                        loader,
                        final_install_prefix=loader.get_project_install_prefix(m),
                        extra_cmake_defines=extra_cmake_defines,
                    )
                    builder.build(install_dirs, reconfigure=reconfigure)

                    with open(built_marker, "w") as f:
                        f.write(project_hash)

                    # Only populate the cache from continuous build runs
                    if args.schedule_type == "continuous":
                        cached_project.upload()
                elif args.verbose:
                    print("found good %s" % built_marker)

            install_dirs.append(inst_dir)

    def compute_dep_change_status(self, m, built_marker, loader):
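        # Compare the mtime of this project's .built-by-getdeps marker against
        # the files installed by its dependencies: a newer cmake file means we
        # need to reconfigure, any other newer file means we need to rebuild.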
        reconfigure = False
        sources_changed = False
        st = os.lstat(built_marker)

        ctx = loader.ctx_gen.get_context(m.name)
        dep_list = sorted(m.get_section_as_dict("dependencies", ctx).keys())
        for dep in dep_list:
            if reconfigure and sources_changed:
                break

            dep_manifest = loader.load_manifest(dep)
            dep_root = loader.get_project_install_dir(dep_manifest)
            for dep_file in list_files_under_dir_newer_than_timestamp(
                dep_root, st.st_mtime
            ):
                if os.path.basename(dep_file) == ".built-by-getdeps":
                    continue
                if file_name_is_cmake_file(dep_file):
                    if not reconfigure:
                        reconfigure = True
                        print(
                            f"Will reconfigure cmake because {dep_file} is newer than {built_marker}"
                        )
                else:
                    if not sources_changed:
                        sources_changed = True
                        print(
                            f"Will run build because {dep_file} is newer than {built_marker}"
                        )

                if reconfigure and sources_changed:
                    break

        return reconfigure, sources_changed

    def compute_source_change_status(
        self, cached_project, fetcher, m, built_marker, project_hash
    ):
        reconfigure = False
        sources_changed = False
        if not cached_project.download():
            check_fetcher = True
            if os.path.exists(built_marker):
                check_fetcher = False
                with open(built_marker, "r") as f:
                    built_hash = f.read().strip()
                if built_hash == project_hash:
                    if cached_project.is_cacheable():
                        # We can blindly trust the build status
                        reconfigure = False
                        sources_changed = False
                    else:
                        # Otherwise, we may have changed the source, so let's
                        # check in with the fetcher layer
                        check_fetcher = True
                else:
                    # Some kind of inconsistency with a prior build,
                    # let's run it again to be sure
                    os.unlink(built_marker)
                    reconfigure = True
                    sources_changed = True
                    # While we don't need to consult the fetcher for the
                    # status in this case, we may still need to have eg: shipit
                    # run in order to have a correct source tree.
                    fetcher.update()

            if check_fetcher:
                change_status = fetcher.update()
                reconfigure = change_status.build_changed()
                sources_changed = change_status.sources_changed()

        return reconfigure, sources_changed

    def setup_project_cmd_parser(self, parser):
        parser.add_argument(
            "--clean",
            action="store_true",
            default=False,
            help=(
                "Clean up the build and installation area prior to building, "
                "causing the projects to be built from scratch"
            ),
        )
        parser.add_argument(
            "--no-deps",
            action="store_true",
            default=False,
            help=(
                "Only build the named project, not its deps. "
                "This is most useful after you've built all of the deps, "
                "and helps to avoid waiting for relatively "
                "slow up-to-date-ness checks"
            ),
        )
        parser.add_argument(
            "--only-deps",
            action="store_true",
            default=False,
            help=(
                "Only build the named project's deps. "
                "This is most useful when you want to separate out building "
                "of all of the deps and your project"
            ),
        )
        parser.add_argument(
            "--no-build-cache",
            action="store_false",
            default=True,
            dest="use_build_cache",
            help="Do not attempt to use the build cache.",
        )
        parser.add_argument(
            "--schedule-type", help="Indicates how the build was activated"
        )
        parser.add_argument(
            "--extra-cmake-defines",
            help=(
                "Input json map that contains extra cmake defines to be used "
                "when compiling the current project and all its deps. "
                'e.g: \'{"CMAKE_CXX_FLAGS": "--bla"}\''
            ),
        )


@cmd("fixup-dyn-deps", "Adjusts dynamic dependencies for packaging purposes")
class FixupDeps(ProjectCmdBase):
    def run_project_cmd(self, args, loader, manifest):
        projects = loader.manifests_in_dependency_order()

        # Accumulate the install directories so that the build steps
        # can find their dep installation
        install_dirs = []

        for m in projects:
            inst_dir = loader.get_project_install_dir_respecting_install_prefix(m)
            install_dirs.append(inst_dir)

            if m == manifest:
                dep_munger = create_dyn_dep_munger(
                    loader.build_opts, install_dirs, args.strip
                )
                dep_munger.process_deps(args.destdir, args.final_install_prefix)

    def setup_project_cmd_parser(self, parser):
        parser.add_argument("destdir", help="Where to copy the fixed up executables")
        parser.add_argument(
            "--final-install-prefix", help="specify the final installation prefix"
        )
        parser.add_argument(
            "--strip",
            action="store_true",
            default=False,
            help="Strip debug info while processing executables",
        )


@cmd("test", "test a given project")
class TestCmd(ProjectCmdBase):
    def run_project_cmd(self, args, loader, manifest):
        projects = loader.manifests_in_dependency_order()

        # Accumulate the install directories so that the test steps
        # can find their dep installation
        install_dirs = []

        for m in projects:
            inst_dir = loader.get_project_install_dir(m)

            if m == manifest or args.test_dependencies:
                built_marker = os.path.join(inst_dir, ".built-by-getdeps")
                if not os.path.exists(built_marker):
                    print("project %s has not been built" % m.name)
                    # TODO: we could just go ahead and build it here, but I
                    # want to tackle that as part of adding build-for-test
                    # support.
                    return 1
                fetcher = loader.create_fetcher(m)
                src_dir = fetcher.get_src_dir()
                ctx = loader.ctx_gen.get_context(m.name)
                build_dir = loader.get_project_build_dir(m)
                builder = m.create_builder(
                    loader.build_opts, src_dir, build_dir, inst_dir, ctx, loader
                )

                builder.run_tests(
                    install_dirs,
                    schedule_type=args.schedule_type,
                    owner=args.test_owner,
                    test_filter=args.filter,
                    retry=args.retry,
                    no_testpilot=args.no_testpilot,
                )

            install_dirs.append(inst_dir)

    def setup_project_cmd_parser(self, parser):
        parser.add_argument(
            "--schedule-type", help="Indicates how the build was activated"
        )
        parser.add_argument("--test-owner", help="Owner for testpilot")
        parser.add_argument("--filter", help="Only run the tests matching the regex")
        parser.add_argument(
            "--retry",
            type=int,
            default=3,
            help="Number of immediate retries for failed tests "
            "(noop in continuous and testwarden runs)",
        )
        parser.add_argument(
            "--no-testpilot",
            help="Do not use Test Pilot even when available",
            action="store_true",
        )


@cmd("generate-github-actions", "generate a GitHub actions configuration")
class GenerateGitHubActionsCmd(ProjectCmdBase):
    RUN_ON_ALL = """ [push, pull_request]"""

    def run_project_cmd(self, args, loader, manifest):
        platforms = [
            HostType("linux", "ubuntu", "18"),
            HostType("darwin", None, None),
            HostType("windows", None, None),
        ]

        for p in platforms:
            if args.os_types and p.ostype not in args.os_types:
                continue
            self.write_job_for_platform(p, args)

    def get_run_on(self, args):
        if args.run_on_all_branches:
            return self.RUN_ON_ALL
        return f"""
  push:
    branches:
    - {args.main_branch}
  pull_request:
    branches:
    - {args.main_branch}"""

    # TODO: Break up complex function
    def write_job_for_platform(self, platform, args):  # noqa: C901
        build_opts = setup_build_options(args, platform)
        ctx_gen = build_opts.get_context_generator(facebook_internal=False)
        loader = ManifestLoader(build_opts, ctx_gen)
        manifest = loader.load_manifest(args.project)
        manifest_ctx = loader.ctx_gen.get_context(manifest.name)
        run_on = self.get_run_on(args)

        # Some projects don't do anything "useful" as a leaf project, only
        # as a dep for a leaf project. Check for those here; we don't want
        # to waste the effort scheduling them on CI.
        # We do this by looking at the builder type in the manifest file
        # rather than creating a builder and checking its type because we
        # don't know enough to create the full builder instance here.
        if manifest.get("build", "builder", ctx=manifest_ctx) == "nop":
            return None

        # We want to be sure that we're running things with python 3
        # but python versioning is honestly a bit of a frustrating mess.
        # `python` may be version 2 or version 3 depending on the system.
        # python3 may not be a thing at all!
        # Assume an optimistic default
        py3 = "python3"

        if build_opts.is_linux():
            job_name = "linux"
            runs_on = f"ubuntu-{args.ubuntu_version}"
        elif build_opts.is_windows():
            # We're targeting the windows-2016 image because it has
            # Visual Studio 2017 installed, and at the time of writing,
            # the version of boost in the manifests (1.69) is not
            # buildable with Visual Studio 2019
            job_name = "windows"
            runs_on = "windows-2016"
            # The windows runners are python 3 by default; python2.exe
            # is available if needed.
            py3 = "python"
        else:
            job_name = "mac"
            runs_on = "macOS-latest"

        os.makedirs(args.output_dir, exist_ok=True)
        output_file = os.path.join(args.output_dir, f"getdeps_{job_name}.yml")
        with open(output_file, "w") as out:
            # Deliberate line break here because the @ and the generated
            # symbols are meaningful to our internal tooling when they
            # appear in a single token
            out.write("# This file was @")
            out.write("generated by getdeps.py\n")
            out.write(
                f"""
name: {job_name}

on:{run_on}

jobs:
"""
            )

            getdepscmd = f"{py3} build/fbcode_builder/getdeps.py"

            out.write("  build:\n")
            out.write("    runs-on: %s\n" % runs_on)
            out.write("    steps:\n")
            out.write("    - uses: actions/checkout@v1\n")

            if build_opts.is_windows():
                # cmake relies on BOOST_ROOT but GH deliberately doesn't set it in order
                # to avoid versioning issues:
                # https://github.com/actions/virtual-environments/issues/319
                # Instead, set the version we think we need; this is effectively
                # coupled with the boost manifest
                # This is the unusual syntax for setting an env var for the rest of
                # the steps in a workflow:
                # https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/
                out.write("    - name: Export boost environment\n")
                out.write(
                    '      run: "echo BOOST_ROOT=%BOOST_ROOT_1_69_0% >> %GITHUB_ENV%"\n'
                )
                out.write("      shell: cmd\n")

                # The git installation may not like long filenames, so tell it
                # that we want it to use them!
                out.write("    - name: Fix Git config\n")
                out.write("      run: git config --system core.longpaths true\n")

            projects = loader.manifests_in_dependency_order()

            for m in projects:
                if m != manifest:
                    out.write("    - name: Fetch %s\n" % m.name)
                    out.write(f"      run: {getdepscmd} fetch --no-tests {m.name}\n")

            for m in projects:
                if m != manifest:
                    out.write("    - name: Build %s\n" % m.name)
                    out.write(f"      run: {getdepscmd} build --no-tests {m.name}\n")

            out.write("    - name: Build %s\n" % manifest.name)

            project_prefix = ""
            if not build_opts.is_windows():
                project_prefix = (
                    " --project-install-prefix %s:/usr/local" % manifest.name
                )

            out.write(
                f"      run: {getdepscmd} build --src-dir=. {manifest.name} {project_prefix}\n"
            )

            out.write("    - name: Copy artifacts\n")
            if build_opts.is_linux():
                # Strip debug info from the binaries, but only on linux.
                # While the `strip` utility is also available on macOS,
                # attempting to strip there results in an error.
                # The `strip` utility is not available on Windows.
                strip = " --strip"
            else:
                strip = ""

            out.write(
                f"      run: {getdepscmd} fixup-dyn-deps{strip} "
                f"--src-dir=. {manifest.name} _artifacts/{job_name} {project_prefix} "
                f"--final-install-prefix /usr/local\n"
            )

            out.write("    - uses: actions/upload-artifact@v2\n")
            out.write("      with:\n")
            out.write("        name: %s\n" % manifest.name)
            out.write("        path: _artifacts\n")

            out.write("    - name: Test %s\n" % manifest.name)
            out.write(
                f"      run: {getdepscmd} test --src-dir=. {manifest.name} {project_prefix}\n"
            )

    def setup_project_cmd_parser(self, parser):
        parser.add_argument(
            "--disallow-system-packages",
            help="Disallow satisfying third party deps from installed system packages",
            action="store_true",
            default=False,
        )
        parser.add_argument(
            "--output-dir", help="The directory that will contain the yml files"
        )
        parser.add_argument(
            "--run-on-all-branches",
            action="store_true",
            help="Allow CI to fire on all branches - Handy for testing",
        )
        parser.add_argument(
            "--ubuntu-version", default="18.04", help="Version of Ubuntu to use"
        )
        parser.add_argument(
            "--main-branch",
            default="main",
            help="Main branch to trigger GitHub Action on",
        )
        parser.add_argument(
            "--os-type",
            help="Filter to just this OS type to run",
            choices=["linux", "darwin", "windows"],
            action="append",
            dest="os_types",
            default=[],
        )


def get_arg_var_name(args):
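    # argparse derives the destination attribute from the first long option,
    # e.g. "--scratch-path" becomes args.scratch_path; mirror that here so we
    # can track our own defaults for the common arguments.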
    for arg in args:
        if arg.startswith("--"):
            return arg[2:].replace("-", "_")

    raise Exception("unable to determine argument variable name from %r" % (args,))


def parse_args():
    # We want to allow common arguments to be specified either before or after
    # the subcommand name.  In order to do this we add them to the main parser
    # and to subcommand parsers.  In order for this to work, we need to tell
    # argparse that the default value is SUPPRESS, so that the default values
    # from the subparser arguments won't override values set by the user from
    # the main parser.  We maintain our own list of desired defaults in the
    # common_defaults dictionary, and manually set those if the argument wasn't
    # present at all.
    common_args = argparse.ArgumentParser(add_help=False)
    common_defaults = {}

    def add_common_arg(*args, **kwargs):
        var_name = get_arg_var_name(args)
        default_value = kwargs.pop("default", None)
        common_defaults[var_name] = default_value
        kwargs["default"] = argparse.SUPPRESS
        common_args.add_argument(*args, **kwargs)

    add_common_arg("--scratch-path", help="Where to maintain checkouts and build dirs")
    add_common_arg(
        "--vcvars-path", default=None, help="Path to the vcvarsall.bat on Windows."
    )
    add_common_arg(
        "--install-prefix",
        help=(
            "Where the final build products will be installed "
            "(default is [scratch-path]/installed)"
        ),
    )
    add_common_arg(
        "--num-jobs",
        type=int,
        help=(
            "Number of concurrent jobs to use while building. "
            "(default=number of cpu cores)"
        ),
    )
    add_common_arg(
        "--use-shipit",
        help="use the real ShipIt instead of the simple shipit transformer",
        action="store_true",
        default=False,
    )
    add_common_arg(
        "--facebook-internal",
        help="Setup the build context as an FB internal build",
        action="store_true",
        default=None,
    )
    add_common_arg(
        "--no-facebook-internal",
        help="Perform a non-FB internal build, even when in an fbsource repository",
        action="store_false",
        dest="facebook_internal",
    )
    add_common_arg(
        "--allow-system-packages",
        help="Allow satisfying third party deps from installed system packages",
        action="store_true",
        default=False,
    )
    add_common_arg(
        "-v",
        "--verbose",
        help="Print more output",
        action="store_true",
        default=False,
    )
    add_common_arg(
        "--lfs-path",
        help="Provide a parent directory for lfs when fbsource is unavailable",
        default=None,
    )

    ap = argparse.ArgumentParser(
        description="Get and build dependencies and projects", parents=[common_args]
    )
    sub = ap.add_subparsers(
        # metavar suppresses the long and ugly default list of subcommands on a
        # single line.  We still render the nicer list below where we would
        # have shown the nasty one.
        metavar="",
        title="Available commands",
        help="",
    )

    add_subcommands(sub, common_args)

    args = ap.parse_args()
    for var_name, default_value in common_defaults.items():
        if not hasattr(args, var_name):
            setattr(args, var_name, default_value)

    return ap, args


def main():
    ap, args = parse_args()
    if getattr(args, "func", None) is None:
        ap.print_help()
        return 0
    try:
        return args.func(args)
    except UsageError as exc:
        ap.error(str(exc))
        return 1
    except TransientFailure as exc:
        print("TransientFailure: %s" % str(exc))
        # This return code is treated as a retryable transient infrastructure
        # error by Facebook's internal CI, rather than eg: a build or code
        # related error that needs to be fixed before progress can be made.
        return 128
    except subprocess.CalledProcessError as exc:
        print("%s" % str(exc), file=sys.stderr)
        print("!! Failed", file=sys.stderr)
        return 1


if __name__ == "__main__":
    sys.exit(main())