# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

"""
Fetch build artifacts from a Firefox tree.

This provides an (at-the-moment special purpose) interface to download build
artifacts from Mozilla's Task Cluster.

This module performs the following steps:

* find a candidate hg parent revision.  At one time we used the local pushlog,
  which required the mozext hg extension.  This isn't feasible with git, and it
  is only mildly less efficient to not use the pushlog, so we don't use it even
  when querying hg.

* map the candidate parent to candidate Task Cluster tasks and artifact
  locations.  Pushlog entries might not correspond to tasks (yet), and those
  tasks might not produce the desired class of artifacts.

* fetch fresh Task Cluster artifacts and purge old artifacts, using a simple
  Least Recently Used cache.

* post-process fresh artifacts, to speed future installation.  In particular,
  extract relevant files from Mac OS X DMG files into a friendly archive format
  so we don't have to mount DMG files frequently.

This module requires certain modules be importable from the ambient Python
environment.  |mach artifact| ensures these modules are available, but other
consumers will need to arrange this themselves.
"""


from __future__ import absolute_import, print_function, unicode_literals

import buildconfig
import collections
import functools
import glob
import logging
import operator
import os
import pickle
import re
import requests
import shutil
import six
import stat
import subprocess
import tarfile
import tempfile
import six.moves.urllib_parse as urlparse
import zipfile

import pylru
from taskgraph.util.taskcluster import find_task_id, get_artifact_url, list_artifacts

from mach.util import UserError

from mozbuild.artifact_cache import ArtifactCache
from mozbuild.artifact_builds import JOB_CHOICES
from mozbuild.util import ensureParentDir, FileAvoidWrite, mkdir
import mozinstall
from mozpack.files import JarFinder, TarFinder
from mozpack.mozjar import JarReader, JarWriter
from mozpack.packager.unpack import UnpackFinder
import mozpack.path as mozpath

# Number of candidate pushheads to cache per parent changeset.
NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50

# Number of parent changesets to consider as possible pushheads.
# There isn't really such a thing as a reasonable default here, because we don't
# know how many pushheads we'll need to look at to find a build with our artifacts,
# and we don't know how many changesets will be in each push. For now we assume
# we'll find a build in the last 50 pushes, assuming each push contains 10 changesets.
NUM_REVISIONS_TO_QUERY = 500

MAX_CACHED_TASKS = 400  # Number of pushheads to cache Task Cluster task data for.

# Downloaded artifacts are cached, and a subset of their contents extracted for
# easy installation.  This is most noticeable on Mac OS X: since mounting and
# copying from DMG files is very slow, we extract the desired binaries to a
# separate archive for fast re-installation.
PROCESSED_SUFFIX = ".processed.jar"
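# For example (file name illustrative only): a cached download "target.dmg" is
# post-processed once into "target.dmg" + PROCESSED_SUFFIX, i.e.
# "target.dmg.processed.jar", and later installs unpack that jar directly
# (see Artifacts.install_from_file below).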


class ArtifactJob(object):
    trust_domain = "gecko"
    default_candidate_trees = [
        "releases/mozilla-release",
    ]
    nightly_candidate_trees = [
        "mozilla-central",
        "integration/autoland",
    ]
    beta_candidate_trees = [
        "releases/mozilla-beta",
    ]
    # The list below should be updated when we have new ESRs.
    esr_candidate_trees = [
        "releases/mozilla-esr91",
    ]
    try_tree = "try"

    # These are a subset of TEST_HARNESS_BINS in testing/mochitest/Makefile.in.
    # Each item is a pair of (pattern, (src_prefix, dest_prefix)), where src_prefix
    # is the prefix of the pattern relevant to its location in the archive, and
    # dest_prefix is the prefix to be added that will yield the final path relative
    # to dist/.  (See the worked sketch after this set.)
    test_artifact_patterns = {
        ("bin/BadCertAndPinningServer", ("bin", "bin")),
        ("bin/DelegatedCredentialsServer", ("bin", "bin")),
        ("bin/EncryptedClientHelloServer", ("bin", "bin")),
        ("bin/GenerateOCSPResponse", ("bin", "bin")),
        ("bin/OCSPStaplingServer", ("bin", "bin")),
        ("bin/SanctionsTestServer", ("bin", "bin")),
        ("bin/certutil", ("bin", "bin")),
        ("bin/geckodriver", ("bin", "bin")),
        ("bin/pk12util", ("bin", "bin")),
        ("bin/screentopng", ("bin", "bin")),
        ("bin/ssltunnel", ("bin", "bin")),
        ("bin/xpcshell", ("bin", "bin")),
        ("bin/http3server", ("bin", "bin")),
        ("bin/plugins/gmp-*/*/*", ("bin/plugins", "bin")),
        ("bin/plugins/*", ("bin/plugins", "plugins")),
        ("bin/components/*.xpt", ("bin/components", "bin/components")),
    }
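
    # A minimal worked sketch of how one entry above is applied; this mirrors
    # the loop in process_tests_zip_artifact below, and the file name is
    # illustrative only:
    #
    #   pattern, (src_prefix, dest_prefix) = (
    #       "bin/components/*.xpt", ("bin/components", "bin/components"))
    #   filename = "bin/components/telemetry.xpt"
    #   assert mozpath.match(filename, pattern)
    #   destpath = mozpath.join(dest_prefix, mozpath.relpath(filename, src_prefix))
    #   # destpath == "bin/components/telemetry.xpt", taken relative to dist/.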

    # We can tell our input is a test archive by this suffix, which happens to
    # be the same across platforms.
    _test_zip_archive_suffix = ".common.tests.zip"
    _test_tar_archive_suffix = ".common.tests.tar.gz"

    def __init__(
        self,
        log=None,
        download_tests=True,
        download_symbols=False,
        download_host_bins=False,
        download_maven_zip=False,
        substs=None,
        mozbuild=None,
    ):
        self._package_re = re.compile(self.package_re)
        self._tests_re = None
        if download_tests:
            self._tests_re = re.compile(
                r"public/build/(en-US/)?target\.common\.tests\.(zip|tar\.gz)"
            )
        self._host_bins_re = None
        if download_host_bins:
            self._host_bins_re = re.compile(
                r"public/build/host/bin/(mar|mbsdiff)(.exe)?"
            )
        self._maven_zip_re = None
        if download_maven_zip:
            self._maven_zip_re = re.compile(r"public/build/target\.maven\.zip")
        self._log = log
        self._substs = substs
        self._symbols_archive_suffix = None
        if download_symbols == "full":
            self._symbols_archive_suffix = "crashreporter-symbols-full.tar.zst"
        elif download_symbols:
            self._symbols_archive_suffix = "crashreporter-symbols.zip"
        self._mozbuild = mozbuild
        self._candidate_trees = None

    def log(self, *args, **kwargs):
        if self._log:
            self._log(*args, **kwargs)

    def find_candidate_artifacts(self, artifacts):
        # TODO: Handle multiple artifacts, taking the latest one.
        tests_artifact = None
        maven_zip_artifact = None
        for artifact in artifacts:
            name = artifact["name"]
            if self._maven_zip_re:
                if self._maven_zip_re.match(name):
                    maven_zip_artifact = name
                    yield name
                else:
                    continue
            elif self._package_re and self._package_re.match(name):
                yield name
            elif self._host_bins_re and self._host_bins_re.match(name):
                yield name
            elif self._tests_re and self._tests_re.match(name):
                tests_artifact = name
                yield name
            elif self._symbols_archive_suffix and name.endswith(
                self._symbols_archive_suffix
            ):
                yield name
            else:
                self.log(
                    logging.DEBUG,
                    "artifact",
                    {"name": name},
                    "Not yielding artifact named {name} as a candidate artifact",
                )
        if self._tests_re and not tests_artifact:
            raise ValueError(
                'Expected tests archive matching "{re}", but '
                "found none!".format(re=self._tests_re)
            )
        if self._maven_zip_re and not maven_zip_artifact:
            raise ValueError(
                'Expected Maven zip archive matching "{re}", but '
                "found none!".format(re=self._maven_zip_re)
            )

    def process_artifact(self, filename, processed_filename):
        if filename.endswith(ArtifactJob._test_zip_archive_suffix) and self._tests_re:
            return self.process_tests_zip_artifact(filename, processed_filename)
        if filename.endswith(ArtifactJob._test_tar_archive_suffix) and self._tests_re:
            return self.process_tests_tar_artifact(filename, processed_filename)
        if self._symbols_archive_suffix and filename.endswith(
            self._symbols_archive_suffix
        ):
            return self.process_symbols_archive(filename, processed_filename)
        if self._host_bins_re:
            # Turn 'HASH-mar.exe' into 'mar.exe'.  `filename` is a path on disk
            # without the full path to the artifact, so we must reconstruct
            # that path here.
            orig_basename = os.path.basename(filename).split("-", 1)[1]
            if self._host_bins_re.match(
                "public/build/host/bin/{}".format(orig_basename)
            ):
                return self.process_host_bin(filename, processed_filename)
        return self.process_package_artifact(filename, processed_filename)

    def process_package_artifact(self, filename, processed_filename):
        raise NotImplementedError(
            "Subclasses must specialize process_package_artifact!"
        )

    def process_tests_zip_artifact(self, filename, processed_filename):
        from mozbuild.action.test_archive import OBJDIR_TEST_FILES

        added_entry = False

        with JarWriter(file=processed_filename, compress_level=5) as writer:
            reader = JarReader(filename)
            for filename, entry in six.iteritems(reader.entries):
                for pattern, (src_prefix, dest_prefix) in self.test_artifact_patterns:
                    if not mozpath.match(filename, pattern):
                        continue
                    destpath = mozpath.relpath(filename, src_prefix)
                    destpath = mozpath.join(dest_prefix, destpath)
                    self.log(
                        logging.DEBUG,
                        "artifact",
                        {"destpath": destpath},
                        "Adding {destpath} to processed archive",
                    )
                    mode = entry["external_attr"] >> 16
                    writer.add(destpath.encode("utf-8"), reader[filename], mode=mode)
                    added_entry = True
                    break

                if filename.endswith(".ini"):
                    # The artifact build writes test .ini files into the object
                    # directory; they don't come from the upstream test archive.
                    self.log(
                        logging.DEBUG,
                        "artifact",
                        {"filename": filename},
                        "Skipping test INI file {filename}",
                    )
                    continue

                for files_entry in OBJDIR_TEST_FILES.values():
                    origin_pattern = files_entry["pattern"]
                    leaf_filename = filename
                    if "dest" in files_entry:
                        dest = files_entry["dest"]
                        origin_pattern = mozpath.join(dest, origin_pattern)
                        leaf_filename = filename[len(dest) + 1 :]
                    if mozpath.match(filename, origin_pattern):
                        destpath = mozpath.join(
                            "..", files_entry["base"], leaf_filename
                        )
                        mode = entry["external_attr"] >> 16
                        writer.add(
                            destpath.encode("utf-8"), reader[filename], mode=mode
                        )

        if not added_entry:
            raise ValueError(
                'Archive format changed! No pattern from "{patterns}" '
                "matched an archive path.".format(
                    patterns=self.test_artifact_patterns
                )
            )

    def process_tests_tar_artifact(self, filename, processed_filename):
        from mozbuild.action.test_archive import OBJDIR_TEST_FILES

        added_entry = False

        with JarWriter(file=processed_filename, compress_level=5) as writer:
            with tarfile.open(filename) as reader:
                for filename, entry in TarFinder(filename, reader):
                    for (
                        pattern,
                        (src_prefix, dest_prefix),
                    ) in self.test_artifact_patterns:
                        if not mozpath.match(filename, pattern):
                            continue

                        destpath = mozpath.relpath(filename, src_prefix)
                        destpath = mozpath.join(dest_prefix, destpath)
                        self.log(
                            logging.DEBUG,
                            "artifact",
                            {"destpath": destpath},
                            "Adding {destpath} to processed archive",
                        )
                        mode = entry.mode
                        writer.add(destpath.encode("utf-8"), entry.open(), mode=mode)
                        added_entry = True
                        break

                    if filename.endswith(".ini"):
                        # The artifact build writes test .ini files into the object
                        # directory; they don't come from the upstream test archive.
                        self.log(
                            logging.DEBUG,
                            "artifact",
                            {"filename": filename},
                            "Skipping test INI file {filename}",
                        )
                        continue

                    for files_entry in OBJDIR_TEST_FILES.values():
                        origin_pattern = files_entry["pattern"]
                        leaf_filename = filename
                        if "dest" in files_entry:
                            dest = files_entry["dest"]
                            origin_pattern = mozpath.join(dest, origin_pattern)
                            leaf_filename = filename[len(dest) + 1 :]
                        if mozpath.match(filename, origin_pattern):
                            destpath = mozpath.join(
                                "..", files_entry["base"], leaf_filename
                            )
                            mode = entry.mode
                            writer.add(
                                destpath.encode("utf-8"), entry.open(), mode=mode
                            )

        if not added_entry:
            raise ValueError(
                'Archive format changed! No pattern from "{patterns}" '
                "matched an archive path.".format(
                    patterns=self.test_artifact_patterns
                )
            )

    def process_symbols_archive(
        self, filename, processed_filename, skip_compressed=False
    ):
        with JarWriter(file=processed_filename, compress_level=5) as writer:
            for filename, entry in self.iter_artifact_archive(filename):
                if skip_compressed and filename.endswith(".gz"):
                    self.log(
                        logging.DEBUG,
                        "artifact",
                        {"filename": filename},
                        "Skipping compressed ELF debug symbol file {filename}",
                    )
                    continue
                destpath = mozpath.join("crashreporter-symbols", filename)
                self.log(
                    logging.INFO,
                    "artifact",
                    {"destpath": destpath},
                    "Adding {destpath} to processed archive",
                )
                writer.add(destpath.encode("utf-8"), entry)

    def process_host_bin(self, filename, processed_filename):
        with JarWriter(file=processed_filename, compress_level=5) as writer:
            # Turn 'HASH-mar.exe' into 'mar.exe'.  `filename` is a path on disk
            # without any of the path parts of the artifact, so we must inject
            # the desired `host/bin` prefix here.
            orig_basename = os.path.basename(filename).split("-", 1)[1]
            destpath = mozpath.join("host/bin", orig_basename)
            writer.add(destpath.encode("utf-8"), open(filename, "rb"))

    def iter_artifact_archive(self, filename):
        if filename.endswith(".zip"):
            reader = JarReader(filename)
            for filename in reader.entries:
                yield filename, reader[filename]
        elif filename.endswith(".tar.zst") and self._mozbuild is not None:
            self._mozbuild._ensure_zstd()
            import zstandard

            ctx = zstandard.ZstdDecompressor()
            uncompressed = ctx.stream_reader(open(filename, "rb"))
            with tarfile.open(
                mode="r|", fileobj=uncompressed, bufsize=1024 * 1024
            ) as reader:
                while True:
                    info = reader.next()
                    if info is None:
                        break
                    yield info.name, reader.extractfile(info)
        else:
            raise RuntimeError("Unsupported archive type for %s" % filename)

    @property
    def candidate_trees(self):
        if not self._candidate_trees:
            self._candidate_trees = self.select_candidate_trees()
        return self._candidate_trees

    def select_candidate_trees(self):
        version_display = buildconfig.substs.get("MOZ_APP_VERSION_DISPLAY")

        if "esr" in version_display:
            return self.esr_candidate_trees
        elif re.search(r"a\d+$", version_display):
            return self.nightly_candidate_trees
        elif re.search(r"b\d+$", version_display):
            return self.beta_candidate_trees

        return self.default_candidate_trees
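
    # For illustration, how select_candidate_trees above classifies some
    # plausible MOZ_APP_VERSION_DISPLAY values (version strings are examples,
    # not taken from any build):
    #
    #   "91.0a1"    -> nightly_candidate_trees (matches r"a\d+$")
    #   "91.0b7"    -> beta_candidate_trees    (matches r"b\d+$")
    #   "91.2.0esr" -> esr_candidate_trees     (contains "esr")
    #   "91.0"      -> default_candidate_trees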


class AndroidArtifactJob(ArtifactJob):
    package_re = r"public/build/geckoview_example\.apk"
    product = "mobile"

    package_artifact_patterns = {"**/*.so"}

    def process_package_artifact(self, filename, processed_filename):
        # Extract all .so files into the root, which will get copied into dist/bin.
        with JarWriter(file=processed_filename, compress_level=5) as writer:
            for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
                if not any(
                    mozpath.match(p, pat) for pat in self.package_artifact_patterns
                ):
                    continue

                dirname, basename = os.path.split(p)
                self.log(
                    logging.DEBUG,
                    "artifact",
                    {"basename": basename},
                    "Adding {basename} to processed archive",
                )

                basedir = "bin"
                if not basename.endswith(".so"):
                    # Strip a leading "assets/" path component, if present.
                    # (str.lstrip("assets/") would strip *characters*, not the
                    # prefix, mangling names like "services/foo".)
                    if dirname.startswith("assets/"):
                        dirname = dirname[len("assets/") :]
                    basedir = mozpath.join("bin", dirname)
                basename = mozpath.join(basedir, basename)
                writer.add(basename.encode("utf-8"), f.open())

    def process_symbols_archive(self, filename, processed_filename):
        ArtifactJob.process_symbols_archive(
            self, filename, processed_filename, skip_compressed=True
        )

        if not self._symbols_archive_suffix.startswith("crashreporter-symbols-full."):
            return

        import gzip

        with JarWriter(file=processed_filename, compress_level=5) as writer:
            for filename, entry in self.iter_artifact_archive(filename):
                if not filename.endswith(".gz"):
                    continue

                # Uncompress "libxul.so/D3271457813E976AE7BF5DAFBABABBFD0/libxul.so.dbg.gz"
                # into "libxul.so.dbg".
                #
                # After running `settings append target.debug-file-search-paths $file`,
                # where file=/path/to/topobjdir/dist/crashreporter-symbols,
                # Android Studio's lldb (7.0.0, at least) will find the ELF debug symbol files.
                #
                # There are other paths that will work but none seem more desirable.  See
                # https://github.com/llvm-mirror/lldb/blob/882670690ca69d9dd96b7236c620987b11894af9/source/Host/common/Symbols.cpp#L324.
                basename = os.path.basename(filename).replace(".gz", "")
                destpath = mozpath.join("crashreporter-symbols", basename)
                self.log(
                    logging.DEBUG,
                    "artifact",
                    {"destpath": destpath},
                    "Adding uncompressed ELF debug symbol file "
                    "{destpath} to processed archive",
                )
                writer.add(destpath.encode("utf-8"), gzip.GzipFile(fileobj=entry))


class LinuxArtifactJob(ArtifactJob):
    package_re = r"public/build/target\.tar\.bz2"
    product = "firefox"

    _package_artifact_patterns = {
        "{product}/crashreporter",
        "{product}/dependentlibs.list",
        "{product}/{product}",
        "{product}/{product}-bin",
        "{product}/minidump-analyzer",
        "{product}/pingsender",
        "{product}/plugin-container",
        "{product}/updater",
        "{product}/**/*.so",
    }

    @property
    def package_artifact_patterns(self):
        return {p.format(product=self.product) for p in self._package_artifact_patterns}

    def process_package_artifact(self, filename, processed_filename):
        added_entry = False

        with JarWriter(file=processed_filename, compress_level=5) as writer:
            with tarfile.open(filename) as reader:
                for p, f in UnpackFinder(TarFinder(filename, reader)):
                    if not any(
                        mozpath.match(p, pat) for pat in self.package_artifact_patterns
                    ):
                        continue

                    # We strip off the relative "firefox/" bit from the path,
                    # but otherwise preserve it.
                    destpath = mozpath.join("bin", mozpath.relpath(p, self.product))
                    self.log(
                        logging.DEBUG,
                        "artifact",
                        {"destpath": destpath},
                        "Adding {destpath} to processed archive",
                    )
                    writer.add(destpath.encode("utf-8"), f.open(), mode=f.mode)
                    added_entry = True

        if not added_entry:
            raise ValueError(
                'Archive format changed! No pattern from "{patterns}" '
                "matched an archive path.".format(
                    patterns=self.package_artifact_patterns
                )
            )


class MacArtifactJob(ArtifactJob):
    package_re = r"public/build/target\.dmg"
    product = "firefox"

    # These get copied into dist/bin without the path, so "root/a/b/c" -> "dist/bin/c".
    _paths_no_keep_path = (
        "Contents/MacOS",
        [
            "crashreporter.app/Contents/MacOS/crashreporter",
            "{product}",
            "{product}-bin",
            "*.dylib",
            "minidump-analyzer",
            "pingsender",
            "plugin-container.app/Contents/MacOS/plugin-container",
            "updater.app/Contents/MacOS/org.mozilla.updater",
            # 'xpcshell',
            "XUL",
        ],
    )

    @property
    def paths_no_keep_path(self):
        root, paths = self._paths_no_keep_path
        return (root, [p.format(product=self.product) for p in paths])

    def process_package_artifact(self, filename, processed_filename):
        tempdir = tempfile.mkdtemp()
        oldcwd = os.getcwd()
        try:
            self.log(
                logging.DEBUG,
                "artifact",
                {"tempdir": tempdir},
                "Unpacking DMG into {tempdir}",
            )
            if self._substs["HOST_OS_ARCH"] == "Linux":
                # This is a cross build, use hfsplus and dmg tools to extract the dmg.
                os.chdir(tempdir)
                with open(os.devnull, "wb") as devnull:
                    subprocess.check_call(
                        [
                            self._substs["DMG_TOOL"],
                            "extract",
                            filename,
                            "extracted_img",
                        ],
                        stdout=devnull,
                    )
                    subprocess.check_call(
                        [self._substs["HFS_TOOL"], "extracted_img", "extractall"],
                        stdout=devnull,
                    )
            else:
                mozinstall.install(filename, tempdir)

            bundle_dirs = glob.glob(mozpath.join(tempdir, "*.app"))
            if len(bundle_dirs) != 1:
                raise ValueError(
                    "Expected one source bundle, found: {}".format(bundle_dirs)
                )
            [source] = bundle_dirs

            # These get copied into dist/bin with the path, so "root/a/b/c" -> "dist/bin/a/b/c".
            paths_keep_path = [
                (
                    "Contents/Resources",
                    [
                        "browser/components/libbrowsercomps.dylib",
                        "dependentlibs.list",
                        # 'firefox',
                        "gmp-clearkey/0.1/libclearkey.dylib",
                        # 'gmp-fake/1.0/libfake.dylib',
                        # 'gmp-fakeopenh264/1.0/libfakeopenh264.dylib',
                    ],
                )
            ]

            with JarWriter(file=processed_filename, compress_level=5) as writer:
                root, paths = self.paths_no_keep_path
                finder = UnpackFinder(mozpath.join(source, root))
                for path in paths:
                    for p, f in finder.find(path):
                        self.log(
                            logging.DEBUG,
                            "artifact",
                            {"path": p},
                            "Adding {path} to processed archive",
                        )
                        destpath = mozpath.join("bin", os.path.basename(p))
                        writer.add(destpath.encode("utf-8"), f, mode=f.mode)

                for root, paths in paths_keep_path:
                    finder = UnpackFinder(mozpath.join(source, root))
                    for path in paths:
                        for p, f in finder.find(path):
                            self.log(
                                logging.DEBUG,
                                "artifact",
                                {"path": p},
                                "Adding {path} to processed archive",
                            )
                            destpath = mozpath.join("bin", p)
                            writer.add(destpath.encode("utf-8"), f.open(), mode=f.mode)

        finally:
            os.chdir(oldcwd)
            try:
                shutil.rmtree(tempdir)
            except (OSError, IOError):
                self.log(
                    logging.WARN,
                    "artifact",
                    {"tempdir": tempdir},
                    "Unable to delete {tempdir}",
                )


class WinArtifactJob(ArtifactJob):
    package_re = r"public/build/target\.(zip|tar\.gz)"
    product = "firefox"

    _package_artifact_patterns = {
        "{product}/dependentlibs.list",
        "{product}/**/*.dll",
        "{product}/*.exe",
        "{product}/*.tlb",
    }

    @property
    def package_artifact_patterns(self):
        return {p.format(product=self.product) for p in self._package_artifact_patterns}

    # These are a subset of TEST_HARNESS_BINS in testing/mochitest/Makefile.in.
    test_artifact_patterns = {
        ("bin/BadCertAndPinningServer.exe", ("bin", "bin")),
        ("bin/DelegatedCredentialsServer.exe", ("bin", "bin")),
        ("bin/EncryptedClientHelloServer.exe", ("bin", "bin")),
        ("bin/GenerateOCSPResponse.exe", ("bin", "bin")),
        ("bin/OCSPStaplingServer.exe", ("bin", "bin")),
        ("bin/SanctionsTestServer.exe", ("bin", "bin")),
        ("bin/certutil.exe", ("bin", "bin")),
        ("bin/geckodriver.exe", ("bin", "bin")),
        ("bin/minidumpwriter.exe", ("bin", "bin")),
        ("bin/pk12util.exe", ("bin", "bin")),
        ("bin/screenshot.exe", ("bin", "bin")),
        ("bin/ssltunnel.exe", ("bin", "bin")),
        ("bin/xpcshell.exe", ("bin", "bin")),
        ("bin/http3server.exe", ("bin", "bin")),
        ("bin/plugins/gmp-*/*/*", ("bin/plugins", "bin")),
        ("bin/plugins/*", ("bin/plugins", "plugins")),
        ("bin/components/*", ("bin/components", "bin/components")),
    }

    def process_package_artifact(self, filename, processed_filename):
        added_entry = False
        with JarWriter(file=processed_filename, compress_level=5) as writer:
            for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
                if not any(
                    mozpath.match(p, pat) for pat in self.package_artifact_patterns
                ):
                    continue

                # strip off the relative "firefox/" bit from the path:
                basename = mozpath.relpath(p, self.product)
                basename = mozpath.join("bin", basename)
                self.log(
                    logging.DEBUG,
                    "artifact",
                    {"basename": basename},
                    "Adding {basename} to processed archive",
                )
                writer.add(basename.encode("utf-8"), f.open(), mode=f.mode)
                added_entry = True

        if not added_entry:
            raise ValueError(
                'Archive format changed! No pattern from "{patterns}" '
                "matched an archive path.".format(
                    patterns=self.package_artifact_patterns
                )
            )


class ThunderbirdMixin(object):
    trust_domain = "comm"
    product = "thunderbird"
    try_tree = "try-comm-central"

    nightly_candidate_trees = [
        "comm-central",
    ]
    beta_candidate_trees = [
        "releases/comm-beta",
    ]
    # The list below should be updated when we have new ESRs.
    esr_candidate_trees = [
        "releases/comm-esr91",
    ]


class LinuxThunderbirdArtifactJob(ThunderbirdMixin, LinuxArtifactJob):
    pass


class MacThunderbirdArtifactJob(ThunderbirdMixin, MacArtifactJob):
    pass


class WinThunderbirdArtifactJob(ThunderbirdMixin, WinArtifactJob):
    pass


def startswithwhich(s, prefixes):
    for prefix in prefixes:
        if s.startswith(prefix):
            return prefix
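

# For example, startswithwhich("linux64-opt", ("android", "linux", "macosx",
# "win")) returns "linux", so the "linux64-opt" job maps to LinuxArtifactJob
# in MOZ_JOB_DETAILS below; a job name matching no prefix would return None
# and raise a KeyError when the dicts below are built.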


MOZ_JOB_DETAILS = {
    j: {
        "android": AndroidArtifactJob,
        "linux": LinuxArtifactJob,
        "macosx": MacArtifactJob,
        "win": WinArtifactJob,
    }[startswithwhich(j, ("android", "linux", "macosx", "win"))]
    for j in JOB_CHOICES
}
COMM_JOB_DETAILS = {
    j: {
        "android": None,
        "linux": LinuxThunderbirdArtifactJob,
        "macosx": MacThunderbirdArtifactJob,
        "win": WinThunderbirdArtifactJob,
    }[startswithwhich(j, ("android", "linux", "macosx", "win"))]
    for j in JOB_CHOICES
}


def cachedmethod(cachefunc):
    """Decorator to wrap a class or instance method with a memoizing callable that
    saves results in a (possibly shared) cache.
    """

    def decorator(method):
        def wrapper(self, *args, **kwargs):
            mapping = cachefunc(self)
            if mapping is None:
                return method(self, *args, **kwargs)
            key = (method.__name__, args, tuple(sorted(kwargs.items())))
            try:
                value = mapping[key]
                return value
            except KeyError:
                pass
            result = method(self, *args, **kwargs)
            mapping[key] = result
            return result

        return functools.update_wrapper(wrapper, method)

    return decorator
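

# Hypothetical usage sketch of @cachedmethod (the class and method here are
# illustrative, not part of this module):
#
#   class Squares(object):
#       def __init__(self):
#           self._cache = pylru.lrucache(10)  # any mutable mapping works
#
#       @cachedmethod(operator.attrgetter("_cache"))
#       def square(self, x):
#           return x * x  # computed once per (method name, args, kwargs) key
#
#   s = Squares()
#   s.square(3)  # computes, then caches under ('square', (3,), ())
#   s.square(3)  # served from self._cache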


class CacheManager(object):
    """Maintain an LRU cache.  Provide simple persistence, including support for
    loading and saving the state using a "with" block.  Allow clearing the cache
    and printing the cache for debugging.

    Provide simple logging.
    """

    def __init__(
        self,
        cache_dir,
        cache_name,
        cache_size,
        cache_callback=None,
        log=None,
        skip_cache=False,
    ):
        self._skip_cache = skip_cache
        self._cache = pylru.lrucache(cache_size, callback=cache_callback)
        self._cache_filename = mozpath.join(cache_dir, cache_name + "-cache.pickle")
        self._log = log
        mkdir(cache_dir, not_indexed=True)

    def log(self, *args, **kwargs):
        if self._log:
            self._log(*args, **kwargs)

    def load_cache(self):
        if self._skip_cache:
            self.log(
                logging.INFO, "artifact", {}, "Skipping cache: ignoring load_cache!"
            )
            return

        try:
            items = pickle.load(open(self._cache_filename, "rb"))
            for key, value in items:
                self._cache[key] = value
        except Exception as e:
            # Corrupt cache, perhaps?  Sadly, pickle raises many different
            # exceptions, so it's not worth trying to be fine grained here.
            # We ignore any exception, so the cache is effectively dropped.
            self.log(
                logging.INFO,
                "artifact",
                {"filename": self._cache_filename, "exception": repr(e)},
                "Ignoring exception unpickling cache file {filename}: {exception}",
            )

    def dump_cache(self):
        if self._skip_cache:
            self.log(
                logging.INFO, "artifact", {}, "Skipping cache: ignoring dump_cache!"
            )
            return

        ensureParentDir(self._cache_filename)
        pickle.dump(
            list(reversed(list(self._cache.items()))),
            open(self._cache_filename, "wb"),
            -1,
        )

    def clear_cache(self):
        if self._skip_cache:
            self.log(
                logging.INFO, "artifact", {}, "Skipping cache: ignoring clear_cache!"
            )
            return

        with self:
            self._cache.clear()

    def __enter__(self):
        self.load_cache()
        return self

    def __exit__(self, type, value, traceback):
        self.dump_cache()
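
    # Typical use of the persistence protocol above (sketch; PushheadCache is
    # one of the subclasses defined below):
    #
    #   with PushheadCache(cache_dir, log=log) as cache:  # load_cache() on enter
    #       pushid = cache.parent_pushhead_id(tree, rev)
    #   # on exit, dump_cache() pickles the LRU state back to disk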


class PushheadCache(CacheManager):
    """Helps map tree/revision pairs to parent pushheads according to the pushlog."""

    def __init__(self, cache_dir, log=None, skip_cache=False):
        CacheManager.__init__(
            self,
            cache_dir,
            "pushhead_cache",
            MAX_CACHED_TASKS,
            log=log,
            skip_cache=skip_cache,
        )

    @cachedmethod(operator.attrgetter("_cache"))
    def parent_pushhead_id(self, tree, revision):
        cset_url_tmpl = (
            "https://hg.mozilla.org/{tree}/json-pushes?"
            "changeset={changeset}&version=2&tipsonly=1"
        )
        req = requests.get(
            cset_url_tmpl.format(tree=tree, changeset=revision),
            headers={"Accept": "application/json"},
        )
        if req.status_code not in range(200, 300):
            raise ValueError
        result = req.json()
        [found_pushid] = result["pushes"].keys()
        return int(found_pushid)

    @cachedmethod(operator.attrgetter("_cache"))
    def pushid_range(self, tree, start, end):
        pushid_url_tmpl = (
            "https://hg.mozilla.org/{tree}/json-pushes?"
            "startID={start}&endID={end}&version=2&tipsonly=1"
        )

        req = requests.get(
            pushid_url_tmpl.format(tree=tree, start=start, end=end),
            headers={"Accept": "application/json"},
        )
        result = req.json()
        return [p["changesets"][-1] for p in result["pushes"].values()]
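
    # For reference, the json-pushes version=2 responses consumed above look
    # roughly like this (abbreviated; unused fields elided):
    #
    #   {"pushes": {"12345": {"changesets": ["<tip sha1>"], ...}}, ...}
    #
    # With tipsonly=1 each push lists only its tip changeset, which is why
    # pushid_range can take p["changesets"][-1] as the pushhead.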


class TaskCache(CacheManager):
    """Map candidate pushheads to Task Cluster task IDs and artifact URLs."""

    def __init__(self, cache_dir, log=None, skip_cache=False):
        CacheManager.__init__(
            self,
            cache_dir,
            "artifact_url",
            MAX_CACHED_TASKS,
            log=log,
            skip_cache=skip_cache,
        )

    @cachedmethod(operator.attrgetter("_cache"))
    def artifacts(self, tree, job, artifact_job_class, rev):
        # Grab the second part of the repo name, which is generally how things
        # are indexed, e.g. 'integration/autoland' is indexed as 'autoland'.
        tree = tree.split("/")[1] if "/" in tree else tree

        if job.endswith("-opt"):
            tree += ".shippable"

        namespace = "{trust_domain}.v2.{tree}.revision.{rev}.{product}.{job}".format(
            trust_domain=artifact_job_class.trust_domain,
            rev=rev,
            tree=tree,
            product=artifact_job_class.product,
            job=job,
        )
        self.log(
            logging.DEBUG,
            "artifact",
            {"namespace": namespace},
            "Searching Taskcluster index with namespace: {namespace}",
        )
        try:
            taskId = find_task_id(namespace)
        except KeyError:
            # Not all revisions correspond to pushes that produce the job we
            # care about; and even those that do may not have completed yet.
            raise ValueError(
                "Task for {namespace} does not exist (yet)!".format(namespace=namespace)
            )

        return taskId, list_artifacts(taskId)
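
    # An example namespace produced above (revision illustrative): for job
    # "linux64-opt" on tree "integration/autoland", the index namespace is
    #
    #   gecko.v2.autoland.shippable.revision.<rev>.firefox.linux64-opt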


class Artifacts(object):
    """Maintain state to efficiently fetch build artifacts from a Firefox tree."""

    def __init__(
        self,
        tree,
        substs,
        defines,
        job=None,
        log=None,
        cache_dir=".",
        hg=None,
        git=None,
        skip_cache=False,
        topsrcdir=None,
        download_tests=True,
        download_symbols=False,
        download_host_bins=False,
        download_maven_zip=False,
        no_process=False,
        mozbuild=None,
    ):
        if (hg and git) or (not hg and not git):
            raise ValueError("Must provide path to exactly one of hg and git")

        self._substs = substs
        self._defines = defines
        self._tree = tree
        self._job = job or self._guess_artifact_job()
        self._log = log
        self._hg = hg
        self._git = git
        self._cache_dir = cache_dir
        self._skip_cache = skip_cache
        self._topsrcdir = topsrcdir
        self._no_process = no_process

        app = self._substs.get("MOZ_BUILD_APP")
        job_details = COMM_JOB_DETAILS if app == "comm/mail" else MOZ_JOB_DETAILS

        try:
            cls = job_details[self._job]
            self._artifact_job = cls(
                log=self._log,
                download_tests=download_tests,
                download_symbols=download_symbols,
                download_host_bins=download_host_bins,
                download_maven_zip=download_maven_zip,
                substs=self._substs,
                mozbuild=mozbuild,
            )
        except KeyError:
            self.log(logging.INFO, "artifact", {"job": self._job}, "Unknown job {job}")
            raise KeyError("Unknown job")

        self._task_cache = TaskCache(
            self._cache_dir, log=self._log, skip_cache=self._skip_cache
        )
        self._artifact_cache = ArtifactCache(
            self._cache_dir, log=self._log, skip_cache=self._skip_cache
        )
        self._pushhead_cache = PushheadCache(
            self._cache_dir, log=self._log, skip_cache=self._skip_cache
        )

    def log(self, *args, **kwargs):
        if self._log:
            self._log(*args, **kwargs)

    def run_hg(self, *args, **kwargs):
        # Force plain hg output even when the caller does not pass an explicit
        # environment; mutating kwargs.get("env", {}) alone would silently
        # drop the HGPLAIN setting in that case.
        env = dict(kwargs.get("env", os.environ))
        env["HGPLAIN"] = "1"
        kwargs["env"] = env
        kwargs["universal_newlines"] = True
        return subprocess.check_output([self._hg] + list(args), **kwargs)

    def _guess_artifact_job(self):
        # Add the "-debug" suffix to the guessed artifact job name
        # if MOZ_DEBUG is enabled.
        if self._substs.get("MOZ_DEBUG"):
            target_suffix = "-debug"
        else:
            target_suffix = "-opt"

        if self._substs.get("MOZ_BUILD_APP", "") == "mobile/android":
            if self._substs["ANDROID_CPU_ARCH"] == "x86_64":
                return "android-x86_64" + target_suffix
            if self._substs["ANDROID_CPU_ARCH"] == "x86":
                return "android-x86" + target_suffix
            if self._substs["ANDROID_CPU_ARCH"] == "arm64-v8a":
                return "android-aarch64" + target_suffix
            return "android-arm" + target_suffix

        target_64bit = False
        if self._substs["target_cpu"] == "x86_64":
            target_64bit = True

        if self._defines.get("XP_LINUX", False):
            return ("linux64" if target_64bit else "linux") + target_suffix
        if self._defines.get("XP_WIN", False):
            if self._substs["target_cpu"] == "aarch64":
                return "win64-aarch64" + target_suffix
            return ("win64" if target_64bit else "win32") + target_suffix
        if self._defines.get("XP_MACOSX", False):
            # We only produce unified builds in automation, so the target_cpu
            # check is not relevant.
            return "macosx64" + target_suffix
        raise Exception("Cannot determine default job for |mach artifact|!")
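
    # Some example guesses (the substs/defines values are illustrative):
    #
    #   XP_LINUX, target_cpu == "x86_64", no MOZ_DEBUG -> "linux64-opt"
    #   XP_WIN, target_cpu == "aarch64", MOZ_DEBUG     -> "win64-aarch64-debug"
    #   MOZ_BUILD_APP == "mobile/android",
    #       ANDROID_CPU_ARCH == "x86"                  -> "android-x86-opt"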

    def _pushheads_from_rev(self, rev, count):
        """Queries hg.mozilla.org's json-pushlog for pushheads that are nearby
        ancestors of `rev`. Multiple trees are queried, as the `rev` may
        already have been pushed to multiple repositories. For each repository
        containing `rev`, the pushhead introducing `rev` and the previous
        `count` pushheads from that point are included in the output.
        """

        with self._pushhead_cache as pushhead_cache:
            found_pushids = {}

            search_trees = self._artifact_job.candidate_trees
            for tree in search_trees:
                self.log(
                    logging.DEBUG,
                    "artifact",
                    {"tree": tree, "rev": rev},
                    "Attempting to find a pushhead containing {rev} on {tree}.",
                )
                try:
                    pushid = pushhead_cache.parent_pushhead_id(tree, rev)
                    found_pushids[tree] = pushid
                except ValueError:
                    continue

            candidate_pushheads = collections.defaultdict(list)

            for tree, pushid in six.iteritems(found_pushids):
                end = pushid
                start = pushid - NUM_PUSHHEADS_TO_QUERY_PER_PARENT

                self.log(
                    logging.DEBUG,
                    "artifact",
                    {
                        "tree": tree,
                        "pushid": pushid,
                        "num": NUM_PUSHHEADS_TO_QUERY_PER_PARENT,
                    },
                    "Retrieving the last {num} pushheads starting with id {pushid} on {tree}",
                )
                for pushhead in pushhead_cache.pushid_range(tree, start, end):
                    candidate_pushheads[pushhead].append(tree)

        return candidate_pushheads

    def _get_hg_revisions_from_git(self):
        rev_list = subprocess.check_output(
            [
                self._git,
                "rev-list",
                "--topo-order",
                "--max-count={num}".format(num=NUM_REVISIONS_TO_QUERY),
                "HEAD",
            ],
            universal_newlines=True,
            cwd=self._topsrcdir,
        )

        hg_hash_list = subprocess.check_output(
            [self._git, "cinnabar", "git2hg"] + rev_list.splitlines(),
            universal_newlines=True,
            cwd=self._topsrcdir,
        )

        zeroes = "0" * 40

        hashes = []
        for hg_hash in hg_hash_list.splitlines():
            hg_hash = hg_hash.strip()
            if not hg_hash or hg_hash == zeroes:
                continue
            hashes.append(hg_hash)
        if not hashes:
            msg = (
                "Could not list any recent revisions in your clone. Does "
                "your clone have git-cinnabar metadata? If not, consider "
                "re-cloning using the directions at "
                "https://github.com/glandium/git-cinnabar/wiki/Mozilla:-A-"
                "git-workflow-for-Gecko-development"
            )
            try:
                subprocess.check_output(
                    [
                        self._git,
                        "cat-file",
                        "-e",
                        "05e5d33a570d48aed58b2d38f5dfc0a7870ff8d3^{commit}",
                    ],
                    stderr=subprocess.STDOUT,
                )
                # If the above commit exists, we're probably in a clone of
                # `gecko-dev`, and this documentation applies.
                msg += (
                    "\n\nNOTE: Consider following the directions "
                    "at https://github.com/glandium/git-cinnabar/wiki/"
                    "Mozilla:-Using-a-git-clone-of-gecko%E2%80%90dev-"
                    "to-push-to-mercurial to resolve this issue."
                )
            except subprocess.CalledProcessError:
                pass
            raise UserError(msg)
        return hashes
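
    # The commands run above are, roughly (the sha1s are illustrative):
    #
    #   git rev-list --topo-order --max-count=500 HEAD
    #   git cinnabar git2hg <sha1> [<sha1> ...]
    #
    # git-cinnabar prints forty zeros for commits with no hg metadata, which
    # is why such lines are filtered out of the result.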

    def _get_recent_public_revisions(self):
        """Returns recent ancestors of the working parent that are likely
        to be known to Mozilla automation.

        If we're using git, retrieves hg revisions from git-cinnabar.
        """
        if self._git:
            return self._get_hg_revisions_from_git()

        # Mercurial updated the ordering of "last" in 4.3. We use revision
        # numbers to order here to accommodate multiple versions of hg.
        last_revs = self.run_hg(
            "log",
            "--template",
            "{rev}:{node}\n",
            "-r",
            "last(public() and ::., {num})".format(num=NUM_REVISIONS_TO_QUERY),
            cwd=self._topsrcdir,
        ).splitlines()

        if len(last_revs) == 0:
            raise UserError(
                """\
There are no public revisions.
This can happen if the repository is created from a bundle file and has never
been pulled from a remote.  Please run `hg pull` and build again.
https://firefox-source-docs.mozilla.org/contributing/vcs/mercurial_bundles.html
"""
            )

        self.log(
            logging.DEBUG,
            "artifact",
            {"len": len(last_revs)},
            "hg suggested {len} candidate revisions",
        )

        def to_pair(line):
            rev, node = line.split(":", 1)
            return (int(rev), node)

        pairs = [to_pair(r) for r in last_revs]

        # Python's tuple sort orders by first component: here, the (local)
        # revision number.
        nodes = [pair[1] for pair in sorted(pairs, reverse=True)]

        for node in nodes[:20]:
            self.log(
                logging.DEBUG,
                "artifact",
                {"node": node},
                "hg suggested candidate revision: {node}",
            )
        self.log(
            logging.DEBUG,
            "artifact",
            {"remaining": max(0, len(nodes) - 20)},
            "hg suggested candidate revision: and {remaining} more",
        )

        return nodes

    def _find_pushheads(self):
        """Returns an iterator of (tree-set, revision) pairs for recent
        pushhead revisions, starting with the working parent.
        """

        last_revs = self._get_recent_public_revisions()
        candidate_pushheads = self._pushheads_from_rev(
            last_revs[0].rstrip(), NUM_PUSHHEADS_TO_QUERY_PER_PARENT
        )
        count = 0
        for rev in last_revs:
            rev = rev.rstrip()
            if not rev:
                continue
            if rev not in candidate_pushheads:
                continue
            count += 1
            yield candidate_pushheads[rev], rev

        if not count:
            raise Exception(
                "Could not find any candidate pushheads in the last {num} revisions.\n"
                "Search started with {rev}, which must be known to Mozilla automation.\n\n"
                "See https://firefox-source-docs.mozilla.org/contributing/build/artifact_builds.html".format(  # noqa E501
                    rev=last_revs[0], num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT
                )
            )

    def find_pushhead_artifacts(self, task_cache, job, tree, pushhead):
        try:
            taskId, artifacts = task_cache.artifacts(
                tree, job, self._artifact_job.__class__, pushhead
            )
        except ValueError:
            return None

        urls = []
        for artifact_name in self._artifact_job.find_candidate_artifacts(artifacts):
            url = get_artifact_url(taskId, artifact_name)
            urls.append(url)
        if urls:
            self.log(
                logging.DEBUG,
                "artifact",
                {"pushhead": pushhead, "tree": tree},
                "Installing from remote pushhead {pushhead} on {tree}",
            )
            return urls
        return None

    def install_from_file(self, filename, distdir):
        self.log(
            logging.DEBUG,
            "artifact",
            {"filename": filename},
            "Installing from {filename}",
        )

        # Copy all .so files, avoiding modification where possible.
        ensureParentDir(mozpath.join(distdir, ".dummy"))

        if self._no_process:
            orig_basename = os.path.basename(filename)
            # Turn 'HASH-target...' into 'target...' if possible.  It might not
            # be possible if the file is given directly on the command line.
            before, _sep, after = orig_basename.rpartition("-")
            if re.match(r"[0-9a-fA-F]{16}$", before):
                orig_basename = after
            path = mozpath.join(distdir, orig_basename)
            with FileAvoidWrite(path, readmode="rb") as fh:
                shutil.copyfileobj(open(filename, mode="rb"), fh)
            self.log(
                logging.DEBUG,
                "artifact",
                {"path": path},
                "Copied unprocessed artifact to {path}",
            )
            return

        # Do we need to post-process?
        processed_filename = filename + PROCESSED_SUFFIX

        if self._skip_cache and os.path.exists(processed_filename):
            self.log(
                logging.INFO,
                "artifact",
                {"path": processed_filename},
                "Skipping cache: removing cached processed artifact {path}",
            )
            os.remove(processed_filename)

        if not os.path.exists(processed_filename):
            self.log(
                logging.DEBUG,
                "artifact",
                {"filename": filename},
                "Processing contents of {filename}",
            )
            self.log(
                logging.DEBUG,
                "artifact",
                {"processed_filename": processed_filename},
                "Writing processed {processed_filename}",
            )
            self._artifact_job.process_artifact(filename, processed_filename)

        self._artifact_cache._persist_limit.register_file(processed_filename)

        self.log(
            logging.DEBUG,
            "artifact",
            {"processed_filename": processed_filename},
            "Installing from processed {processed_filename}",
        )

        with zipfile.ZipFile(processed_filename) as zf:
            for info in zf.infolist():
                n = mozpath.join(distdir, info.filename)
                fh = FileAvoidWrite(n, readmode="rb")
                shutil.copyfileobj(zf.open(info), fh)
                file_existed, file_updated = fh.close()
                self.log(
                    logging.DEBUG,
                    "artifact",
                    {
                        "updating": "Updating" if file_updated else "Not updating",
                        "filename": n,
                    },
                    "{updating} {filename}",
                )
                if not file_existed or file_updated:
                    # Libraries and binaries may need to be marked executable,
                    # depending on platform.
                    perms = (
                        info.external_attr >> 16
                    )  # See http://stackoverflow.com/a/434689.
                    perms |= (
                        stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
                    )  # u+w, a+r.
                    os.chmod(n, perms)
        return 0

    def install_from_url(self, url, distdir):
        self.log(logging.DEBUG, "artifact", {"url": url}, "Installing from {url}")
        filename = self._artifact_cache.fetch(url)
        return self.install_from_file(filename, distdir)

    def _install_from_hg_pushheads(self, hg_pushheads, distdir):
        """Iterate pairs (tree-set, hg_hash) associating sets of trees with
        the hg revision hashes known to be in them, trying to download and
        install from each.
        """

        urls = None
        count = 0
        # "with" blocks handle cache persistence.
        with self._task_cache as task_cache:
            for trees, hg_hash in hg_pushheads:
                for tree in trees:
                    count += 1
                    self.log(
                        logging.DEBUG,
                        "artifact",
                        {"hg_hash": hg_hash, "tree": tree},
                        "Trying to find artifacts for hg revision {hg_hash} on tree {tree}.",
                    )
                    urls = self.find_pushhead_artifacts(
                        task_cache, self._job, tree, hg_hash
                    )
                    if urls:
                        for url in urls:
                            if self.install_from_url(url, distdir):
                                return 1
                        return 0

        self.log(
            logging.ERROR,
            "artifact",
            {"count": count},
            "Tried {count} pushheads, no built artifacts found.",
        )
        return 1

    def install_from_recent(self, distdir):
        hg_pushheads = self._find_pushheads()
        return self._install_from_hg_pushheads(hg_pushheads, distdir)

    def install_from_revset(self, revset, distdir):
        revision = None
        try:
            if self._hg:
                revision = self.run_hg(
                    "log", "--template", "{node}\n", "-r", revset, cwd=self._topsrcdir
                ).strip()
            elif self._git:
                revset = subprocess.check_output(
                    [self._git, "rev-parse", "%s^{commit}" % revset],
                    stderr=open(os.devnull, "w"),
                    universal_newlines=True,
                    cwd=self._topsrcdir,
                ).strip()
            else:
                # Fall back to the exception handling case from both hg and
                # git.  (CalledProcessError requires a returncode and a
                # command.)
                raise subprocess.CalledProcessError(1, "(no hg or git)")
        except subprocess.CalledProcessError:
            # If the mercurial or git commands above failed, it means the given
            # revset is not known locally to the VCS. But if the revset looks
            # like a complete sha1, assume it is a mercurial sha1 that hasn't
            # been pulled, and use that.
            if re.match(r"^[A-Fa-f0-9]{40}$", revset):
                revision = revset

        if revision is None and self._git:
            revision = subprocess.check_output(
                [self._git, "cinnabar", "git2hg", revset],
                universal_newlines=True,
                cwd=self._topsrcdir,
            ).strip()

        if revision == "0" * 40 or revision is None:
            raise ValueError(
                "revision specification must resolve to a commit known to hg"
            )
        if len(revision.split("\n")) != 1:
            raise ValueError(
                "revision specification must resolve to exactly one commit"
            )

        self.log(
            logging.INFO,
            "artifact",
            {"revset": revset, "revision": revision},
            "Will only accept artifacts from a pushhead at {revision} "
            '(matched revset "{revset}").',
        )
        # Include try in our search to allow pulling from a specific push.
        pushheads = [
            (
                self._artifact_job.candidate_trees + [self._artifact_job.try_tree],
                revision,
            )
        ]
        return self._install_from_hg_pushheads(pushheads, distdir)

    def install_from_task(self, taskId, distdir):
        artifacts = list_artifacts(taskId)

        urls = []
        for artifact_name in self._artifact_job.find_candidate_artifacts(artifacts):
            url = get_artifact_url(taskId, artifact_name)
            urls.append(url)
        if not urls:
            raise ValueError(
                "Task {taskId} existed, but no artifacts found!".format(taskId=taskId)
            )
        for url in urls:
            if self.install_from_url(url, distdir):
                return 1
        return 0

    def install_from(self, source, distdir):
        """Install artifacts from a ``source`` into the given ``distdir``."""
        if source and os.path.isfile(source):
            return self.install_from_file(source, distdir)
        elif source and urlparse.urlparse(source).scheme:
            return self.install_from_url(source, distdir)
        else:
            if source is None and "MOZ_ARTIFACT_REVISION" in os.environ:
                source = os.environ["MOZ_ARTIFACT_REVISION"]

            if source:
                return self.install_from_revset(source, distdir)

            for var in (
                "MOZ_ARTIFACT_TASK_%s" % self._job.upper().replace("-", "_"),
                "MOZ_ARTIFACT_TASK",
            ):
                if var in os.environ:
                    return self.install_from_task(os.environ[var], distdir)

            return self.install_from_recent(distdir)
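
    # install_from resolves its source in this order; the task env var name is
    # derived from the job, e.g. a "win64-opt" job checks
    # MOZ_ARTIFACT_TASK_WIN64_OPT before MOZ_ARTIFACT_TASK:
    #
    #   1. an existing file path                      -> install_from_file
    #   2. a URL with a scheme                        -> install_from_url
    #   3. a revset (or MOZ_ARTIFACT_REVISION)        -> install_from_revset
    #   4. MOZ_ARTIFACT_TASK_<JOB>/MOZ_ARTIFACT_TASK  -> install_from_task
    #   5. otherwise, recent pushheads                -> install_from_recent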

    def clear_cache(self):
        self.log(logging.INFO, "artifact", {}, "Deleting cached artifacts and caches.")
        self._task_cache.clear_cache()
        self._artifact_cache.clear_cache()
        self._pushhead_cache.clear_cache()