1# Copyright (C) 2010-2018 Jelmer Vernooij <jelmer@jelmer.uk>
2#
3# This program is free software; you can redistribute it and/or modify
4# it under the terms of the GNU General Public License as published by
5# the Free Software Foundation; either version 2 of the License, or
6# (at your option) any later version.
7#
8# This program is distributed in the hope that it will be useful,
9# but WITHOUT ANY WARRANTY; without even the implied warranty of
10# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
11# GNU General Public License for more details.
12#
13# You should have received a copy of the GNU General Public License
14# along with this program; if not, write to the Free Software
15# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16
17"""A Git repository implementation that uses a Bazaar transport."""
18
19from io import BytesIO
20
21import os
22import sys
23import posixpath
24
25from dulwich.errors import (
26    NoIndexPresent,
27    )
28from dulwich.file import (
29    GitFile,
30    FileLocked,
31    )
32from dulwich.objects import (
33    ShaFile,
34    )
35from dulwich.object_store import (
36    PackBasedObjectStore,
37    PACKDIR,
38    read_packs_file,
39    )
40from dulwich.pack import (
41    MemoryPackIndex,
42    PackData,
43    PackIndexer,
44    Pack,
45    PackStreamCopier,
46    iter_sha1,
47    load_pack_index_file,
48    write_pack_objects,
49    write_pack_index_v2,
50    write_pack_header,
51    compute_file_sha,
52    write_pack_object,
53    )
54from dulwich.repo import (
55    BaseRepo,
56    InfoRefsContainer,
57    RefsContainer,
58    BASE_DIRECTORIES,
59    COMMONDIR,
60    CONTROLDIR,
61    INDEX_FILENAME,
62    OBJECTDIR,
63    SYMREF,
64    check_ref_format,
65    read_packed_refs_with_peeled,
66    read_packed_refs,
67    write_packed_refs,
68    )
69
70from .. import (
71    osutils,
72    transport as _mod_transport,
73    urlutils,
74    )
75from ..errors import (
76    AlreadyControlDirError,
77    FileExists,
78    LockBroken,
79    LockContention,
80    NotLocalUrl,
81    NoSuchFile,
82    ReadError,
83    TransportNotPossible,
84    )
85
86from ..lock import LogicalLockResult
87from ..trace import warning
88
89
class TransportRefsContainer(RefsContainer):
    """Refs container that reads refs from a transport."""

    def __init__(self, transport, worktree_transport=None):
        # Transport for the shared control directory (refs/, packed-refs).
        self.transport = transport
        if worktree_transport is None:
            worktree_transport = transport
        # HEAD is read/written via the worktree's control transport; for a
        # bare repository both transports are the same object.
        self.worktree_transport = worktree_transport
        # Lazily-populated caches of the packed-refs file; None means
        # "not read yet" (see get_packed_refs).
        self._packed_refs = None
        self._peeled_refs = None

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    def _ensure_dir_exists(self, path):
        # Create all missing parent directories of path.
        self.transport.clone(posixpath.dirname(path)).create_prefix()

    def subkeys(self, base):
        """Refs present in this container under a base.

        :param base: The base to return refs under.
        :return: A set of valid refs in this container under the base; the base
            prefix is stripped from the ref names returned.
        """
        keys = set()
        # +1 so the "/" separator following the base is stripped too.
        base_len = len(base) + 1
        for refname in self.allkeys():
            if refname.startswith(base):
                keys.add(refname[base_len:])
        return keys

    def allkeys(self):
        """Return the set of all ref names: HEAD, loose refs and packed refs."""
        keys = set()
        try:
            self.worktree_transport.get_bytes("HEAD")
        except NoSuchFile:
            pass
        else:
            keys.add(b"HEAD")
        try:
            iter_files = list(self.transport.clone(
                "refs").iter_files_recursive())
            for filename in iter_files:
                unquoted_filename = urlutils.unquote_to_bytes(filename)
                refname = osutils.pathjoin(b"refs", unquoted_filename)
                if check_ref_format(refname):
                    keys.add(refname)
        except (TransportNotPossible, NoSuchFile):
            # Transport cannot list recursively, or there is no refs
            # directory; fall back to packed refs only.
            pass
        keys.update(self.get_packed_refs())
        return keys

    def get_packed_refs(self):
        """Get contents of the packed-refs file.

        :return: Dictionary mapping ref names to SHA1s

        :note: Will return an empty dictionary when no packed-refs file is
            present.
        """
        # TODO: invalidate the cache on repacking
        if self._packed_refs is None:
            # set both to empty because we want _peeled_refs to be
            # None if and only if _packed_refs is also None.
            self._packed_refs = {}
            self._peeled_refs = {}
            try:
                f = self.transport.get("packed-refs")
            except NoSuchFile:
                return {}
            try:
                first_line = next(iter(f)).rstrip()
                if (first_line.startswith(b"# pack-refs") and b" peeled" in
                        first_line):
                    # Header comment advertises peeled entries.
                    for sha, name, peeled in read_packed_refs_with_peeled(f):
                        self._packed_refs[name] = sha
                        if peeled:
                            self._peeled_refs[name] = peeled
                else:
                    # No peel information; rewind and parse plain entries.
                    f.seek(0)
                    for sha, name in read_packed_refs(f):
                        self._packed_refs[name] = sha
            finally:
                f.close()
        return self._packed_refs

    def get_peeled(self, name):
        """Return the cached peeled value of a ref, if available.

        :param name: Name of the ref to peel
        :return: The peeled value of the ref. If the ref is known not to point
            to a tag, this will be the SHA the ref refers to. If the ref may
            point to a tag, but no cached information is available, None is
            returned.
        """
        self.get_packed_refs()
        if self._peeled_refs is None or name not in self._packed_refs:
            # No cache: no peeled refs were read, or this ref is loose
            return None
        if name in self._peeled_refs:
            return self._peeled_refs[name]
        else:
            # Known not peelable
            return self[name]

    def read_loose_ref(self, name):
        """Read a reference file and return its contents.

        If the reference file is a symbolic reference, only read the first
        line of the file. Otherwise, only read the first 40 bytes.

        :param name: the refname to read, relative to refpath
        :return: The contents of the ref file, or None if the file does not
            exist.
        :raises IOError: if any other error occurs
        """
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            f = transport.get(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            return None
        with f:
            try:
                header = f.read(len(SYMREF))
            except ReadError:
                # probably a directory
                return None
            if header == SYMREF:
                # Read only the first line
                return header + next(iter(f)).rstrip(b"\r\n")
            else:
                # Read only the first 40 bytes
                return header + f.read(40 - len(SYMREF))

    def _remove_packed_ref(self, name):
        """Remove a ref from the packed-refs file, rewriting it on disk."""
        if self._packed_refs is None:
            return
        # reread cached refs from disk, while holding the lock

        self._packed_refs = None
        self.get_packed_refs()

        if name not in self._packed_refs:
            return

        del self._packed_refs[name]
        if name in self._peeled_refs:
            del self._peeled_refs[name]
        with self.transport.open_write_stream("packed-refs") as f:
            write_packed_refs(f, self._packed_refs, self._peeled_refs)

    def set_symbolic_ref(self, name, other):
        """Make a ref point at another ref.

        :param name: Name of the ref to set
        :param other: Name of the ref to point at
        """
        self._check_refname(name)
        self._check_refname(other)
        if name != b'HEAD':
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        else:
            transport = self.worktree_transport
        transport.put_bytes(urlutils.quote_from_bytes(
            name), SYMREF + other + b'\n')

    def set_if_equals(self, name, old_ref, new_ref):
        """Set a refname to new_ref only if it currently equals old_ref.

        This method follows all symbolic references, and can be used to perform
        an atomic compare-and-swap operation.

        :param name: The refname to set.
        :param old_ref: The old sha the refname must refer to, or None to set
            unconditionally.
        :param new_ref: The new sha the refname will refer to.
        :return: True if the set was successful, False otherwise.
        """
        # NOTE(review): old_ref is never consulted below, so the swap is
        # unconditional despite the docstring — confirm whether this is a
        # known limitation of the transport backend.
        self._check_refname(name)
        try:
            realnames, _ = self.follow(name)
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(
            realname), new_ref + b"\n")
        return True

    def add_if_new(self, name, ref):
        """Add a new reference only if it does not already exist.

        This method follows symrefs, and only ensures that the last ref in the
        chain does not exist.

        :param name: The refname to set.
        :param ref: The new sha the refname will refer to.
        :return: True if the add was successful, False otherwise.
        """
        try:
            realnames, contents = self.follow(name)
            if contents is not None:
                # Target of the symref chain already exists.
                return False
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        self._check_refname(realname)
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(realname), ref + b"\n")
        return True

    def remove_if_equals(self, name, old_ref):
        """Remove a refname only if it currently equals old_ref.

        This method does not follow symbolic references. It can be used to
        perform an atomic compare-and-delete operation.

        :param name: The refname to delete.
        :param old_ref: The old sha the refname must refer to, or None to
            delete unconditionally.
        :return: True if the delete was successful, False otherwise.
        """
        # NOTE(review): old_ref is never consulted below, so the delete is
        # unconditional despite the docstring — confirm intent.
        self._check_refname(name)
        # may only be packed
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            transport.delete(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            pass
        self._remove_packed_ref(name)
        return True

    def get(self, name, default=None):
        """Return the value of a ref, or default if it does not exist."""
        try:
            return self[name]
        except KeyError:
            return default

    def unlock_ref(self, name):
        """Best-effort removal of the .lock file for a ref."""
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        lockname = name + b".lock"
        try:
            transport.delete(urlutils.quote_from_bytes(lockname))
        except NoSuchFile:
            # Already unlocked; nothing to do.
            pass

    def lock_ref(self, name):
        """Lock a ref, returning a result object whose unlock releases it.

        :param name: Name of the ref to lock, as bytes
        :return: A LogicalLockResult
        :raises LockContention: if the ref is already locked
        """
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        lockname = urlutils.quote_from_bytes(name + b".lock")
        try:
            local_path = transport.local_abspath(
                urlutils.quote_from_bytes(name))
        except NotLocalUrl:
            # Remote transport: fall back to a marker lock file.
            # This is racy, but what can we do?
            if transport.has(lockname):
                raise LockContention(name)
            transport.put_bytes(lockname, b'Locked by brz-git')
            return LogicalLockResult(lambda: transport.delete(lockname))
        else:
            # Local path: use dulwich's GitFile, which creates the .lock
            # file itself.
            try:
                gf = GitFile(local_path, 'wb')
            except FileLocked as e:
                raise LockContention(name, e)
            else:
                def unlock():
                    try:
                        transport.delete(lockname)
                    except NoSuchFile:
                        raise LockBroken(lockname)
                    # GitFile.abort doesn't care if the lock has already
                    # disappeared
                    gf.abort()
                return LogicalLockResult(unlock)
384
385
386# TODO(jelmer): Use upstream read_gitfile; unfortunately that expects strings
387# rather than bytes..
def read_gitfile(f):
    """Read a ``.git`` file.

    The first line of the file should start with "gitdir: "

    :param f: File-like object to read from
    :return: A path
    """
    prefix = b"gitdir: "
    contents = f.read()
    if not contents.startswith(prefix):
        raise ValueError("Expected file to start with 'gitdir: '")
    return contents[len(prefix):].rstrip(b"\n")
400
401
class TransportRepo(BaseRepo):
    """A Git repository accessed via a Breezy transport."""

    def __init__(self, transport, bare, refs_text=None):
        self.transport = transport
        self.bare = bare
        try:
            # A ".git" *file* (rather than a directory) contains a
            # "gitdir: " pointer to the real control directory.
            with transport.get(CONTROLDIR) as f:
                path = read_gitfile(f)
        except (ReadError, NoSuchFile):
            # No ".git" file: control dir is the repo root (bare) or
            # the ".git" subdirectory (non-bare).
            if self.bare:
                self._controltransport = self.transport
            else:
                self._controltransport = self.transport.clone('.git')
        else:
            self._controltransport = self.transport.clone(
                urlutils.quote_from_bytes(path))
        commondir = self.get_named_file(COMMONDIR)
        if commondir is not None:
            # A "commondir" file redirects shared data elsewhere
            # (e.g. for linked worktrees); path is relative to controldir.
            with commondir:
                commondir = os.path.join(
                    self.controldir(),
                    commondir.read().rstrip(b"\r\n").decode(
                        sys.getfilesystemencoding()))
                self._commontransport = \
                    _mod_transport.get_transport_from_path(commondir)
        else:
            self._commontransport = self._controltransport
        config = self.get_config()
        object_store = TransportObjectStore.from_config(
            self._commontransport.clone(OBJECTDIR),
            config)
        if refs_text is not None:
            # Static snapshot of refs supplied by the caller; overlay the
            # real HEAD if one can be read from disk.
            refs_container = InfoRefsContainer(BytesIO(refs_text))
            try:
                head = TransportRefsContainer(
                    self._commontransport).read_loose_ref(b"HEAD")
            except KeyError:
                pass
            else:
                refs_container._refs[b"HEAD"] = head
        else:
            refs_container = TransportRefsContainer(
                self._commontransport, self._controltransport)
        super(TransportRepo, self).__init__(object_store,
                                            refs_container)

    def controldir(self):
        """Return the local filesystem path of the control directory."""
        return self._controltransport.local_abspath('.')

    def commondir(self):
        """Return the local filesystem path of the common control directory."""
        return self._commontransport.local_abspath('.')

    @property
    def path(self):
        """Local filesystem path of the repository root."""
        return self.transport.local_abspath('.')

    def _determine_file_mode(self):
        """Whether file modes (executable bits) should be honoured."""
        # Be consistent with bzr
        if sys.platform == 'win32':
            return False
        return True

    def get_named_file(self, path):
        """Get a file from the control dir with a specific name.

        Although the filename should be interpreted as a filename relative to
        the control dir in a disk-baked Repo, the object returned need not be
        pointing to a file in that location.

        :param path: The path to the file, relative to the control dir.
        :return: An open file object, or None if the file does not exist.
        """
        try:
            return self._controltransport.get(path.lstrip('/'))
        except NoSuchFile:
            return None

    def _put_named_file(self, relpath, contents):
        """Write a file into the control directory."""
        self._controltransport.put_bytes(relpath, contents)

    def index_path(self):
        """Return the path to the index file."""
        return self._controltransport.local_abspath(INDEX_FILENAME)

    def open_index(self):
        """Open the index for this repository."""
        from dulwich.index import Index
        if not self.has_index():
            raise NoIndexPresent()
        return Index(self.index_path())

    def has_index(self):
        """Check if an index is present."""
        # Bare repos must never have index files; non-bare repos may have a
        # missing index file, which is treated as empty.
        return not self.bare

    def get_config(self):
        """Return the repository's ConfigFile (empty if none exists)."""
        from dulwich.config import ConfigFile
        try:
            with self._controltransport.get('config') as f:
                return ConfigFile.from_file(f)
        except NoSuchFile:
            return ConfigFile()

    def get_config_stack(self):
        """Return a StackedConfig with the repo config layered over defaults."""
        from dulwich.config import StackedConfig
        backends = []
        p = self.get_config()
        if p is not None:
            backends.append(p)
            writable = p
        else:
            writable = None
        backends.extend(StackedConfig.default_backends())
        return StackedConfig(backends, writable=writable)

    # Here for compatibility with dulwich < 0.19.17
    def generate_pack_data(self, have, want, progress=None, ofs_delta=None):
        """Generate pack data objects for a set of wants/haves.

        Args:
          have: List of SHA1s of objects that should not be sent
          want: List of SHA1s of objects that should be sent
          ofs_delta: Whether OFS deltas can be included
          progress: Optional progress reporting method
        """
        shallow = self.get_shallow()
        if shallow:
            # Older dulwich object stores reject a shallow keyword; only
            # pass it when there actually are shallow commits.
            return self.object_store.generate_pack_data(
                have, want, shallow=shallow,
                progress=progress, ofs_delta=ofs_delta)
        else:
            return self.object_store.generate_pack_data(
                have, want, progress=progress, ofs_delta=ofs_delta)

    def __repr__(self):
        return "<%s for %r>" % (self.__class__.__name__, self.transport)

    @classmethod
    def init(cls, transport, bare=False):
        """Create a new repository at the given transport.

        :param transport: Transport for the repository root
        :param bare: Whether to create a bare repository
        :return: The new TransportRepo
        :raises AlreadyControlDirError: if a repository already exists there
        """
        if not bare:
            try:
                transport.mkdir(".git")
            except FileExists:
                raise AlreadyControlDirError(transport.base)
            control_transport = transport.clone(".git")
        else:
            control_transport = transport
        for d in BASE_DIRECTORIES:
            try:
                control_transport.mkdir("/".join(d))
            except FileExists:
                pass
        try:
            control_transport.mkdir(OBJECTDIR)
        except FileExists:
            # An existing objects directory means a repo is already here.
            raise AlreadyControlDirError(transport.base)
        TransportObjectStore.init(control_transport.clone(OBJECTDIR))
        ret = cls(transport, bare)
        ret.refs.set_symbolic_ref(b"HEAD", b"refs/heads/master")
        ret._init_files(bare)
        return ret
565
566
class TransportObjectStore(PackBasedObjectStore):
    """Git-style object store that exists on disk."""

    def __init__(self, transport,
                 loose_compression_level=-1, pack_compression_level=-1):
        """Open an object store.

        :param transport: Transport to open data from
        :param loose_compression_level: zlib compression level for loose
            objects; -1 selects the zlib default
        :param pack_compression_level: zlib compression level for pack
            data; -1 selects the zlib default
        """
        super(TransportObjectStore, self).__init__()
        self.pack_compression_level = pack_compression_level
        self.loose_compression_level = loose_compression_level
        self.transport = transport
        self.pack_transport = self.transport.clone(PACKDIR)
        self._alternates = None

    @classmethod
    def from_config(cls, path, config):
        """Create a store with compression levels read from git config.

        Reads ``core.compression`` as the default, optionally overridden
        by ``core.looseCompression`` and ``core.packCompression``.

        :param path: Transport pointing at the objects directory
        :param config: dulwich Config object to read from
        """
        try:
            default_compression_level = int(config.get(
                (b'core', ), b'compression').decode())
        except KeyError:
            default_compression_level = -1
        try:
            loose_compression_level = int(config.get(
                (b'core', ), b'looseCompression').decode())
        except KeyError:
            loose_compression_level = default_compression_level
        try:
            # Bytes key for consistency with the other lookups above.
            pack_compression_level = int(config.get(
                (b'core', ), b'packCompression').decode())
        except KeyError:
            pack_compression_level = default_compression_level
        return cls(path, loose_compression_level, pack_compression_level)

    def __eq__(self, other):
        # Two stores are equal iff they are backed by the same transport.
        if not isinstance(other, TransportObjectStore):
            return False
        return self.transport == other.transport

    def __hash__(self):
        # Defining __eq__ alone would implicitly make instances unhashable;
        # keep the hash consistent with equality on the backing transport.
        return hash(self.transport)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    @property
    def alternates(self):
        """List of alternate object stores listed in info/alternates."""
        if self._alternates is not None:
            return self._alternates
        self._alternates = []
        for path in self._read_alternate_paths():
            # FIXME: Check path
            t = _mod_transport.get_transport_from_path(path)
            self._alternates.append(self.__class__(t))
        return self._alternates

    def _read_alternate_paths(self):
        """Read alternate object store paths from info/alternates.

        :return: List of paths (bytes); empty if the file is absent.
        """
        try:
            f = self.transport.get("info/alternates")
        except NoSuchFile:
            return []
        ret = []
        with f:
            for line in f.read().splitlines():
                # Skip blank lines and comments. Indexing bytes yields an
                # int, so the former ``line[0] == b"#"`` check could never
                # match on Python 3 (and raised IndexError on empty lines).
                if not line or line.startswith(b"#"):
                    continue
                if os.path.isabs(line):
                    # NOTE(review): absolute alternate paths are skipped;
                    # presumably only relative alternates are supported
                    # here — confirm against callers.
                    continue
                ret.append(line)
            return ret

    def _update_pack_cache(self):
        """Bring the in-memory pack cache in sync with the pack directory.

        :return: List of newly discovered Pack objects.
        """
        pack_files = set(self._pack_names())
        new_packs = []
        for basename in pack_files:
            pack_name = basename + ".pack"
            if basename not in self._pack_cache:
                try:
                    size = self.pack_transport.stat(pack_name).st_size
                except TransportNotPossible:
                    # Transport cannot stat (e.g. HTTP); fetch the whole
                    # pack file instead.
                    try:
                        f = self.pack_transport.get(pack_name)
                    except NoSuchFile:
                        warning('Unable to read pack file %s',
                                self.pack_transport.abspath(pack_name))
                        continue
                    # TODO(jelmer): Don't read entire file into memory?
                    f = BytesIO(f.read())
                    pd = PackData(pack_name, f)
                else:
                    pd = PackData(
                        pack_name, self.pack_transport.get(pack_name),
                        size=size)
                idxname = basename + ".idx"
                idx = load_pack_index_file(
                    idxname, self.pack_transport.get(idxname))
                pack = Pack.from_objects(pd, idx)
                pack._basename = basename
                self._pack_cache[basename] = pack
                new_packs.append(pack)
        # Remove disappeared pack files
        for stale in set(self._pack_cache) - pack_files:
            self._pack_cache.pop(stale).close()
        return new_packs

    def _pack_names(self):
        """List basenames of complete packs (those with an index present).

        Falls back to the dumb-protocol info/packs file when the transport
        cannot list directories.
        """
        pack_files = []
        try:
            dir_contents = self.pack_transport.list_dir(".")
            for name in dir_contents:
                if name.startswith("pack-") and name.endswith(".pack"):
                    # verify that idx exists first (otherwise the pack was not
                    # yet fully written)
                    idx_name = os.path.splitext(name)[0] + ".idx"
                    if idx_name in dir_contents:
                        pack_files.append(os.path.splitext(name)[0])
        except TransportNotPossible:
            try:
                f = self.transport.get('info/packs')
            except NoSuchFile:
                warning('No info/packs on remote host;'
                        'run \'git update-server-info\' on remote.')
            else:
                with f:
                    pack_files = [
                        os.path.splitext(name)[0]
                        for name in read_packs_file(f)]
        except NoSuchFile:
            # No pack directory at all: no packs.
            pass
        return pack_files

    def _remove_pack(self, pack):
        """Delete a pack and its index, and drop it from the cache."""
        self.pack_transport.delete(os.path.basename(pack.index.path))
        self.pack_transport.delete(pack.data.filename)
        try:
            del self._pack_cache[os.path.basename(pack._basename)]
        except KeyError:
            pass

    def _iter_loose_objects(self):
        """Yield the SHA (as bytes) of every loose object in the store."""
        for base in self.transport.list_dir('.'):
            # Loose object fan-out directories are exactly two hex chars;
            # this also skips "pack" and "info".
            if len(base) != 2:
                continue
            for rest in self.transport.list_dir(base):
                yield (base + rest).encode(sys.getfilesystemencoding())

    def _split_loose_object(self, sha):
        """Split a SHA into its fan-out directory and filename parts."""
        return (sha[:2], sha[2:])

    def _remove_loose_object(self, sha):
        """Delete the loose object with the given SHA."""
        path = osutils.joinpath(self._split_loose_object(sha))
        self.transport.delete(urlutils.quote_from_bytes(path))

    def _get_loose_object(self, sha):
        """Return the loose ShaFile for sha, or None if not present."""
        path = osutils.joinpath(self._split_loose_object(sha))
        try:
            with self.transport.get(urlutils.quote_from_bytes(path)) as f:
                return ShaFile.from_file(f)
        except NoSuchFile:
            return None

    def add_object(self, obj):
        """Add a single object to this object store.

        :param obj: Object to add
        """
        (dir, file) = self._split_loose_object(obj.id)
        try:
            self.transport.mkdir(urlutils.quote_from_bytes(dir))
        except FileExists:
            pass
        path = urlutils.quote_from_bytes(osutils.pathjoin(dir, file))
        if self.transport.has(path):
            return  # Already there, no need to write again
        # Backwards compatibility with Dulwich < 0.20, which doesn't support
        # the compression_level parameter.
        if self.loose_compression_level not in (-1, None):
            raw_string = obj.as_legacy_object(
                compression_level=self.loose_compression_level)
        else:
            raw_string = obj.as_legacy_object()
        self.transport.put_bytes(path, raw_string)

    def move_in_pack(self, f):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open BytesIO-like object containing the pack data.
        :return: The final Pack object, now registered in the cache.
        """
        f.seek(0)
        p = PackData("", f, len(f.getvalue()))
        entries = p.sorted_entries()
        # Pack files are named after the SHA1 of the sorted object ids.
        basename = "pack-%s" % iter_sha1(entry[0]
                                         for entry in entries).decode('ascii')
        p._filename = basename + ".pack"
        f.seek(0)
        self.pack_transport.put_file(basename + ".pack", f)
        with self.pack_transport.open_write_stream(
                basename + ".idx") as idxfile:
            write_pack_index_v2(idxfile, entries, p.get_stored_checksum())
        idxfile = self.pack_transport.get(basename + ".idx")
        idx = load_pack_index_file(basename + ".idx", idxfile)
        final_pack = Pack.from_objects(p, idx)
        final_pack._basename = basename
        self._add_cached_pack(basename, final_pack)
        return final_pack

    def move_in_thin_pack(self, f):
        """Complete a thin pack and write it into the pack directory.

        External (base) objects are resolved via self.get_raw, so the
        written pack is self-contained.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open BytesIO-like object containing the thin pack.
        """
        f.seek(0)
        p = Pack('', resolve_ext_ref=self.get_raw)
        p._data = PackData.from_file(f, len(f.getvalue()))
        p._data.pack = p
        p._idx_load = lambda: MemoryPackIndex(
            p.data.sorted_entries(), p.data.get_stored_checksum())

        pack_sha = p.index.objects_sha1()

        with self.pack_transport.open_write_stream(
                "pack-%s.pack" % pack_sha.decode('ascii')) as datafile:
            entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
        entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
        with self.pack_transport.open_write_stream(
                "pack-%s.idx" % pack_sha.decode('ascii')) as idxfile:
            write_pack_index_v2(idxfile, entries, data_sum)

    def add_pack(self):
        """Add a new pack to this object store.

        :return: Fileobject to write to and a commit function to
            call when the pack is finished.
        """
        f = BytesIO()

        def commit():
            # Only write a pack if something was actually added.
            if len(f.getvalue()) > 0:
                return self.move_in_pack(f)
            else:
                return None

        def abort():
            return None
        return f, commit, abort

    @classmethod
    def init(cls, transport):
        """Create the directory layout for a new object store.

        :param transport: Transport for the objects directory
        :return: The new TransportObjectStore
        """
        try:
            transport.mkdir('info')
        except FileExists:
            pass
        try:
            transport.mkdir(PACKDIR)
        except FileExists:
            pass
        return cls(transport)

    def _get_pack_basepath(self, entries):
        """Return the local pack path (without extension) for entries."""
        suffix = iter_sha1(entry[0] for entry in entries)
        # TODO: Handle self.pack_dir being bytes
        suffix = suffix.decode('ascii')
        return self.pack_transport.local_abspath("pack-" + suffix)

    def _complete_thin_pack(self, f, path, copier, indexer):
        """Move a specific file containing a pack into the pack directory.

        Note: The file should be on the same file system as the
            packs directory.

        Args:
          f: Open file object for the pack.
          path: Path to the pack file.
          copier: A PackStreamCopier to use for writing pack data.
          indexer: A PackIndexer for indexing the pack.
        """
        entries = list(indexer)

        # Update the header with the new number of objects.
        f.seek(0)
        write_pack_header(f, len(entries) + len(indexer.ext_refs()))

        # Must flush before reading (http://bugs.python.org/issue3207)
        f.flush()

        # Rescan the rest of the pack, computing the SHA with the new header.
        new_sha = compute_file_sha(f, end_ofs=-20)

        # Must reposition before writing (http://bugs.python.org/issue3207)
        f.seek(0, os.SEEK_CUR)

        # Complete the pack: append the externally-referenced base objects
        # so the pack becomes self-contained.
        for ext_sha in indexer.ext_refs():
            type_num, data = self.get_raw(ext_sha)
            offset = f.tell()
            crc32 = write_pack_object(
                f, type_num, data, sha=new_sha,
                compression_level=self.pack_compression_level)
            entries.append((ext_sha, offset, crc32))
        pack_sha = new_sha.digest()
        f.write(pack_sha)
        f.close()

        # Move the pack in.
        entries.sort()
        pack_base_name = self._get_pack_basepath(entries)
        target_pack = pack_base_name + '.pack'
        if sys.platform == 'win32':
            # Windows might have the target pack file lingering. Attempt
            # removal, silently passing if the target does not exist.
            try:
                os.remove(target_pack)
            except FileNotFoundError:
                pass
        os.rename(path, target_pack)

        # Write the index.
        index_file = GitFile(pack_base_name + '.idx', 'wb')
        try:
            write_pack_index_v2(index_file, entries, pack_sha)
            index_file.close()
        finally:
            # Presumably a no-op once close() succeeded; only cleans up the
            # lock file if writing failed — confirm against GitFile.abort.
            index_file.abort()

        # Add the pack to the store and return it.
        final_pack = Pack(pack_base_name)
        final_pack.check_length_and_checksum()
        self._add_cached_pack(pack_base_name, final_pack)
        return final_pack

    def add_thin_pack(self, read_all, read_some):
        """Add a new thin pack to this object store.

        Thin packs are packs that contain deltas with parents that exist
        outside the pack. They should never be placed in the object store
        directly, and always indexed and completed as they are copied.

        Args:
          read_all: Read function that blocks until the number of
            requested bytes are read.
          read_some: Read function that returns at least one byte, but may
            not return the number of bytes requested.
        Returns: A Pack object pointing at the now-completed thin pack in the
            objects/pack directory.
        """
        import tempfile
        fd, path = tempfile.mkstemp(
            dir=self.pack_transport.local_abspath('.'), prefix='tmp_pack_')
        with os.fdopen(fd, 'w+b') as f:
            indexer = PackIndexer(f, resolve_ext_ref=self.get_raw)
            copier = PackStreamCopier(read_all, read_some, f,
                                      delta_iter=indexer)
            copier.verify()
            return self._complete_thin_pack(f, path, copier, indexer)
923