# porcelain.py -- Porcelain-like layer on top of Dulwich
# Copyright (C) 2013 Jelmer Vernooij <jelmer@jelmer.uk>
#
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version
# 2.0 or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#

"""Simple wrapper that provides porcelain-like functions on top of Dulwich.

Currently implemented:
 * archive
 * add
 * branch{_create,_delete,_list}
 * check-ignore
 * checkout
 * clone
 * commit
 * commit-tree
 * daemon
 * describe
 * diff-tree
 * fetch
 * init
 * ls-files
 * ls-remote
 * ls-tree
 * pull
 * push
 * rm
 * remote{_add}
 * receive-pack
 * reset
 * rev-list
 * tag{_create,_delete,_list}
 * upload-pack
 * update-server-info
 * status
 * symbolic-ref

These functions are meant to behave similarly to the git subcommands.
Differences in behaviour are considered bugs.

Functions should generally accept both unicode strings and bytestrings.
"""

from collections import namedtuple
from contextlib import (
    closing,
    contextmanager,
)
from io import BytesIO, RawIOBase
import datetime
import os
import posixpath
import shutil
import stat
import sys
import time

from dulwich.archive import (
    tar_stream,
    )
from dulwich.client import (
    get_transport_and_path,
    )
from dulwich.config import (
    StackedConfig,
    )
from dulwich.diff_tree import (
    CHANGE_ADD,
    CHANGE_DELETE,
    CHANGE_MODIFY,
    CHANGE_RENAME,
    CHANGE_COPY,
    RENAME_CHANGE_TYPES,
    )
from dulwich.errors import (
    SendPackError,
    UpdateRefsError,
    )
from dulwich.ignore import IgnoreFilterManager
from dulwich.index import (
    blob_from_path_and_stat,
    get_unstaged_changes,
    )
from dulwich.object_store import (
    tree_lookup_path,
    )
from dulwich.objects import (
    Commit,
    Tag,
    format_timezone,
    parse_timezone,
    pretty_format_tree_entry,
    )
from dulwich.objectspec import (
    parse_commit,
    parse_object,
    parse_ref,
    parse_reftuples,
    parse_tree,
    )
from dulwich.pack import (
    write_pack_index,
    write_pack_objects,
    )
from dulwich.patch import write_tree_diff
from dulwich.protocol import (
    Protocol,
    ZERO_SHA,
    )
from dulwich.refs import (
    ANNOTATED_TAG_SUFFIX,
    LOCAL_BRANCH_PREFIX,
    strip_peeled_refs,
)
from dulwich.repo import (BaseRepo, Repo)
from dulwich.server import (
    FileSystemBackend,
    TCPGitServer,
    ReceivePackHandler,
    UploadPackHandler,
    update_server_info as server_update_server_info,
    )


# Module level tuple definition for status output
GitStatus = namedtuple('GitStatus', 'staged unstaged untracked')


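# Illustrative sketch (not part of the original module): a typical porcelain
# workflow. The repository path, file name and identity below are hypothetical.
#
#   from dulwich import porcelain
#
#   r = porcelain.init("/tmp/example-repo")
#   porcelain.add(repo=r.path, paths=["/tmp/example-repo/example.txt"])
#   porcelain.commit(repo=r.path, message=b"Initial commit",
#                    author=b"Jane Doe <jane@example.com>",
#                    committer=b"Jane Doe <jane@example.com>")

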
class NoneStream(RawIOBase):
    """Fallback if stdout or stderr are unavailable, does nothing."""

    def read(self, size=-1):
        return None

    def readall(self):
        return None

    def readinto(self, b):
        return None

    def write(self, b):
        return None


if sys.version_info[0] == 2:
    default_bytes_out_stream = sys.stdout or NoneStream()
    default_bytes_err_stream = sys.stderr or NoneStream()
else:
    default_bytes_out_stream = (
        getattr(sys.stdout, 'buffer', None) or NoneStream())
    default_bytes_err_stream = (
        getattr(sys.stderr, 'buffer', None) or NoneStream())


DEFAULT_ENCODING = 'utf-8'


class RemoteExists(Exception):
    """Raised when the remote already exists."""


def open_repo(path_or_repo):
    """Open an argument that can be a repository or a path for a repository."""
    if isinstance(path_or_repo, BaseRepo):
        return path_or_repo
    return Repo(path_or_repo)


@contextmanager
def _noop_context_manager(obj):
    """Context manager that has the same API as closing but does nothing."""
    yield obj


def open_repo_closing(path_or_repo):
    """Open an argument that can be a repository or a path for a repository.

    Returns a context manager that will close the repo on exit if the argument
    is a path, else does nothing if the argument is a repo.
    """
    if isinstance(path_or_repo, BaseRepo):
        return _noop_context_manager(path_or_repo)
    return closing(Repo(path_or_repo))


def path_to_tree_path(repopath, path):
    """Convert a path to a path usable in an index, e.g. bytes and relative to
    the repository root.

    Args:
      repopath: Repository path, absolute or relative to the cwd
      path: A path, absolute or relative to the cwd
    Returns: A path formatted for use in e.g. an index
    """
    if not isinstance(path, bytes):
        path = path.encode(sys.getfilesystemencoding())
    if not isinstance(repopath, bytes):
        repopath = repopath.encode(sys.getfilesystemencoding())
    treepath = os.path.relpath(path, repopath)
    if treepath.startswith(b'..'):
        raise ValueError('Path not in repo')
    if os.path.sep != '/':
        treepath = treepath.replace(os.path.sep.encode('ascii'), b'/')
    return treepath


def archive(repo, committish=None, outstream=default_bytes_out_stream,
            errstream=default_bytes_err_stream):
    """Create an archive.

    Args:
      repo: Path of repository for which to generate an archive.
      committish: Commit SHA1 or ref to use
      outstream: Output stream (defaults to stdout)
      errstream: Error stream (defaults to stderr)
    """

    if committish is None:
        committish = "HEAD"
    with open_repo_closing(repo) as repo_obj:
        c = parse_commit(repo_obj, committish)
        for chunk in tar_stream(
                repo_obj.object_store, repo_obj.object_store[c.tree],
                c.commit_time):
            outstream.write(chunk)


def update_server_info(repo="."):
    """Update server info files for a repository.

    Args:
      repo: path to the repository
    """
    with open_repo_closing(repo) as r:
        server_update_server_info(r)


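# Illustrative sketch (assumed usage, not part of the original module): stream
# a tar archive of HEAD into a file; the paths below are hypothetical.
#
#   with open("/tmp/example.tar", "wb") as f:
#       porcelain.archive("/tmp/example-repo", "HEAD", outstream=f)

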
def symbolic_ref(repo, ref_name, force=False):
    """Set git symbolic ref into HEAD.

    Args:
      repo: path to the repository
      ref_name: short name of the new ref
      force: force settings without checking if it exists in refs/heads
    """
    with open_repo_closing(repo) as repo_obj:
        ref_path = _make_branch_ref(ref_name)
        if not force and ref_path not in repo_obj.refs.keys():
            raise ValueError('fatal: ref `%s` is not a ref' % ref_name)
        repo_obj.refs.set_symbolic_ref(b'HEAD', ref_path)


def commit(repo=".", message=None, author=None, committer=None, encoding=None):
    """Create a new commit.

    Args:
      repo: Path to repository
      message: Optional commit message
      author: Optional author name and email
      committer: Optional committer name and email
      encoding: Optional encoding for the commit message and identities
    Returns: SHA1 of the new commit
    """
    # FIXME: Support --all argument
    # FIXME: Support --signoff argument
    if getattr(message, 'encode', None):
        message = message.encode(encoding or DEFAULT_ENCODING)
    if getattr(author, 'encode', None):
        author = author.encode(encoding or DEFAULT_ENCODING)
    if getattr(committer, 'encode', None):
        committer = committer.encode(encoding or DEFAULT_ENCODING)
    with open_repo_closing(repo) as r:
        return r.do_commit(
            message=message, author=author, committer=committer,
            encoding=encoding)


def commit_tree(repo, tree, message=None, author=None, committer=None):
    """Create a new commit object.

    Args:
      repo: Path to repository
      tree: An existing tree object
      message: Optional commit message
      author: Optional author name and email
      committer: Optional committer name and email
    """
    with open_repo_closing(repo) as r:
        return r.do_commit(
            message=message, tree=tree, committer=committer, author=author)


def init(path=".", bare=False):
    """Create a new git repository.

    Args:
      path: Path to repository.
      bare: Whether to create a bare repository.
    Returns: A Repo instance
    """
    if not os.path.exists(path):
        os.mkdir(path)

    if bare:
        return Repo.init_bare(path)
    else:
        return Repo.init(path)


def clone(source, target=None, bare=False, checkout=None,
          errstream=default_bytes_err_stream, outstream=None,
          origin=b"origin", depth=None, **kwargs):
    """Clone a local or remote git repository.

    Args:
      source: Path or URL for source repository
      target: Path to target repository (optional)
      bare: Whether or not to create a bare repository
      checkout: Whether or not to check-out HEAD after cloning
      errstream: Optional stream to write progress to
      outstream: Optional stream to write progress to (deprecated)
      origin: Name of remote from the repository used to clone
      depth: Depth to fetch at
    Returns: The new repository
    """
    # TODO(jelmer): This code overlaps quite a bit with Repo.clone
    if outstream is not None:
        import warnings
        warnings.warn(
            "outstream= has been deprecated in favour of errstream=.",
            DeprecationWarning, stacklevel=3)
        errstream = outstream

    if checkout is None:
        checkout = (not bare)
    if checkout and bare:
        raise ValueError("checkout and bare are incompatible")

    if target is None:
        target = source.split("/")[-1]

    if not os.path.exists(target):
        os.mkdir(target)

    if bare:
        r = Repo.init_bare(target)
    else:
        r = Repo.init(target)

    reflog_message = b'clone: from ' + source.encode('utf-8')
    try:
        fetch_result = fetch(
            r, source, origin, errstream=errstream, message=reflog_message,
            depth=depth, **kwargs)
        target_config = r.get_config()
        if not isinstance(source, bytes):
            source = source.encode(DEFAULT_ENCODING)
        target_config.set((b'remote', origin), b'url', source)
        target_config.set(
            (b'remote', origin), b'fetch',
            b'+refs/heads/*:refs/remotes/' + origin + b'/*')
        target_config.write_to_path()
        # TODO(jelmer): Support symref capability,
        # https://github.com/jelmer/dulwich/issues/485
        try:
            head = r[fetch_result[b'HEAD']]
        except KeyError:
            head = None
        else:
            r[b'HEAD'] = head.id
        if checkout and not bare and head is not None:
            errstream.write(b'Checking out ' + head.id + b'\n')
            r.reset_index(head.tree)
    except BaseException:
        shutil.rmtree(target)
        r.close()
        raise

    return r


def add(repo=".", paths=None):
    """Add files to the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to add. No value passed stages all modified files.
    Returns: Tuple with set of added files and ignored files
    """
    ignored = set()
    with open_repo_closing(repo) as r:
        ignore_manager = IgnoreFilterManager.from_repo(r)
        if not paths:
            paths = list(
                get_untracked_paths(os.getcwd(), r.path, r.open_index()))
        relpaths = []
        if not isinstance(paths, list):
            paths = [paths]
        for p in paths:
            relpath = os.path.relpath(p, r.path)
            if relpath.startswith('..' + os.path.sep):
                raise ValueError('path %r is not in repo' % relpath)
            # FIXME: Support patterns, directories.
            if ignore_manager.is_ignored(relpath):
                ignored.add(relpath)
                continue
            relpaths.append(relpath)
        r.stage(relpaths)
    return (relpaths, ignored)


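# Illustrative sketch (assumed usage, not part of the original module): clone a
# repository and stage a new file; the URL and paths below are hypothetical.
#
#   r = porcelain.clone("https://example.com/some/repo.git", "/tmp/repo-copy")
#   porcelain.add(repo=r.path, paths=["/tmp/repo-copy/newfile.txt"])

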
def _is_subdir(subdir, parentdir):
    """Check whether subdir is parentdir or a subdir of parentdir.

    If parentdir or subdir is a relative path, it will be disambiguated
    relative to the pwd.
    """
    parentdir_abs = os.path.realpath(parentdir) + os.path.sep
    subdir_abs = os.path.realpath(subdir) + os.path.sep
    return subdir_abs.startswith(parentdir_abs)


# TODO: option to remove ignored files also, in line with `git clean -fdx`
def clean(repo=".", target_dir=None):
    """Remove any untracked files from the target directory recursively.

    Equivalent to running `git clean -fd` in target_dir.

    Args:
      repo: Repository where the files may be tracked
      target_dir: Directory to clean - current directory if None
    """
    if target_dir is None:
        target_dir = os.getcwd()

    with open_repo_closing(repo) as r:
        if not _is_subdir(target_dir, r.path):
            raise ValueError("target_dir must be in the repo's working dir")

        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)

        paths_in_wd = _walk_working_dir_paths(target_dir, r.path)
        # Reverse file visit order, so that files and subdirectories are
        # removed before containing directory
        for ap, is_dir in reversed(list(paths_in_wd)):
            if is_dir:
                # All subdirectories and files have been removed if untracked,
                # so dir contains no tracked files iff it is empty.
                is_empty = len(os.listdir(ap)) == 0
                if is_empty:
                    os.rmdir(ap)
            else:
                ip = path_to_tree_path(r.path, ap)
                is_tracked = ip in index

                rp = os.path.relpath(ap, r.path)
                is_ignored = ignore_manager.is_ignored(rp)

                if not is_tracked and not is_ignored:
                    os.remove(ap)


def remove(repo=".", paths=None, cached=False):
    """Remove files from the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to remove
      cached: Only remove the paths from the index, keep the files on disk
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        for p in paths:
            full_path = os.path.abspath(p).encode(sys.getfilesystemencoding())
            tree_path = path_to_tree_path(r.path, p)
            try:
                index_sha = index[tree_path].sha
            except KeyError:
                raise Exception('%s did not match any files' % p)

            if not cached:
                try:
                    st = os.lstat(full_path)
                except OSError:
                    pass
                else:
                    try:
                        blob = blob_from_path_and_stat(full_path, st)
                    except IOError:
                        pass
                    else:
                        try:
                            committed_sha = tree_lookup_path(
                                r.__getitem__, r[r.head()].tree, tree_path)[1]
                        except KeyError:
                            committed_sha = None

                        if blob.id != index_sha and index_sha != committed_sha:
                            raise Exception(
                                'file has staged content differing '
                                'from both the file and head: %s' % p)

                        if index_sha != committed_sha:
                            raise Exception(
                                'file has staged changes: %s' % p)
                        os.remove(full_path)
            del index[tree_path]
        index.write()


rm = remove


def commit_decode(commit, contents, default_encoding=DEFAULT_ENCODING):
    if commit.encoding:
        encoding = commit.encoding.decode('ascii')
    else:
        encoding = default_encoding
    return contents.decode(encoding, "replace")


def commit_encode(commit, contents, default_encoding=DEFAULT_ENCODING):
    if commit.encoding:
        encoding = commit.encoding.decode('ascii')
    else:
        encoding = default_encoding
    return contents.encode(encoding)


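# Illustrative sketch (assumed usage, not part of the original module): unstage
# a file while keeping it in the working tree; the path is hypothetical.
#
#   porcelain.remove("/tmp/example-repo",
#                    paths=["/tmp/example-repo/obsolete.txt"], cached=True)

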
def print_commit(commit, decode, outstream=sys.stdout):
    """Write a human-readable commit log entry.

    Args:
      commit: A `Commit` object
      decode: Function for decoding bytes to unicode string
      outstream: A stream file to write to
    """
    outstream.write("-" * 50 + "\n")
    outstream.write("commit: " + commit.id.decode('ascii') + "\n")
    if len(commit.parents) > 1:
        outstream.write(
            "merge: " +
            "...".join([c.decode('ascii') for c in commit.parents[1:]]) + "\n")
    outstream.write("Author: " + decode(commit.author) + "\n")
    if commit.author != commit.committer:
        outstream.write("Committer: " + decode(commit.committer) + "\n")

    time_tuple = time.gmtime(commit.author_time + commit.author_timezone)
    time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
    timezone_str = format_timezone(commit.author_timezone).decode('ascii')
    outstream.write("Date: " + time_str + " " + timezone_str + "\n")
    outstream.write("\n")
    outstream.write(decode(commit.message) + "\n")
    outstream.write("\n")


def print_tag(tag, decode, outstream=sys.stdout):
    """Write a human-readable tag.

    Args:
      tag: A `Tag` object
      decode: Function for decoding bytes to unicode string
      outstream: A stream to write to
    """
    outstream.write("Tagger: " + decode(tag.tagger) + "\n")
    time_tuple = time.gmtime(tag.tag_time + tag.tag_timezone)
    time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple)
    timezone_str = format_timezone(tag.tag_timezone).decode('ascii')
    outstream.write("Date: " + time_str + " " + timezone_str + "\n")
    outstream.write("\n")
    outstream.write(decode(tag.message) + "\n")
    outstream.write("\n")


def show_blob(repo, blob, decode, outstream=sys.stdout):
    """Write a blob to a stream.

    Args:
      repo: A `Repo` object
      blob: A `Blob` object
      decode: Function for decoding bytes to unicode string
      outstream: A stream file to write to
    """
    outstream.write(decode(blob.data))


def show_commit(repo, commit, decode, outstream=sys.stdout):
    """Show a commit to a stream.

    Args:
      repo: A `Repo` object
      commit: A `Commit` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    print_commit(commit, decode=decode, outstream=outstream)
    if commit.parents:
        parent_commit = repo[commit.parents[0]]
        base_tree = parent_commit.tree
    else:
        base_tree = None
    diffstream = BytesIO()
    write_tree_diff(
        diffstream,
        repo.object_store, base_tree, commit.tree)
    diffstream.seek(0)
    outstream.write(commit_decode(commit, diffstream.getvalue()))


def show_tree(repo, tree, decode, outstream=sys.stdout):
    """Print a tree to a stream.

    Args:
      repo: A `Repo` object
      tree: A `Tree` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    for n in tree:
        outstream.write(decode(n) + "\n")


def show_tag(repo, tag, decode, outstream=sys.stdout):
    """Print a tag to a stream.

    Args:
      repo: A `Repo` object
      tag: A `Tag` object
      decode: Function for decoding bytes to unicode string
      outstream: Stream to write to
    """
    print_tag(tag, decode, outstream)
    show_object(repo, repo[tag.object[1]], decode, outstream)


def show_object(repo, obj, decode, outstream):
    return {
        b"tree": show_tree,
        b"blob": show_blob,
        b"commit": show_commit,
        b"tag": show_tag,
        }[obj.type_name](repo, obj, decode, outstream)


def print_name_status(changes):
    """Yield a simple status summary line for each changed file."""
    for change in changes:
        if not change:
            continue
        if isinstance(change, list):
            change = change[0]
        if change.type == CHANGE_ADD:
            path1 = change.new.path
            path2 = ''
            kind = 'A'
        elif change.type == CHANGE_DELETE:
            path1 = change.old.path
            path2 = ''
            kind = 'D'
        elif change.type == CHANGE_MODIFY:
            path1 = change.new.path
            path2 = ''
            kind = 'M'
        elif change.type in RENAME_CHANGE_TYPES:
            path1 = change.old.path
            path2 = change.new.path
            if change.type == CHANGE_RENAME:
                kind = 'R'
            elif change.type == CHANGE_COPY:
                kind = 'C'
        yield '%-8s%-20s%-20s' % (kind, path1, path2)


def log(repo=".", paths=None, outstream=sys.stdout, max_entries=None,
        reverse=False, name_status=False):
    """Write commit logs.

    Args:
      repo: Path to repository
      paths: Optional set of specific paths to print entries for
      outstream: Stream to write log output to
      reverse: Reverse order in which entries are printed
      name_status: Print name status
      max_entries: Optional maximum number of entries to display
    """
    with open_repo_closing(repo) as r:
        walker = r.get_walker(
            max_entries=max_entries, paths=paths, reverse=reverse)
        for entry in walker:
            def decode(x):
                return commit_decode(entry.commit, x)
            print_commit(entry.commit, decode, outstream)
            if name_status:
                outstream.writelines(
                    [l+'\n' for l in print_name_status(entry.changes())])


# TODO(jelmer): better default for encoding?
def show(repo=".", objects=None, outstream=sys.stdout,
         default_encoding=DEFAULT_ENCODING):
    """Print the changes in a commit.

    Args:
      repo: Path to repository
      objects: Objects to show (defaults to [HEAD])
      outstream: Stream to write to
      default_encoding: Default encoding to use if none is set in the
        commit
    """
    if objects is None:
        objects = ["HEAD"]
    if not isinstance(objects, list):
        objects = [objects]
    with open_repo_closing(repo) as r:
        for objectish in objects:
            o = parse_object(r, objectish)
            if isinstance(o, Commit):
                def decode(x):
                    return commit_decode(o, x, default_encoding)
            else:
                def decode(x):
                    return x.decode(default_encoding)
            show_object(r, o, decode, outstream)


def diff_tree(repo, old_tree, new_tree, outstream=sys.stdout):
    """Compares the content and mode of blobs found via two tree objects.

    Args:
      repo: Path to repository
      old_tree: Id of old tree
      new_tree: Id of new tree
      outstream: Stream to write to
    """
    with open_repo_closing(repo) as r:
        write_tree_diff(outstream, r.object_store, old_tree, new_tree)


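# Illustrative sketch (assumed usage, not part of the original module): print
# the last three log entries with per-file name status; the path is hypothetical.
#
#   porcelain.log("/tmp/example-repo", max_entries=3, name_status=True)

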
def rev_list(repo, commits, outstream=sys.stdout):
    """Lists commit objects in reverse chronological order.

    Args:
      repo: Path to repository
      commits: Commits over which to iterate
      outstream: Stream to write to
    """
    with open_repo_closing(repo) as r:
        for entry in r.get_walker(include=[r[c].id for c in commits]):
            outstream.write(entry.commit.id + b"\n")


def tag(*args, **kwargs):
    import warnings
    warnings.warn("tag has been deprecated in favour of tag_create.",
                  DeprecationWarning)
    return tag_create(*args, **kwargs)


def tag_create(
        repo, tag, author=None, message=None, annotated=False,
        objectish="HEAD", tag_time=None, tag_timezone=None,
        sign=False):
    """Creates a tag in git via dulwich calls.

    Args:
      repo: Path to repository
      tag: tag string
      author: tag author (optional, if annotated is set)
      message: tag message (optional)
      annotated: whether to create an annotated tag
      objectish: object the tag should point at, defaults to HEAD
      tag_time: Optional time for annotated tag
      tag_timezone: Optional timezone for annotated tag
      sign: GPG Sign the tag
    """

    with open_repo_closing(repo) as r:
        object = parse_object(r, objectish)

        if annotated:
            # Create the tag object
            tag_obj = Tag()
            if author is None:
                # TODO(jelmer): Don't use repo private method.
                author = r._get_user_identity(r.get_config_stack())
            tag_obj.tagger = author
            tag_obj.message = message
            tag_obj.name = tag
            tag_obj.object = (type(object), object.id)
            if tag_time is None:
                tag_time = int(time.time())
            tag_obj.tag_time = tag_time
            if tag_timezone is None:
                # TODO(jelmer) Use current user timezone rather than UTC
                tag_timezone = 0
            elif isinstance(tag_timezone, str):
                tag_timezone = parse_timezone(tag_timezone)
            tag_obj.tag_timezone = tag_timezone
            if sign:
                import gpg
                with gpg.Context(armor=True) as c:
                    tag_obj.signature, unused_result = c.sign(
                        tag_obj.as_raw_string())
            r.object_store.add_object(tag_obj)
            tag_id = tag_obj.id
        else:
            tag_id = object.id

        r.refs[_make_tag_ref(tag)] = tag_id


def list_tags(*args, **kwargs):
    import warnings
    warnings.warn("list_tags has been deprecated in favour of tag_list.",
                  DeprecationWarning)
    return tag_list(*args, **kwargs)


def tag_list(repo, outstream=sys.stdout):
    """List all tags.

    Args:
      repo: Path to repository
      outstream: Stream to write tags to
    """
    with open_repo_closing(repo) as r:
        tags = sorted(r.refs.as_dict(b"refs/tags"))
        return tags


def tag_delete(repo, name):
    """Remove a tag.

    Args:
      repo: Path to repository
      name: Name of tag to remove
    """
    with open_repo_closing(repo) as r:
        if isinstance(name, bytes):
            names = [name]
        elif isinstance(name, list):
            names = name
        else:
            raise TypeError("Unexpected tag name type %r" % name)
        for name in names:
            del r.refs[_make_tag_ref(name)]


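# Illustrative sketch (assumed usage, not part of the original module): create
# an annotated tag on HEAD; the tag name and identity below are hypothetical.
#
#   porcelain.tag_create(
#       "/tmp/example-repo", b"v0.1.0", annotated=True,
#       author=b"Jane Doe <jane@example.com>", message=b"First release")

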
def reset(repo, mode, treeish="HEAD"):
    """Reset current HEAD to the specified state.

    Args:
      repo: Path to repository
      mode: Mode ("hard", "soft", "mixed")
      treeish: Treeish to reset to
    """

    if mode != "hard":
        raise ValueError("hard is the only mode currently supported")

    with open_repo_closing(repo) as r:
        tree = parse_tree(r, treeish)
        r.reset_index(tree.id)


def push(repo, remote_location, refspecs,
         outstream=default_bytes_out_stream,
         errstream=default_bytes_err_stream, **kwargs):
    """Remote push with dulwich via dulwich.client

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: Refs to push to remote
      outstream: A stream file to write output
      errstream: A stream file to write errors
    """

    # Open the repo
    with open_repo_closing(repo) as r:

        # Get the client and path
        client, path = get_transport_and_path(
            remote_location, config=r.get_config_stack(), **kwargs)

        selected_refs = []

        def update_refs(refs):
            selected_refs.extend(parse_reftuples(r.refs, refs, refspecs))
            new_refs = {}
            # TODO: Handle selected_refs == {None: None}
            for (lh, rh, force) in selected_refs:
                if lh is None:
                    new_refs[rh] = ZERO_SHA
                else:
                    new_refs[rh] = r.refs[lh]
            return new_refs

        err_encoding = getattr(errstream, 'encoding', None) or DEFAULT_ENCODING
        remote_location_bytes = client.get_url(path).encode(err_encoding)
        try:
            client.send_pack(
                path, update_refs,
                generate_pack_data=r.object_store.generate_pack_data,
                progress=errstream.write)
            errstream.write(
                b"Push to " + remote_location_bytes + b" successful.\n")
        except UpdateRefsError as e:
            errstream.write(b"Push to " + remote_location_bytes +
                            b" failed -> " + e.message.encode(err_encoding) +
                            b"\n")
        except SendPackError as e:
            errstream.write(b"Push to " + remote_location_bytes +
                            b" failed -> " + e.args[0] + b"\n")


def pull(repo, remote_location=None, refspecs=None,
         outstream=default_bytes_out_stream,
         errstream=default_bytes_err_stream, **kwargs):
    """Pull from remote via dulwich.client

    Args:
      repo: Path to repository
      remote_location: Location of the remote
      refspecs: refspecs to fetch
      outstream: A stream file to write output
      errstream: A stream file to write errors
    """
    # Open the repo
    with open_repo_closing(repo) as r:
        if remote_location is None:
            config = r.get_config()
            remote_name = get_branch_remote(r.path)
            section = (b'remote', remote_name)

            if config.has_section(section):
                url = config.get(section, 'url')
                remote_location = url.decode()

        if refspecs is None:
            refspecs = [b"HEAD"]
        selected_refs = []

        def determine_wants(remote_refs):
            selected_refs.extend(
                parse_reftuples(remote_refs, r.refs, refspecs))
            return [remote_refs[lh] for (lh, rh, force) in selected_refs]
        client, path = get_transport_and_path(
            remote_location, config=r.get_config_stack(), **kwargs)
        fetch_result = client.fetch(
            path, r, progress=errstream.write, determine_wants=determine_wants)
        for (lh, rh, force) in selected_refs:
            r.refs[rh] = fetch_result.refs[lh]
        if selected_refs:
            r[b'HEAD'] = fetch_result.refs[selected_refs[0][1]]

        # Perform 'git checkout .' - syncs staged changes
        tree = r[b"HEAD"].tree
        r.reset_index(tree=tree)


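# Illustrative sketch (assumed usage, not part of the original module): push a
# local branch and pull it back; the URL and paths below are hypothetical.
#
#   porcelain.push("/tmp/example-repo", "https://example.com/some/repo.git",
#                  [b"refs/heads/master"])
#   porcelain.pull("/tmp/example-repo", "https://example.com/some/repo.git",
#                  refspecs=[b"refs/heads/master"])

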
def status(repo=".", ignored=False):
    """Returns staged, unstaged, and untracked changes relative to the HEAD.

    Args:
      repo: Path to repository or repository object
      ignored: Whether to include ignored files in `untracked`
    Returns: GitStatus tuple,
        staged - dict with lists of staged paths (diff index/HEAD)
        unstaged - list of unstaged paths (diff index/working-tree)
        untracked - list of untracked, un-ignored & non-.git paths
    """
    with open_repo_closing(repo) as r:
        # 1. Get status of staged
        tracked_changes = get_tree_changes(r)
        # 2. Get status of unstaged
        index = r.open_index()
        normalizer = r.get_blob_normalizer()
        filter_callback = normalizer.checkin_normalize
        unstaged_changes = list(
            get_unstaged_changes(index, r.path, filter_callback)
        )
        ignore_manager = IgnoreFilterManager.from_repo(r)
        untracked_paths = get_untracked_paths(r.path, r.path, index)
        if ignored:
            untracked_changes = list(untracked_paths)
        else:
            untracked_changes = [
                p for p in untracked_paths
                if not ignore_manager.is_ignored(p)]
        return GitStatus(tracked_changes, unstaged_changes, untracked_changes)


def _walk_working_dir_paths(frompath, basepath):
    """Get path, is_dir for files in working dir from frompath.

    Args:
      frompath: Path to begin walk
      basepath: Path to compare to
    """
    for dirpath, dirnames, filenames in os.walk(frompath):
        # Skip .git and below.
        if '.git' in dirnames:
            dirnames.remove('.git')
            if dirpath != basepath:
                continue
        if '.git' in filenames:
            filenames.remove('.git')
            if dirpath != basepath:
                continue

        if dirpath != frompath:
            yield dirpath, True

        for filename in filenames:
            filepath = os.path.join(dirpath, filename)
            yield filepath, False


def get_untracked_paths(frompath, basepath, index):
    """Get untracked paths.

    Args:
      frompath: Path to walk
      basepath: Path to compare to
      index: Index to check against
    """
    for ap, is_dir in _walk_working_dir_paths(frompath, basepath):
        if not is_dir:
            ip = path_to_tree_path(basepath, ap)
            if ip not in index:
                yield os.path.relpath(ap, frompath)


def get_tree_changes(repo):
    """Return add/delete/modify changes to tree by comparing index to HEAD.

    Args:
      repo: repo path or object
    Returns: dict with lists for each type of change
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()

        # Compares the Index to the HEAD & determines changes
        # Iterate through the changes and report add/delete/modify
        # TODO: call out to dulwich.diff_tree somehow.
        tracked_changes = {
            'add': [],
            'delete': [],
            'modify': [],
        }
        try:
            tree_id = r[b'HEAD'].tree
        except KeyError:
            tree_id = None

        for change in index.changes_from_tree(r.object_store, tree_id):
            if not change[0][0]:
                tracked_changes['add'].append(change[0][1])
            elif not change[0][1]:
                tracked_changes['delete'].append(change[0][0])
            elif change[0][0] == change[0][1]:
                tracked_changes['modify'].append(change[0][0])
            else:
                raise AssertionError('git mv ops not yet supported')
        return tracked_changes


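# Illustrative sketch (assumed usage, not part of the original module): inspect
# the three GitStatus fields returned by status(); the path is hypothetical.
#
#   st = porcelain.status("/tmp/example-repo")
#   print(st.staged['add'], st.staged['modify'], st.staged['delete'])
#   print(st.unstaged)    # paths changed in the working tree but not staged
#   print(st.untracked)   # paths not in the index (ignored files excluded)

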
def daemon(path=".", address=None, port=None):
    """Run a daemon serving Git requests over TCP/IP.

    Args:
      path: Path to the directory to serve.
      address: Optional address to listen on (defaults to ::)
      port: Optional port to listen on (defaults to TCP_GIT_PORT)
    """
    # TODO(jelmer): Support git-daemon-export-ok and --export-all.
    backend = FileSystemBackend(path)
    server = TCPGitServer(backend, address, port)
    server.serve_forever()


def web_daemon(path=".", address=None, port=None):
    """Run a daemon serving Git requests over HTTP.

    Args:
      path: Path to the directory to serve
      address: Optional address to listen on (defaults to ::)
      port: Optional port to listen on (defaults to 80)
    """
    from dulwich.web import (
        make_wsgi_chain,
        make_server,
        WSGIRequestHandlerLogger,
        WSGIServerLogger)

    backend = FileSystemBackend(path)
    app = make_wsgi_chain(backend)
    server = make_server(address, port, app,
                         handler_class=WSGIRequestHandlerLogger,
                         server_class=WSGIServerLogger)
    server.serve_forever()


def upload_pack(path=".", inf=None, outf=None):
    """Upload a pack file after negotiating its contents using smart protocol.

    Args:
      path: Path to the repository
      inf: Input stream to communicate with client
      outf: Output stream to communicate with client
    """
    if outf is None:
        outf = getattr(sys.stdout, 'buffer', sys.stdout)
    if inf is None:
        inf = getattr(sys.stdin, 'buffer', sys.stdin)
    path = os.path.expanduser(path)
    backend = FileSystemBackend(path)

    def send_fn(data):
        outf.write(data)
        outf.flush()
    proto = Protocol(inf.read, send_fn)
    handler = UploadPackHandler(backend, [path], proto)
    # FIXME: Catch exceptions and write a single-line summary to outf.
    handler.handle()
    return 0


def receive_pack(path=".", inf=None, outf=None):
    """Receive a pack file after negotiating its contents using smart protocol.

    Args:
      path: Path to the repository
      inf: Input stream to communicate with client
      outf: Output stream to communicate with client
    """
    if outf is None:
        outf = getattr(sys.stdout, 'buffer', sys.stdout)
    if inf is None:
        inf = getattr(sys.stdin, 'buffer', sys.stdin)
    path = os.path.expanduser(path)
    backend = FileSystemBackend(path)

    def send_fn(data):
        outf.write(data)
        outf.flush()
    proto = Protocol(inf.read, send_fn)
    handler = ReceivePackHandler(backend, [path], proto)
    # FIXME: Catch exceptions and write a single-line summary to outf.
    handler.handle()
    return 0


def _make_branch_ref(name):
    if getattr(name, 'encode', None):
        name = name.encode(DEFAULT_ENCODING)
    return LOCAL_BRANCH_PREFIX + name


def _make_tag_ref(name):
    if getattr(name, 'encode', None):
        name = name.encode(DEFAULT_ENCODING)
    return b"refs/tags/" + name


def branch_delete(repo, name):
    """Delete a branch.

    Args:
      repo: Path to the repository
      name: Name of the branch
    """
    with open_repo_closing(repo) as r:
        if isinstance(name, list):
            names = name
        else:
            names = [name]
        for name in names:
            del r.refs[_make_branch_ref(name)]


def branch_create(repo, name, objectish=None, force=False):
    """Create a branch.

    Args:
      repo: Path to the repository
      name: Name of the new branch
      objectish: Target object to point new branch at (defaults to HEAD)
      force: Force creation of branch, even if it already exists
    """
    with open_repo_closing(repo) as r:
        if objectish is None:
            objectish = "HEAD"
        object = parse_object(r, objectish)
        refname = _make_branch_ref(name)
        ref_message = b"branch: Created from " + objectish.encode('utf-8')
        if force:
            r.refs.set_if_equals(refname, None, object.id, message=ref_message)
        else:
            if not r.refs.add_if_new(refname, object.id, message=ref_message):
                raise KeyError("Branch with name %s already exists." % name)


def branch_list(repo):
    """List all branches.

    Args:
      repo: Path to the repository
    """
    with open_repo_closing(repo) as r:
        return r.refs.keys(base=LOCAL_BRANCH_PREFIX)


def active_branch(repo):
    """Return the active branch in the repository, if any.

    Args:
      repo: Repository to open
    Returns:
      branch name
    Raises:
      KeyError: if the repository does not have a working tree
      IndexError: if HEAD is floating
    """
    with open_repo_closing(repo) as r:
        active_ref = r.refs.follow(b'HEAD')[0][1]
        if not active_ref.startswith(LOCAL_BRANCH_PREFIX):
            raise ValueError(active_ref)
        return active_ref[len(LOCAL_BRANCH_PREFIX):]


def get_branch_remote(repo):
    """Return the active branch's remote name, if any.

    Args:
      repo: Repository to open
    Returns:
      remote name
    Raises:
      KeyError: if the repository does not have a working tree
    """
    with open_repo_closing(repo) as r:
        branch_name = active_branch(r.path)
        config = r.get_config()
        try:
            remote_name = config.get((b'branch', branch_name), 'remote')
        except KeyError:
            remote_name = b'origin'
    return remote_name


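# Illustrative sketch (assumed usage, not part of the original module): create
# a branch from HEAD, list branches, and query the active branch.
#
#   porcelain.branch_create("/tmp/example-repo", "feature-x")
#   print(porcelain.branch_list("/tmp/example-repo"))
#   print(porcelain.active_branch("/tmp/example-repo"))

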
def fetch(repo, remote_location, remote_name=b'origin', outstream=sys.stdout,
          errstream=default_bytes_err_stream, message=None, depth=None,
          prune=False, prune_tags=False, **kwargs):
    """Fetch objects from a remote server.

    Args:
      repo: Path to the repository
      remote_location: String identifying a remote server
      remote_name: Name for remote server
      outstream: Output stream (defaults to stdout)
      errstream: Error stream (defaults to stderr)
      message: Reflog message (defaults to b"fetch: from <remote_name>")
      depth: Depth to fetch at
      prune: Prune remote removed refs
      prune_tags: Prune remote removed tags
    Returns:
      Dictionary with refs on the remote
    """
    if message is None:
        message = b'fetch: from ' + remote_location.encode("utf-8")
    with open_repo_closing(repo) as r:
        client, path = get_transport_and_path(
            remote_location, config=r.get_config_stack(), **kwargs)
        fetch_result = client.fetch(path, r, progress=errstream.write,
                                    depth=depth)
        stripped_refs = strip_peeled_refs(fetch_result.refs)
        branches = {
            n[len(LOCAL_BRANCH_PREFIX):]: v for (n, v) in stripped_refs.items()
            if n.startswith(LOCAL_BRANCH_PREFIX)}
        r.refs.import_refs(
            b'refs/remotes/' + remote_name, branches, message=message,
            prune=prune)
        tags = {
            n[len(b'refs/tags/'):]: v for (n, v) in stripped_refs.items()
            if n.startswith(b'refs/tags/') and
            not n.endswith(ANNOTATED_TAG_SUFFIX)}
        r.refs.import_refs(
            b'refs/tags', tags, message=message,
            prune=prune_tags)
    return fetch_result.refs


def ls_remote(remote, config=None, **kwargs):
    """List the refs in a remote.

    Args:
      remote: Remote repository location
      config: Configuration to use
    Returns:
      Dictionary with remote refs
    """
    if config is None:
        config = StackedConfig.default()
    client, host_path = get_transport_and_path(remote, config=config, **kwargs)
    return client.get_refs(host_path)


def repack(repo):
    """Repack loose files in a repository.

    Currently this only packs loose objects.

    Args:
      repo: Path to the repository
    """
    with open_repo_closing(repo) as r:
        r.object_store.pack_loose_objects()


def pack_objects(repo, object_ids, packf, idxf, delta_window_size=None):
    """Pack objects into a file.

    Args:
      repo: Path to the repository
      object_ids: List of object ids to write
      packf: File-like object to write to
      idxf: File-like object to write to (can be None)
      delta_window_size: Sliding window size for searching for deltas
        (optional)
    """
    with open_repo_closing(repo) as r:
        entries, data_sum = write_pack_objects(
            packf,
            r.object_store.iter_shas((oid, None) for oid in object_ids),
            delta_window_size=delta_window_size)
    if idxf is not None:
        entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
        write_pack_index(idxf, entries, data_sum)


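# Illustrative sketch (assumed usage, not part of the original module): list a
# remote's refs, then fetch them into refs/remotes/origin/*; URL hypothetical.
#
#   refs = porcelain.ls_remote("https://example.com/some/repo.git")
#   porcelain.fetch("/tmp/example-repo", "https://example.com/some/repo.git")

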
def ls_tree(repo, treeish=b"HEAD", outstream=sys.stdout, recursive=False,
            name_only=False):
    """List contents of a tree.

    Args:
      repo: Path to the repository
      treeish: Tree id to list
      outstream: Output stream (defaults to stdout)
      recursive: Whether to recursively list files
      name_only: Only print item name
    """
    def list_tree(store, treeid, base):
        for (name, mode, sha) in store[treeid].iteritems():
            if base:
                name = posixpath.join(base, name)
            if name_only:
                outstream.write(name + b"\n")
            else:
                outstream.write(pretty_format_tree_entry(name, mode, sha))
            if stat.S_ISDIR(mode) and recursive:
                list_tree(store, sha, name)
    with open_repo_closing(repo) as r:
        tree = parse_tree(r, treeish)
        list_tree(r.object_store, tree.id, "")


def remote_add(repo, name, url):
    """Add a remote.

    Args:
      repo: Path to the repository
      name: Remote name
      url: Remote URL
    """
    if not isinstance(name, bytes):
        name = name.encode(DEFAULT_ENCODING)
    if not isinstance(url, bytes):
        url = url.encode(DEFAULT_ENCODING)
    with open_repo_closing(repo) as r:
        c = r.get_config()
        section = (b'remote', name)
        if c.has_section(section):
            raise RemoteExists(section)
        c.set(section, b"url", url)
        c.write_to_path()


def check_ignore(repo, paths, no_index=False):
    """Debug gitignore files.

    Args:
      repo: Path to the repository
      paths: List of paths to check for
      no_index: Don't check index
    Returns: List of ignored files
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        ignore_manager = IgnoreFilterManager.from_repo(r)
        for path in paths:
            if not no_index and path_to_tree_path(r.path, path) in index:
                continue
            if os.path.isabs(path):
                path = os.path.relpath(path, r.path)
            if ignore_manager.is_ignored(path):
                yield path


def update_head(repo, target, detached=False, new_branch=None):
    """Update HEAD to point at a new branch/commit.

    Note that this does not actually update the working tree.

    Args:
      repo: Path to the repository
      target: Branch or committish to switch to
      detached: Create a detached head
      new_branch: New branch to create
    """
    with open_repo_closing(repo) as r:
        if new_branch is not None:
            to_set = _make_branch_ref(new_branch)
        else:
            to_set = b"HEAD"
        if detached:
            # TODO(jelmer): Provide some way so that the actual ref gets
            # updated rather than what it points to, so the delete isn't
            # necessary.
            del r.refs[to_set]
            r.refs[to_set] = parse_commit(r, target).id
        else:
            r.refs.set_symbolic_ref(to_set, parse_ref(r, target))
        if new_branch is not None:
            r.refs.set_symbolic_ref(b"HEAD", to_set)


def check_mailmap(repo, contact):
    """Check canonical name and email of contact.

    Args:
      repo: Path to the repository
      contact: Contact name and/or email
    Returns: Canonical contact data
    """
    with open_repo_closing(repo) as r:
        from dulwich.mailmap import Mailmap
        import errno
        try:
            mailmap = Mailmap.from_path(os.path.join(r.path, '.mailmap'))
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            mailmap = Mailmap()
        return mailmap.lookup(contact)


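# Illustrative sketch (assumed usage, not part of the original module): report
# which of the given paths would be ignored by the repository's gitignore.
#
#   for ignored_path in porcelain.check_ignore(
#           "/tmp/example-repo", ["build/output.o", "src/main.c"]):
#       print(ignored_path)

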
def fsck(repo):
    """Check a repository.

    Args:
      repo: A path to the repository
    Returns: Iterator over errors/warnings
    """
    with open_repo_closing(repo) as r:
        # TODO(jelmer): check pack files
        # TODO(jelmer): check graph
        # TODO(jelmer): check refs
        for sha in r.object_store:
            o = r.object_store[sha]
            try:
                o.check()
            except Exception as e:
                yield (sha, e)


def stash_list(repo):
    """List all stashes in a repository."""
    with open_repo_closing(repo) as r:
        from dulwich.stash import Stash
        stash = Stash.from_repo(r)
        return enumerate(list(stash.stashes()))


def stash_push(repo):
    """Push a new stash onto the stack."""
    with open_repo_closing(repo) as r:
        from dulwich.stash import Stash
        stash = Stash.from_repo(r)
        stash.push()


def stash_pop(repo):
    """Pop a stash from the stack."""
    with open_repo_closing(repo) as r:
        from dulwich.stash import Stash
        stash = Stash.from_repo(r)
        stash.pop()


def ls_files(repo):
    """List all files in an index."""
    with open_repo_closing(repo) as r:
        return sorted(r.open_index())


def describe(repo):
    """Describe the repository version.

    Args:
      repo: git repository root
    Returns: a string description of the current git revision

    Examples: "gabcdefh", "v0.1" or "v0.1-5-gabcdefh".
    """
    # Get the repository
    with open_repo_closing(repo) as r:
        # Get a list of all tags
        refs = r.get_refs()
        tags = {}
        for key, value in refs.items():
            key = key.decode()
            obj = r.get_object(value)
            if u'tags' not in key:
                continue

            _, tag = key.rsplit(u'/', 1)

            try:
                commit = obj.object
            except AttributeError:
                continue
            else:
                commit = r.get_object(commit[1])
            tags[tag] = [
                datetime.datetime(*time.gmtime(commit.commit_time)[:6]),
                commit.id.decode('ascii'),
            ]

        sorted_tags = sorted(tags.items(),
                             key=lambda tag: tag[1][0],
                             reverse=True)

        # If there are no tags, return the current commit
        if len(sorted_tags) == 0:
            return 'g{}'.format(r[r.head()].id.decode('ascii')[:7])

        # We're now 0 commits from the top
        commit_count = 0

        # Get the latest commit
        latest_commit = r[r.head()]

        # Walk through all commits
        walker = r.get_walker()
        for entry in walker:
            # Check if tag
            commit_id = entry.commit.id.decode('ascii')
            for tag in sorted_tags:
                tag_name = tag[0]
                tag_commit = tag[1][1]
                if commit_id == tag_commit:
                    if commit_count == 0:
                        return tag_name
                    else:
                        return '{}-{}-g{}'.format(
                            tag_name,
                            commit_count,
                            latest_commit.id.decode('ascii')[:7])

            commit_count += 1

        # Return plain commit if no parent tag can be found
        return 'g{}'.format(latest_commit.id.decode('ascii')[:7])


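# Illustrative sketch (assumed usage, not part of the original module): get a
# git-describe style version string such as "v0.1-5-gabcdefh".
#
#   version = porcelain.describe("/tmp/example-repo")
#   print(version)

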
def get_object_by_path(repo, path, committish=None):
    """Get an object by path.

    Args:
      repo: A path to the repository
      path: Path to look up
      committish: Commit to look up path in
    Returns: A `ShaFile` object
    """
    if committish is None:
        committish = "HEAD"
    # Get the repository
    with open_repo_closing(repo) as r:
        commit = parse_commit(r, committish)
        base_tree = commit.tree
        if not isinstance(path, bytes):
            path = commit_encode(commit, path)
        (mode, sha) = tree_lookup_path(
            r.object_store.__getitem__,
            base_tree, path)
        return r[sha]


def write_tree(repo):
    """Write a tree object from the index.

    Args:
      repo: Repository for which to write tree
    Returns: tree id for the tree that was written
    """
    with open_repo_closing(repo) as r:
        return r.open_index().commit(r.object_store)