1# Copyright (C) 2008-2018 Jelmer Vernooij <jelmer@jelmer.uk> 2# 3# This program is free software; you can redistribute it and/or modify 4# it under the terms of the GNU General Public License as published by 5# the Free Software Foundation; either version 2 of the License, or 6# (at your option) any later version. 7# 8# This program is distributed in the hope that it will be useful, 9# but WITHOUT ANY WARRANTY; without even the implied warranty of 10# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 11# GNU General Public License for more details. 12# 13# You should have received a copy of the GNU General Public License 14# along with this program; if not, write to the Free Software 15# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 16 17 18"""An adapter between a Git index and a Bazaar Working Tree""" 19 20import itertools 21from collections import defaultdict 22import errno 23from dulwich.ignore import ( 24 IgnoreFilterManager, 25 ) 26from dulwich.config import ConfigFile as GitConfigFile 27from dulwich.file import GitFile, FileLocked 28from dulwich.index import ( 29 Index, 30 SHA1Writer, 31 build_index_from_tree, 32 index_entry_from_path, 33 index_entry_from_stat, 34 FLAG_STAGEMASK, 35 read_submodule_head, 36 validate_path, 37 write_index_dict, 38 ) 39from dulwich.object_store import ( 40 tree_lookup_path, 41 ) 42from dulwich.objects import ( 43 S_ISGITLINK, 44 ) 45import os 46import posixpath 47import re 48import stat 49import sys 50 51from .. 
import ( 52 branch as _mod_branch, 53 conflicts as _mod_conflicts, 54 errors, 55 controldir as _mod_controldir, 56 globbing, 57 lock, 58 osutils, 59 revision as _mod_revision, 60 trace, 61 transport as _mod_transport, 62 tree, 63 urlutils, 64 workingtree, 65 ) 66from ..decorators import ( 67 only_raises, 68 ) 69from ..mutabletree import ( 70 BadReferenceTarget, 71 MutableTree, 72 ) 73 74 75from .dir import ( 76 LocalGitDir, 77 ) 78from .tree import ( 79 MutableGitIndexTree, 80 ) 81from .mapping import ( 82 encode_git_path, 83 decode_git_path, 84 mode_kind, 85 ) 86 87 88CONFLICT_SUFFIXES = ['.BASE', '.OTHER', '.THIS'] 89 90 91# TODO: There should be a base revid attribute to better inform the user about 92# how the conflicts were generated. 93class TextConflict(_mod_conflicts.Conflict): 94 """The merge algorithm could not resolve all differences encountered.""" 95 96 has_files = True 97 98 typestring = 'text conflict' 99 100 _conflict_re = re.compile(b'^(<{7}|={7}|>{7})') 101 102 def associated_filenames(self): 103 return [self.path + suffix for suffix in CONFLICT_SUFFIXES] 104 105 def _resolve(self, tt, winner_suffix): 106 """Resolve the conflict by copying one of .THIS or .OTHER into file. 107 108 :param tt: The TreeTransform where the conflict is resolved. 109 :param winner_suffix: Either 'THIS' or 'OTHER' 110 111 The resolution is symmetric, when taking THIS, item.THIS is renamed 112 into item and vice-versa. This takes one of the files as a whole 113 ignoring every difference that could have been merged cleanly. 114 """ 115 # To avoid useless copies, we switch item and item.winner_suffix, only 116 # item will exist after the conflict has been resolved anyway. 117 item_tid = tt.trans_id_tree_path(self.path) 118 item_parent_tid = tt.get_tree_parent(item_tid) 119 winner_path = self.path + '.' 
+ winner_suffix 120 winner_tid = tt.trans_id_tree_path(winner_path) 121 winner_parent_tid = tt.get_tree_parent(winner_tid) 122 # Switch the paths to preserve the content 123 tt.adjust_path(osutils.basename(self.path), 124 winner_parent_tid, winner_tid) 125 tt.adjust_path(osutils.basename(winner_path), 126 item_parent_tid, item_tid) 127 tt.unversion_file(item_tid) 128 tt.version_file(winner_tid) 129 tt.apply() 130 131 def action_auto(self, tree): 132 # GZ 2012-07-27: Using NotImplementedError to signal that a conflict 133 # can't be auto resolved does not seem ideal. 134 try: 135 kind = tree.kind(self.path) 136 except errors.NoSuchFile: 137 return 138 if kind != 'file': 139 raise NotImplementedError("Conflict is not a file") 140 conflict_markers_in_line = self._conflict_re.search 141 with tree.get_file(self.path) as f: 142 for line in f: 143 if conflict_markers_in_line(line): 144 raise NotImplementedError("Conflict markers present") 145 146 def _resolve_with_cleanups(self, tree, *args, **kwargs): 147 with tree.transform() as tt: 148 self._resolve(tt, *args, **kwargs) 149 150 def action_take_this(self, tree): 151 self._resolve_with_cleanups(tree, 'THIS') 152 153 def action_take_other(self, tree): 154 self._resolve_with_cleanups(tree, 'OTHER') 155 156 def do(self, action, tree): 157 """Apply the specified action to the conflict. 158 159 :param action: The method name to call. 160 161 :param tree: The tree passed as a parameter to the method. 162 """ 163 meth = getattr(self, 'action_%s' % action, None) 164 if meth is None: 165 raise NotImplementedError(self.__class__.__name__ + '.' + action) 166 meth(tree) 167 168 def action_done(self, tree): 169 """Mark the conflict as solved once it has been handled.""" 170 # This method does nothing but simplifies the design of upper levels. 
171 pass 172 173 def describe(self): 174 return 'Text conflict in %(path)s' % self.__dict__ 175 176 def __str__(self): 177 return self.describe() 178 179 def __repr__(self): 180 return "%s(%r)" % (type(self).__name__, self.path) 181 182 183class GitWorkingTree(MutableGitIndexTree, workingtree.WorkingTree): 184 """A Git working tree.""" 185 186 def __init__(self, controldir, repo, branch): 187 MutableGitIndexTree.__init__(self) 188 basedir = controldir.root_transport.local_abspath('.') 189 self.basedir = osutils.realpath(basedir) 190 self.controldir = controldir 191 self.repository = repo 192 self.store = self.repository._git.object_store 193 self.mapping = self.repository.get_mapping() 194 self._branch = branch 195 self._transport = self.repository._git._controltransport 196 self._format = GitWorkingTreeFormat() 197 self.index = None 198 self._index_file = None 199 self.views = self._make_views() 200 self._rules_searcher = None 201 self._detect_case_handling() 202 self._reset_data() 203 204 def supports_tree_reference(self): 205 return True 206 207 def supports_rename_tracking(self): 208 return False 209 210 def _read_index(self): 211 self.index = Index(self.control_transport.local_abspath('index')) 212 self._index_dirty = False 213 214 def _get_submodule_index(self, relpath): 215 if not isinstance(relpath, bytes): 216 raise TypeError(relpath) 217 try: 218 info = self._submodule_info()[relpath] 219 except KeyError: 220 index_path = os.path.join(self.basedir, decode_git_path(relpath), '.git', 'index') 221 else: 222 index_path = self.control_transport.local_abspath( 223 posixpath.join('modules', decode_git_path(info[1]), 'index')) 224 return Index(index_path) 225 226 def lock_read(self): 227 """Lock the repository for read operations. 228 229 :return: A breezy.lock.LogicalLockResult. 
230 """ 231 if not self._lock_mode: 232 self._lock_mode = 'r' 233 self._lock_count = 1 234 self._read_index() 235 else: 236 self._lock_count += 1 237 self.branch.lock_read() 238 return lock.LogicalLockResult(self.unlock) 239 240 def _lock_write_tree(self): 241 if not self._lock_mode: 242 self._lock_mode = 'w' 243 self._lock_count = 1 244 try: 245 self._index_file = GitFile( 246 self.control_transport.local_abspath('index'), 'wb') 247 except FileLocked: 248 raise errors.LockContention('index') 249 self._read_index() 250 elif self._lock_mode == 'r': 251 raise errors.ReadOnlyError(self) 252 else: 253 self._lock_count += 1 254 255 def lock_tree_write(self): 256 self.branch.lock_read() 257 try: 258 self._lock_write_tree() 259 return lock.LogicalLockResult(self.unlock) 260 except BaseException: 261 self.branch.unlock() 262 raise 263 264 def lock_write(self, token=None): 265 self.branch.lock_write() 266 try: 267 self._lock_write_tree() 268 return lock.LogicalLockResult(self.unlock) 269 except BaseException: 270 self.branch.unlock() 271 raise 272 273 def is_locked(self): 274 return self._lock_count >= 1 275 276 def get_physical_lock_status(self): 277 return False 278 279 def break_lock(self): 280 try: 281 self.control_transport.delete('index.lock') 282 except errors.NoSuchFile: 283 pass 284 self.branch.break_lock() 285 286 @only_raises(errors.LockNotHeld, errors.LockBroken) 287 def unlock(self): 288 if not self._lock_count: 289 return lock.cant_unlock_not_held(self) 290 try: 291 self._cleanup() 292 self._lock_count -= 1 293 if self._lock_count > 0: 294 return 295 if self._index_file is not None: 296 if self._index_dirty: 297 self._flush(self._index_file) 298 self._index_file.close() 299 else: 300 # Something else already triggered a write of the index 301 # file by calling .flush() 302 self._index_file.abort() 303 self._index_file = None 304 self._lock_mode = None 305 self.index = None 306 finally: 307 self.branch.unlock() 308 309 def _cleanup(self): 310 pass 311 312 def 
_detect_case_handling(self): 313 try: 314 self._transport.stat(".git/cOnFiG") 315 except errors.NoSuchFile: 316 self.case_sensitive = True 317 else: 318 self.case_sensitive = False 319 320 def merge_modified(self): 321 return {} 322 323 def set_merge_modified(self, modified_hashes): 324 raise errors.UnsupportedOperation(self.set_merge_modified, self) 325 326 def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False): 327 self.set_parent_ids([p for p, t in parents_list]) 328 329 def _set_merges_from_parent_ids(self, rhs_parent_ids): 330 try: 331 merges = [self.branch.lookup_bzr_revision_id( 332 revid)[0] for revid in rhs_parent_ids] 333 except errors.NoSuchRevision as e: 334 raise errors.GhostRevisionUnusableHere(e.revision) 335 if merges: 336 self.control_transport.put_bytes( 337 'MERGE_HEAD', b'\n'.join(merges), 338 mode=self.controldir._get_file_mode()) 339 else: 340 try: 341 self.control_transport.delete('MERGE_HEAD') 342 except errors.NoSuchFile: 343 pass 344 345 def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False): 346 """Set the parent ids to revision_ids. 347 348 See also set_parent_trees. This api will try to retrieve the tree data 349 for each element of revision_ids from the trees repository. If you have 350 tree data already available, it is more efficient to use 351 set_parent_trees rather than set_parent_ids. set_parent_ids is however 352 an easier API to use. 353 354 :param revision_ids: The revision_ids to set as the parent ids of this 355 working tree. Any of these may be ghosts. 
356 """ 357 with self.lock_tree_write(): 358 self._check_parents_for_ghosts( 359 revision_ids, allow_leftmost_as_ghost=allow_leftmost_as_ghost) 360 for revision_id in revision_ids: 361 _mod_revision.check_not_reserved_id(revision_id) 362 363 revision_ids = self._filter_parent_ids_by_ancestry(revision_ids) 364 365 if len(revision_ids) > 0: 366 self.set_last_revision(revision_ids[0]) 367 else: 368 self.set_last_revision(_mod_revision.NULL_REVISION) 369 370 self._set_merges_from_parent_ids(revision_ids[1:]) 371 372 def get_parent_ids(self): 373 """See Tree.get_parent_ids. 374 375 This implementation reads the pending merges list and last_revision 376 value and uses that to decide what the parents list should be. 377 """ 378 last_rev = _mod_revision.ensure_null(self._last_revision()) 379 if _mod_revision.NULL_REVISION == last_rev: 380 parents = [] 381 else: 382 parents = [last_rev] 383 try: 384 merges_bytes = self.control_transport.get_bytes('MERGE_HEAD') 385 except errors.NoSuchFile: 386 pass 387 else: 388 for l in osutils.split_lines(merges_bytes): 389 revision_id = l.rstrip(b'\n') 390 parents.append( 391 self.branch.lookup_foreign_revision_id(revision_id)) 392 return parents 393 394 def check_state(self): 395 """Check that the working state is/isn't valid.""" 396 pass 397 398 def remove(self, files, verbose=False, to_file=None, keep_files=True, 399 force=False): 400 """Remove nominated files from the working tree metadata. 401 402 :param files: File paths relative to the basedir. 403 :param keep_files: If true, the files will also be kept. 404 :param force: Delete files and directories, even if they are changed 405 and even if the directories are not empty. 
406 """ 407 if not isinstance(files, list): 408 files = [files] 409 410 if to_file is None: 411 to_file = sys.stdout 412 413 def backup(file_to_backup): 414 abs_path = self.abspath(file_to_backup) 415 backup_name = self.controldir._available_backup_name( 416 file_to_backup) 417 osutils.rename(abs_path, self.abspath(backup_name)) 418 return "removed %s (but kept a copy: %s)" % ( 419 file_to_backup, backup_name) 420 421 # Sort needed to first handle directory content before the directory 422 files_to_backup = [] 423 424 all_files = set() 425 426 def recurse_directory_to_add_files(directory): 427 # Recurse directory and add all files 428 # so we can check if they have changed. 429 for parent_path, file_infos in self.walkdirs(directory): 430 for relpath, basename, kind, lstat, kind in file_infos: 431 # Is it versioned or ignored? 432 if self.is_versioned(relpath): 433 # Add nested content for deletion. 434 all_files.add(relpath) 435 else: 436 # Files which are not versioned 437 # should be treated as unknown. 438 files_to_backup.append(relpath) 439 440 with self.lock_tree_write(): 441 for filepath in files: 442 # Get file name into canonical form. 
443 abspath = self.abspath(filepath) 444 filepath = self.relpath(abspath) 445 446 if filepath: 447 all_files.add(filepath) 448 recurse_directory_to_add_files(filepath) 449 450 files = list(all_files) 451 452 if len(files) == 0: 453 return # nothing to do 454 455 # Sort needed to first handle directory content before the 456 # directory 457 files.sort(reverse=True) 458 459 # Bail out if we are going to delete files we shouldn't 460 if not keep_files and not force: 461 for change in self.iter_changes( 462 self.basis_tree(), include_unchanged=True, 463 require_versioned=False, want_unversioned=True, 464 specific_files=files): 465 if change.versioned[0] is False: 466 # The record is unknown or newly added 467 files_to_backup.append(change.path[1]) 468 files_to_backup.extend( 469 osutils.parent_directories(change.path[1])) 470 elif (change.changed_content and (change.kind[1] is not None) 471 and osutils.is_inside_any(files, change.path[1])): 472 # Versioned and changed, but not deleted, and still 473 # in one of the dirs to be deleted. 474 files_to_backup.append(change.path[1]) 475 files_to_backup.extend( 476 osutils.parent_directories(change.path[1])) 477 478 for f in files: 479 if f == '': 480 continue 481 482 try: 483 kind = self.kind(f) 484 except errors.NoSuchFile: 485 kind = None 486 487 abs_path = self.abspath(f) 488 if verbose: 489 # having removed it, it must be either ignored or unknown 490 if self.is_ignored(f): 491 new_status = 'I' 492 else: 493 new_status = '?' 
494 kind_ch = osutils.kind_marker(kind) 495 to_file.write(new_status + ' ' + f + kind_ch + '\n') 496 if kind is None: 497 message = "%s does not exist" % (f, ) 498 else: 499 if not keep_files: 500 if f in files_to_backup and not force: 501 message = backup(f) 502 else: 503 if kind == 'directory': 504 osutils.rmtree(abs_path) 505 else: 506 osutils.delete_any(abs_path) 507 message = "deleted %s" % (f,) 508 else: 509 message = "removed %s" % (f,) 510 self._unversion_path(f) 511 512 # print only one message (if any) per file. 513 if message is not None: 514 trace.note(message) 515 self._versioned_dirs = None 516 517 def smart_add(self, file_list, recurse=True, action=None, save=True): 518 if not file_list: 519 file_list = [u'.'] 520 521 # expand any symlinks in the directory part, while leaving the 522 # filename alone 523 # only expanding if symlinks are supported avoids windows path bugs 524 if self.supports_symlinks(): 525 file_list = list(map(osutils.normalizepath, file_list)) 526 527 conflicts_related = set() 528 for c in self.conflicts(): 529 conflicts_related.update(c.associated_filenames()) 530 531 added = [] 532 ignored = {} 533 user_dirs = [] 534 535 def call_action(filepath, kind): 536 if filepath == '': 537 return 538 if action is not None: 539 parent_path = posixpath.dirname(filepath) 540 parent_id = self.path2id(parent_path) 541 parent_ie = self._get_dir_ie(parent_path, parent_id) 542 file_id = action(self, parent_ie, filepath, kind) 543 if file_id is not None: 544 raise workingtree.SettingFileIdUnsupported() 545 546 with self.lock_tree_write(): 547 for filepath in osutils.canonical_relpaths( 548 self.basedir, file_list): 549 filepath, can_access = osutils.normalized_filename(filepath) 550 if not can_access: 551 raise errors.InvalidNormalization(filepath) 552 553 abspath = self.abspath(filepath) 554 kind = osutils.file_kind(abspath) 555 if kind in ("file", "symlink"): 556 (index, subpath) = self._lookup_index( 557 encode_git_path(filepath)) 558 if subpath 
in index: 559 # Already present 560 continue 561 call_action(filepath, kind) 562 if save: 563 self._index_add_entry(filepath, kind) 564 added.append(filepath) 565 elif kind == "directory": 566 (index, subpath) = self._lookup_index( 567 encode_git_path(filepath)) 568 if subpath not in index: 569 call_action(filepath, kind) 570 if recurse: 571 user_dirs.append(filepath) 572 else: 573 raise errors.BadFileKindError(filename=abspath, kind=kind) 574 for user_dir in user_dirs: 575 abs_user_dir = self.abspath(user_dir) 576 if user_dir != '': 577 try: 578 transport = _mod_transport.get_transport_from_path( 579 abs_user_dir) 580 _mod_controldir.ControlDirFormat.find_format(transport) 581 subtree = True 582 except errors.NotBranchError: 583 subtree = False 584 except errors.UnsupportedFormatError: 585 subtree = False 586 else: 587 subtree = False 588 if subtree: 589 trace.warning('skipping nested tree %r', abs_user_dir) 590 continue 591 592 for name in os.listdir(abs_user_dir): 593 subp = os.path.join(user_dir, name) 594 if (self.is_control_filename(subp) or 595 self.mapping.is_special_file(subp)): 596 continue 597 ignore_glob = self.is_ignored(subp) 598 if ignore_glob is not None: 599 ignored.setdefault(ignore_glob, []).append(subp) 600 continue 601 abspath = self.abspath(subp) 602 kind = osutils.file_kind(abspath) 603 if kind == "directory": 604 user_dirs.append(subp) 605 else: 606 (index, subpath) = self._lookup_index( 607 encode_git_path(subp)) 608 if subpath in index: 609 # Already present 610 continue 611 if subp in conflicts_related: 612 continue 613 call_action(subp, kind) 614 if save: 615 self._index_add_entry(subp, kind) 616 added.append(subp) 617 return added, ignored 618 619 def has_filename(self, filename): 620 return osutils.lexists(self.abspath(filename)) 621 622 def _iter_files_recursive(self, from_dir=None, include_dirs=False, 623 recurse_nested=False): 624 if from_dir is None: 625 from_dir = u"" 626 if not isinstance(from_dir, str): 627 raise 
TypeError(from_dir) 628 encoded_from_dir = self.abspath(from_dir).encode(osutils._fs_enc) 629 for (dirpath, dirnames, filenames) in os.walk(encoded_from_dir): 630 dir_relpath = dirpath[len(self.basedir):].strip(b"/") 631 if self.controldir.is_control_filename( 632 dir_relpath.decode(osutils._fs_enc)): 633 continue 634 for name in list(dirnames): 635 if self.controldir.is_control_filename( 636 name.decode(osutils._fs_enc)): 637 dirnames.remove(name) 638 continue 639 relpath = os.path.join(dir_relpath, name) 640 if not recurse_nested and self._directory_is_tree_reference(relpath.decode(osutils._fs_enc)): 641 dirnames.remove(name) 642 if include_dirs: 643 try: 644 yield relpath.decode(osutils._fs_enc) 645 except UnicodeDecodeError: 646 raise errors.BadFilenameEncoding( 647 relpath, osutils._fs_enc) 648 if not self.is_versioned(relpath.decode(osutils._fs_enc)): 649 dirnames.remove(name) 650 for name in filenames: 651 if self.mapping.is_special_file(name): 652 continue 653 if self.controldir.is_control_filename( 654 name.decode(osutils._fs_enc, 'replace')): 655 continue 656 yp = os.path.join(dir_relpath, name) 657 try: 658 yield yp.decode(osutils._fs_enc) 659 except UnicodeDecodeError: 660 raise errors.BadFilenameEncoding( 661 yp, osutils._fs_enc) 662 663 def extras(self): 664 """Yield all unversioned files in this WorkingTree. 
665 """ 666 with self.lock_read(): 667 index_paths = set( 668 [decode_git_path(p) for p, sha, mode in self.iter_git_objects()]) 669 all_paths = set(self._iter_files_recursive(include_dirs=False)) 670 return iter(all_paths - index_paths) 671 672 def _gather_kinds(self, files, kinds): 673 """See MutableTree._gather_kinds.""" 674 with self.lock_tree_write(): 675 for pos, f in enumerate(files): 676 if kinds[pos] is None: 677 fullpath = osutils.normpath(self.abspath(f)) 678 try: 679 kind = osutils.file_kind(fullpath) 680 except OSError as e: 681 if e.errno == errno.ENOENT: 682 raise errors.NoSuchFile(fullpath) 683 if f != '' and self._directory_is_tree_reference(f): 684 kind = 'tree-reference' 685 kinds[pos] = kind 686 687 def flush(self): 688 if self._lock_mode != 'w': 689 raise errors.NotWriteLocked(self) 690 # TODO(jelmer): This shouldn't be writing in-place, but index.lock is 691 # already in use and GitFile doesn't allow overriding the lock file 692 # name :( 693 f = open(self.control_transport.local_abspath('index'), 'wb') 694 # Note that _flush will close the file 695 self._flush(f) 696 697 def _flush(self, f): 698 try: 699 shaf = SHA1Writer(f) 700 write_index_dict(shaf, self.index) 701 shaf.close() 702 except BaseException: 703 f.abort() 704 raise 705 self._index_dirty = False 706 707 def get_file_mtime(self, path): 708 """See Tree.get_file_mtime.""" 709 try: 710 return self._lstat(path).st_mtime 711 except OSError as e: 712 if e.errno == errno.ENOENT: 713 raise errors.NoSuchFile(path) 714 raise 715 716 def is_ignored(self, filename): 717 r"""Check whether the filename matches an ignore pattern. 718 719 If the file is ignored, returns the pattern which caused it to 720 be ignored, otherwise None. 
So this can simply be used as a 721 boolean if desired.""" 722 if getattr(self, '_global_ignoreglobster', None) is None: 723 from breezy import ignores 724 ignore_globs = set() 725 ignore_globs.update(ignores.get_runtime_ignores()) 726 ignore_globs.update(ignores.get_user_ignores()) 727 self._global_ignoreglobster = globbing.ExceptionGlobster( 728 ignore_globs) 729 match = self._global_ignoreglobster.match(filename) 730 if match is not None: 731 return match 732 try: 733 if self.kind(filename) == 'directory': 734 filename += '/' 735 except errors.NoSuchFile: 736 pass 737 filename = filename.lstrip('/') 738 ignore_manager = self._get_ignore_manager() 739 ps = list(ignore_manager.find_matching(filename)) 740 if not ps: 741 return None 742 if not ps[-1].is_exclude: 743 return None 744 return bytes(ps[-1]) 745 746 def _get_ignore_manager(self): 747 ignoremanager = getattr(self, '_ignoremanager', None) 748 if ignoremanager is not None: 749 return ignoremanager 750 751 ignore_manager = IgnoreFilterManager.from_repo(self.repository._git) 752 self._ignoremanager = ignore_manager 753 return ignore_manager 754 755 def _flush_ignore_list_cache(self): 756 self._ignoremanager = None 757 758 def set_last_revision(self, revid): 759 if _mod_revision.is_null(revid): 760 self.branch.set_last_revision_info(0, revid) 761 return False 762 _mod_revision.check_not_reserved_id(revid) 763 try: 764 self.branch.generate_revision_history(revid) 765 except errors.NoSuchRevision: 766 raise errors.GhostRevisionUnusableHere(revid) 767 768 def _reset_data(self): 769 pass 770 771 def get_file_verifier(self, path, stat_value=None): 772 with self.lock_read(): 773 (index, subpath) = self._lookup_index(encode_git_path(path)) 774 try: 775 return ("GIT", index[subpath].sha) 776 except KeyError: 777 if self._has_dir(path): 778 return ("GIT", None) 779 raise errors.NoSuchFile(path) 780 781 def get_file_sha1(self, path, stat_value=None): 782 with self.lock_read(): 783 if not self.is_versioned(path): 784 
raise errors.NoSuchFile(path) 785 abspath = self.abspath(path) 786 try: 787 return osutils.sha_file_by_name(abspath) 788 except OSError as e: 789 if e.errno in (errno.EISDIR, errno.ENOENT): 790 return None 791 raise 792 793 def revision_tree(self, revid): 794 return self.repository.revision_tree(revid) 795 796 def _is_executable_from_path_and_stat_from_stat(self, path, stat_result): 797 mode = stat_result.st_mode 798 return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode) 799 800 def _is_executable_from_path_and_stat_from_basis(self, path, stat_result): 801 return self.basis_tree().is_executable(path) 802 803 def stored_kind(self, path): 804 with self.lock_read(): 805 encoded_path = encode_git_path(path) 806 (index, subpath) = self._lookup_index(encoded_path) 807 try: 808 return mode_kind(index[subpath].mode) 809 except KeyError: 810 # Maybe it's a directory? 811 if self._has_dir(encoded_path): 812 return "directory" 813 raise errors.NoSuchFile(path) 814 815 def _lstat(self, path): 816 return os.lstat(self.abspath(path)) 817 818 def _live_entry(self, path): 819 encoded_path = self.abspath(decode_git_path(path)).encode( 820 osutils._fs_enc) 821 return index_entry_from_path(encoded_path) 822 823 def is_executable(self, path): 824 with self.lock_read(): 825 if self._supports_executable(): 826 mode = self._lstat(path).st_mode 827 else: 828 (index, subpath) = self._lookup_index(encode_git_path(path)) 829 try: 830 mode = index[subpath].mode 831 except KeyError: 832 mode = 0 833 return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode) 834 835 def _is_executable_from_path_and_stat(self, path, stat_result): 836 if self._supports_executable(): 837 return self._is_executable_from_path_and_stat_from_stat(path, stat_result) 838 else: 839 return self._is_executable_from_path_and_stat_from_basis( 840 path, stat_result) 841 842 def list_files(self, include_root=False, from_dir=None, recursive=True, 843 recurse_nested=False): 844 if from_dir is None or from_dir == '.': 845 from_dir 
= u"" 846 dir_ids = {} 847 fk_entries = {'directory': tree.TreeDirectory, 848 'file': tree.TreeFile, 849 'symlink': tree.TreeLink, 850 'tree-reference': tree.TreeReference} 851 with self.lock_read(): 852 root_ie = self._get_dir_ie(u"", None) 853 if include_root and not from_dir: 854 yield "", "V", root_ie.kind, root_ie 855 dir_ids[u""] = root_ie.file_id 856 if recursive: 857 path_iterator = sorted( 858 self._iter_files_recursive( 859 from_dir, include_dirs=True, 860 recurse_nested=recurse_nested)) 861 else: 862 encoded_from_dir = self.abspath(from_dir).encode( 863 osutils._fs_enc) 864 path_iterator = sorted( 865 [os.path.join(from_dir, name.decode(osutils._fs_enc)) 866 for name in os.listdir(encoded_from_dir) 867 if not self.controldir.is_control_filename( 868 name.decode(osutils._fs_enc)) and 869 not self.mapping.is_special_file( 870 name.decode(osutils._fs_enc))]) 871 for path in path_iterator: 872 try: 873 encoded_path = encode_git_path(path) 874 except UnicodeEncodeError: 875 raise errors.BadFilenameEncoding( 876 path, osutils._fs_enc) 877 (index, index_path) = self._lookup_index(encoded_path) 878 try: 879 value = index[index_path] 880 except KeyError: 881 value = None 882 kind = self.kind(path) 883 parent, name = posixpath.split(path) 884 for dir_path, dir_ie in self._add_missing_parent_ids( 885 parent, dir_ids): 886 pass 887 if kind == 'tree-reference' and recurse_nested: 888 ie = self._get_dir_ie(path, self.path2id(path)) 889 yield (posixpath.relpath(path, from_dir), 'V', 'directory', 890 ie) 891 continue 892 if kind == 'directory': 893 if path != from_dir: 894 if self._has_dir(encoded_path): 895 ie = self._get_dir_ie(path, self.path2id(path)) 896 status = "V" 897 elif self.is_ignored(path): 898 status = "I" 899 ie = fk_entries[kind]() 900 else: 901 status = "?" 
902 ie = fk_entries[kind]() 903 yield (posixpath.relpath(path, from_dir), status, kind, 904 ie) 905 continue 906 if value is not None: 907 ie = self._get_file_ie(name, path, value, dir_ids[parent]) 908 yield (posixpath.relpath(path, from_dir), "V", ie.kind, ie) 909 else: 910 try: 911 ie = fk_entries[kind]() 912 except KeyError: 913 # unsupported kind 914 continue 915 yield (posixpath.relpath(path, from_dir), 916 ("I" if self.is_ignored(path) else "?"), kind, ie) 917 918 def all_file_ids(self): 919 raise errors.UnsupportedOperation(self.all_file_ids, self) 920 921 def all_versioned_paths(self): 922 with self.lock_read(): 923 paths = {u""} 924 for path in self.index: 925 if self.mapping.is_special_file(path): 926 continue 927 path = decode_git_path(path) 928 paths.add(path) 929 while path != "": 930 path = posixpath.dirname(path).strip("/") 931 if path in paths: 932 break 933 paths.add(path) 934 return paths 935 936 def iter_child_entries(self, path): 937 encoded_path = encode_git_path(path) 938 with self.lock_read(): 939 parent_id = self.path2id(path) 940 found_any = False 941 for item_path, value in self.index.iteritems(): 942 decoded_item_path = decode_git_path(item_path) 943 if self.mapping.is_special_file(item_path): 944 continue 945 if not osutils.is_inside(path, decoded_item_path): 946 continue 947 found_any = True 948 subpath = posixpath.relpath(decoded_item_path, path) 949 if '/' in subpath: 950 dirname = subpath.split('/', 1)[0] 951 file_ie = self._get_dir_ie( 952 posixpath.join(path, dirname), parent_id) 953 else: 954 (unused_parent, name) = posixpath.split(decoded_item_path) 955 file_ie = self._get_file_ie( 956 name, decoded_item_path, value, parent_id) 957 yield file_ie 958 if not found_any and path != u'': 959 raise errors.NoSuchFile(path) 960 961 def conflicts(self): 962 with self.lock_read(): 963 conflicts = _mod_conflicts.ConflictList() 964 for item_path, value in self.index.iteritems(): 965 if value.flags & FLAG_STAGEMASK: 966 
                    # Stage bits set on an index entry mean the path is
                    # unmerged; report it as a text conflict.
                    conflicts.append(TextConflict(decode_git_path(item_path)))
            return conflicts

    def set_conflicts(self, conflicts):
        """Replace the conflicts in this tree with the given list.

        Git can only record text/contents conflicts (as index stage bits),
        so any other conflict type raises UnsupportedOperation.
        """
        by_path = set()
        for conflict in conflicts:
            if conflict.typestring in ('text conflict', 'contents conflict'):
                by_path.add(encode_git_path(conflict.path))
            else:
                raise errors.UnsupportedOperation(self.set_conflicts, self)
        with self.lock_tree_write():
            # Rewrite the stage bits on every index entry: set for paths in
            # the new conflict list, cleared for everything else.
            for path in self.index:
                self._set_conflicted(path, path in by_path)

    def _set_conflicted(self, path, conflicted):
        # Mark or unmark a single index entry as conflicted by toggling the
        # Git stage bits (FLAG_STAGEMASK) in the flags field, which is the
        # tenth slot (index 9) of the index-entry tuple.  Raises KeyError
        # when ``path`` is not in the index.
        value = self.index[path]
        self._index_dirty = True
        if conflicted:
            self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
        else:
            self.index[path] = (value[:9] + (value[9] & ~ FLAG_STAGEMASK, ))

    def add_conflicts(self, new_conflicts):
        """Add conflicts to the tree.

        Only text/contents conflicts on paths already present in the index
        can be recorded; anything else raises UnsupportedOperation.
        """
        with self.lock_tree_write():
            for conflict in new_conflicts:
                if conflict.typestring in ('text conflict',
                                           'contents conflict'):
                    try:
                        self._set_conflicted(
                            encode_git_path(conflict.path), True)
                    except KeyError:
                        # Path is not in the index, so there is nowhere to
                        # record the conflict.
                        raise errors.UnsupportedOperation(
                            self.add_conflicts, self)
                else:
                    raise errors.UnsupportedOperation(self.add_conflicts, self)

    def walkdirs(self, prefix=""):
        """Walk the directories of this tree.

        returns a generator which yields items in the form:
        (current_directory_path,
         [(file1_path, file1_name, file1_kind, (lstat),
           file1_kind), ... ])

        This API returns a generator, which is only valid during the current
        tree transaction - within a single lock_read or lock_write duration.

        If the tree is not locked, it may cause an error to be raised,
        depending on the tree implementation.
        """
        from bisect import bisect_left
        import operator
        disk_top = self.abspath(prefix)
        if disk_top.endswith('/'):
            disk_top = disk_top[:-1]
        top_strip_len = len(disk_top) + 1
        # Two sorted directory streams are merged below: what the index
        # says exists (inventory) and what is actually on disk.
        inventory_iterator = self._walkdirs(prefix)
        disk_iterator = osutils.walkdirs(disk_top, prefix)
        try:
            current_disk = next(disk_iterator)
            disk_finished = False
        except OSError as e:
            # A missing disk directory is fine - everything is then
            # "missing" from the disk side.
            # NOTE(review): ERROR_PATH_NOT_FOUND is presumably a
            # module-level constant for the Windows error code, defined
            # elsewhere in this file - confirm.
            if not (e.errno == errno.ENOENT
                    or (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
                raise
            current_disk = None
            disk_finished = True
        try:
            current_inv = next(inventory_iterator)
            inv_finished = False
        except StopIteration:
            current_inv = None
            inv_finished = True
        while not inv_finished or not disk_finished:
            if current_disk:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                 cur_disk_dir_content) = current_disk
            else:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                 cur_disk_dir_content) = ((None, None), None)
            if not disk_finished:
                # strip out .bzr dirs
                if (cur_disk_dir_path_from_top[top_strip_len:] == ''
                        and len(cur_disk_dir_content) > 0):
                    # osutils.walkdirs can be made nicer -
                    # yield the path-from-prefix rather than the pathjoined
                    # value.
                    bzrdir_loc = bisect_left(cur_disk_dir_content,
                                             ('.git', '.git'))
                    if (bzrdir_loc < len(cur_disk_dir_content) and
                            self.controldir.is_control_filename(
                                cur_disk_dir_content[bzrdir_loc][0])):
                        # we dont yield the contents of, or, .bzr itself.
                        del cur_disk_dir_content[bzrdir_loc]
            if inv_finished:
                # everything is unknown
                direction = 1
            elif disk_finished:
                # everything is missing
                direction = -1
            else:
                # cmp()-style three-way comparison of the two current
                # directory paths: negative, zero or positive.
                direction = ((current_inv[0][0] > cur_disk_dir_relpath)
                             - (current_inv[0][0] < cur_disk_dir_relpath))
            if direction > 0:
                # disk is before inventory - unknown
                dirblock = [(relpath, basename, kind, stat, None) for
                            relpath, basename, kind, stat, top_path in
                            cur_disk_dir_content]
                yield cur_disk_dir_relpath, dirblock
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True
            elif direction < 0:
                # inventory is before disk - missing.
                dirblock = [(relpath, basename, 'unknown', None, kind)
                            for relpath, basename, dkind, stat, fileid, kind in
                            current_inv[1]]
                yield current_inv[0][0], dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
            else:
                # versioned present directory
                # merge the inventory and disk data together
                dirblock = []
                # Rows for the same path end up adjacent after sorting by
                # relpath; within one directory grouping by basename is
                # equivalent to grouping by relpath.
                for relpath, subiterator in itertools.groupby(sorted(
                        current_inv[1] + cur_disk_dir_content,
                        key=operator.itemgetter(0)), operator.itemgetter(1)):
                    path_elements = list(subiterator)
                    if len(path_elements) == 2:
                        inv_row, disk_row = path_elements
                        # versioned, present file
                        dirblock.append((inv_row[0],
                                         inv_row[1], disk_row[2],
                                         disk_row[3], inv_row[5]))
                    elif len(path_elements[0]) == 5:
                        # unknown disk file
                        dirblock.append(
                            (path_elements[0][0], path_elements[0][1],
                             path_elements[0][2], path_elements[0][3],
                             None))
                    elif len(path_elements[0]) == 6:
                        # versioned, absent file.
                        dirblock.append(
                            (path_elements[0][0], path_elements[0][1],
                             'unknown', None,
                             path_elements[0][5]))
                    else:
                        raise NotImplementedError('unreachable code')
                yield current_inv[0][0], dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True

    def _walkdirs(self, prefix=u""):
        # Yield ((dirpath, dir_file_id), sorted-child-rows) pairs for every
        # directory under ``prefix`` recorded in the index, sorted by path.
        if prefix != u"":
            prefix += u"/"
        prefix = encode_git_path(prefix)
        per_dir = defaultdict(set)
        if prefix == b"":
            per_dir[(u'', self.path2id(''))] = set()

        def add_entry(path, kind):
            if path == b'' or not path.startswith(prefix):
                return
            (dirname, child_name) = posixpath.split(path)
            # Recurse so every intermediate directory gets an entry too.
            add_entry(dirname, 'directory')
            dirname = decode_git_path(dirname)
            dir_file_id = self.path2id(dirname)
            # NOTE(review): ``value`` here is the index entry from the
            # enclosing loop below (a closure), not a parameter of
            # add_entry; even during recursion this checks the entry of
            # the file that triggered the call.
            if not isinstance(value, tuple) or len(value) != 10:
                raise ValueError(value)
            per_dir[(dirname, dir_file_id)].add(
                (decode_git_path(path), decode_git_path(child_name),
                 kind, None,
                 self.path2id(decode_git_path(path)),
                 kind))
        with self.lock_read():
            for path, value in self.index.iteritems():
                if self.mapping.is_special_file(path):
                    continue
                if not path.startswith(prefix):
                    continue
                add_entry(path, mode_kind(value.mode))
        return ((k, sorted(v)) for (k, v) in sorted(per_dir.items()))

    def get_shelf_manager(self):
        # Shelving is not supported for Git working trees.
        raise workingtree.ShelvingUnsupported()

    def store_uncommitted(self):
        # Storing uncommitted changes is not supported for Git working trees.
        raise errors.StoringUncommittedNotSupported(self)

    def annotate_iter(self, path,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """See Tree.annotate_iter

        This implementation will use the basis tree implementation if possible.
        Lines not in the basis are attributed to CURRENT_REVISION

        If there are pending merges, lines added by those merges will be
        incorrectly attributed to CURRENT_REVISION (but after committing, the
        attribution will be correct).
        """
        with self.lock_read():
            # Collect the (path, revision) text keys of this file in each
            # parent tree that actually contains it as a file.
            maybe_file_parent_keys = []
            for parent_id in self.get_parent_ids():
                try:
                    parent_tree = self.revision_tree(parent_id)
                except errors.NoSuchRevisionInTree:
                    parent_tree = self.branch.repository.revision_tree(
                        parent_id)
                with parent_tree.lock_read():
                    # TODO(jelmer): Use rename/copy tracker to find path name
                    # in parent
                    parent_path = path
                    try:
                        kind = parent_tree.kind(parent_path)
                    except errors.NoSuchFile:
                        continue
                    if kind != 'file':
                        # Note: this is slightly unnecessary, because symlinks
                        # and directories have a "text" which is the empty
                        # text, and we know that won't mess up annotations. But
                        # it seems cleaner
                        continue
                    parent_text_key = (
                        parent_path,
                        parent_tree.get_file_revision(parent_path))
                    if parent_text_key not in maybe_file_parent_keys:
                        maybe_file_parent_keys.append(parent_text_key)
            # Now we have the parents of this content
            from breezy.annotate import Annotator
            from .annotate import AnnotateProvider
            annotate_provider = AnnotateProvider(
                self.branch.repository._file_change_scanner)
            annotator = Annotator(annotate_provider)

            from breezy.graph import Graph
            graph = Graph(annotate_provider)
            # Keep only keys that are graph heads, dropping keys that are
            # ancestors of another parent key.
            heads = graph.heads(maybe_file_parent_keys)
            file_parent_keys = []
            for key in maybe_file_parent_keys:
                if key in heads:
                    file_parent_keys.append(key)

            text = self.get_file_text(path)
            this_key = (path, default_revision)
            annotator.add_special_text(this_key, file_parent_keys, text)
            annotations = [(key[-1], line)
                           for key, line in annotator.annotate_flat(this_key)]
            return annotations

    def _rename_one(self, from_rel, to_rel):
        # Plain filesystem rename; index bookkeeping is handled by callers.
        os.rename(self.abspath(from_rel), self.abspath(to_rel))

    def _build_checkout_with_index(self):
        """Populate the working directory and index from the branch head.

        When the branch has no head yet, an empty index is written.
        """
        build_index_from_tree(
            self.user_transport.local_abspath('.'),
            self.control_transport.local_abspath("index"),
            self.store,
            None
            if self.branch.head is None
            else self.store[self.branch.head].tree,
            honor_filemode=self._supports_executable())

    def reset_state(self, revision_ids=None):
        """Reset the state of the working tree.

        This does a hard-reset to a last-known-good state. This is a way to
        fix if something got corrupted (like the .git/index file)
        """
        with self.lock_tree_write():
            if revision_ids is not None:
                self.set_parent_ids(revision_ids)
            # Rebuild the index from scratch out of the head tree.
            self.index.clear()
            self._index_dirty = True
            if self.branch.head is not None:
                for entry in self.store.iter_tree_contents(
                        self.store[self.branch.head].tree):
                    if not validate_path(entry.path):
                        continue

                    if S_ISGITLINK(entry.mode):
                        pass  # TODO(jelmer): record and return submodule paths
                    else:
                        # Let's at least try to use the working tree file:
                        try:
                            st = self._lstat(self.abspath(
                                decode_git_path(entry.path)))
                        except OSError:
                            # But if it doesn't exist, we'll make something up.
                            obj = self.store[entry.sha]
                            st = os.stat_result((entry.mode, 0, 0, 0,
                                                 0, 0, len(
                                                     obj.as_raw_string()), 0,
                                                 0, 0))
                        (index, subpath) = self._lookup_index(entry.path)
                        index[subpath] = index_entry_from_stat(st, entry.sha, 0)

    def _update_git_tree(
            self, old_revision, new_revision, change_reporter=None,
            show_base=False):
        # Merge the working tree from old_revision up to the branch's new
        # basis; no-op when the revision did not change.
        basis_tree = self.revision_tree(old_revision)
        if new_revision != old_revision:
            from .. import merge
            with basis_tree.lock_read():
                new_basis_tree = self.branch.basis_tree()
                merge.merge_inner(
                    self.branch,
                    new_basis_tree,
                    basis_tree,
                    this_tree=self,
                    change_reporter=change_reporter,
                    show_base=show_base)

    def pull(self, source, overwrite=False, stop_revision=None,
             change_reporter=None, possible_transports=None, local=False,
             show_base=False, tag_selector=None):
        """Pull from ``source`` into the branch, then update the tree.

        :return: the result of Branch.pull (the pull count/result).
        """
        with self.lock_write(), source.lock_read():
            old_revision = self.branch.last_revision()
            count = self.branch.pull(source, overwrite, stop_revision,
                                     possible_transports=possible_transports,
                                     local=local, tag_selector=tag_selector)
            # Bring the working tree in line with the new branch tip.
            self._update_git_tree(
                old_revision=old_revision,
                new_revision=self.branch.last_revision(),
                change_reporter=change_reporter,
                show_base=show_base)
            return count

    def add_reference(self, sub_tree):
        """Add a TreeReference to the tree, pointing at sub_tree.

        :param sub_tree: subtree to add.
1306 """ 1307 with self.lock_tree_write(): 1308 try: 1309 sub_tree_path = self.relpath(sub_tree.basedir) 1310 except errors.PathNotChild: 1311 raise BadReferenceTarget( 1312 self, sub_tree, 'Target not inside tree.') 1313 1314 self._add([sub_tree_path], [None], ['tree-reference']) 1315 1316 def _read_submodule_head(self, path): 1317 return read_submodule_head(self.abspath(path)) 1318 1319 def get_reference_revision(self, path, branch=None): 1320 hexsha = self._read_submodule_head(path) 1321 if hexsha is None: 1322 (index, subpath) = self._lookup_index( 1323 encode_git_path(path)) 1324 if subpath is None: 1325 raise errors.NoSuchFile(path) 1326 hexsha = index[subpath].sha 1327 return self.branch.lookup_foreign_revision_id(hexsha) 1328 1329 def get_nested_tree(self, path): 1330 return workingtree.WorkingTree.open(self.abspath(path)) 1331 1332 def _directory_is_tree_reference(self, relpath): 1333 # as a special case, if a directory contains control files then 1334 # it's a tree reference, except that the root of the tree is not 1335 return relpath and osutils.lexists(self.abspath(relpath) + u"/.git") 1336 1337 def extract(self, sub_path, format=None): 1338 """Extract a subtree from this tree. 1339 1340 A new branch will be created, relative to the path for this tree. 
        """
        def mkdirs(path):
            # Create any missing directories for ``path`` and return the
            # transport rooted at it.
            segments = osutils.splitpath(path)
            transport = self.branch.controldir.root_transport
            for name in segments:
                transport = transport.clone(name)
                transport.ensure_base()
            return transport

        with self.lock_tree_write():
            self.flush()
            branch_transport = mkdirs(sub_path)
            if format is None:
                format = self.controldir.cloning_metadir()
            branch_transport.ensure_base()
            branch_bzrdir = format.initialize_on_transport(branch_transport)
            try:
                repo = branch_bzrdir.find_repository()
            except errors.NoRepositoryPresent:
                repo = branch_bzrdir.create_repository()
            if not repo.supports_rich_root():
                raise errors.RootNotRich()
            new_branch = branch_bzrdir.create_branch()
            new_branch.pull(self.branch)
            # Also fetch any pending-merge revisions.
            for parent_id in self.get_parent_ids():
                new_branch.fetch(self.branch, parent_id)
            tree_transport = self.controldir.root_transport.clone(sub_path)
            if tree_transport.base != branch_transport.base:
                tree_bzrdir = format.initialize_on_transport(tree_transport)
                tree_bzrdir.set_branch_reference(new_branch)
            else:
                tree_bzrdir = branch_bzrdir
            wt = tree_bzrdir.create_workingtree(_mod_revision.NULL_REVISION)
            wt.set_parent_ids(self.get_parent_ids())
            return wt

    def _get_check_refs(self):
        """Return the references needed to perform a check of this tree.

        The default implementation returns no refs, and is only suitable for
        trees that have no local caching and can commit on ghosts at any time.

        :seealso: breezy.check for details about check_refs.
        """
        return []

    def copy_content_into(self, tree, revision_id=None):
        """Copy the current content and user files of this tree into tree."""
        from .. import merge
        with self.lock_read():
            if revision_id is None:
                merge.transform_tree(tree, self)
            else:
                # TODO now merge from tree.last_revision to revision (to
                # preserve user local changes)
                try:
                    other_tree = self.revision_tree(revision_id)
                except errors.NoSuchRevision:
                    other_tree = self.branch.repository.revision_tree(
                        revision_id)

                merge.transform_tree(tree, other_tree)
                if revision_id == _mod_revision.NULL_REVISION:
                    new_parents = []
                else:
                    new_parents = [revision_id]
                tree.set_parent_ids(new_parents)

    def reference_parent(self, path, possible_transports=None):
        """Open the branch referenced by the submodule at ``path``.

        Returns None (with a warning) when no submodule info is recorded.
        """
        remote_url = self.get_reference_info(path)
        if remote_url is None:
            trace.warning("Unable to find submodule info for %s", path)
            return None
        return _mod_branch.Branch.open(remote_url, possible_transports=possible_transports)

    def get_reference_info(self, path):
        """Return the URL recorded for the submodule at ``path``, if any."""
        submodule_info = self._submodule_info()
        info = submodule_info.get(encode_git_path(path))
        if info is None:
            return None
        return decode_git_path(info[0])

    def set_reference_info(self, tree_path, branch_location):
        """Record (or remove) submodule info for ``tree_path`` in .gitmodules.

        :param branch_location: location of the referenced branch, or None
            to remove the submodule section.
        """
        path = self.abspath('.gitmodules')
        try:
            config = GitConfigFile.from_path(path)
        except EnvironmentError as e:
            if e.errno == errno.ENOENT:
                # No .gitmodules file yet; start from an empty config.
                config = GitConfigFile()
            else:
                raise
        section = (b'submodule', encode_git_path(tree_path))
        if branch_location is None:
            try:
                del config[section]
            except KeyError:
                pass
        else:
            # Record the location joined onto this branch's own URL.
            branch_location = urlutils.join(
                urlutils.strip_segment_parameters(self.branch.user_url),
                branch_location)
            config.set(
                section,
                b'path', encode_git_path(tree_path))
            config.set(
                section,
                b'url', branch_location.encode('utf-8'))
        config.write_to_path(path)
        self.add('.gitmodules')

    # Sentinel default for ``old_tip`` in update(), distinguishing
    # "not passed" from an explicit None.
    _marker = object()

    def update(self, change_reporter=None, possible_transports=None,
               revision=None, old_tip=_marker, show_base=False):
        """Update a working tree along its branch.

        This will update the branch if it's bound, too, which means we have
        multiple trees involved:

        - The new basis tree of the master.
        - The old basis tree of the branch.
        - The old basis tree of the working tree.
        - The current working tree state.

        Pathologically, all of these may be different, and non-ancestors of
        each other. Conceptually we want to:

        - Preserve the wt.basis->wt.state changes
        - Transform the wt.basis to the new master basis.
        - Apply a merge of the old branch basis to get any 'local' changes from
          it into the tree.
        - Restore the wt.basis->wt.state changes.

        There isn't a single operation at the moment to do that, so we:

        - Merge current state -> basis tree of the master w.r.t. the old tree
          basis.
        - Do a 'normal' merge of the old branch basis if it is relevant.

        :param revision: The target revision to update to. Must be in the
            revision history.
        :param old_tip: If branch.update() has already been run, the value it
            returned (old tip of the branch or None). _marker is used
            otherwise.
        """
        if self.branch.get_bound_location() is not None:
            self.lock_write()
            # Only update the branch ourselves if the caller has not
            # already done so (old_tip left at the sentinel).
            update_branch = (old_tip is self._marker)
        else:
            self.lock_tree_write()
            update_branch = False
        try:
            if update_branch:
                old_tip = self.branch.update(possible_transports)
            else:
                if old_tip is self._marker:
                    old_tip = None
            return self._update_tree(old_tip, change_reporter, revision, show_base)
        finally:
            self.unlock()

    def _update_tree(self, old_tip=None, change_reporter=None, revision=None,
                     show_base=False):
        """Update a tree to the master branch.

        :param old_tip: if supplied, the previous tip revision the branch,
            before it was changed to the master branch's tip.
        """
        # here if old_tip is not None, it is the old tip of the branch before
        # it was updated from the master branch. This should become a pending
        # merge in the working tree to preserve the user existing work. we
        # cant set that until we update the working trees last revision to be
        # one from the new branch, because it will just get absorbed by the
        # parent de-duplication logic.
        #
        # We MUST save it even if an error occurs, because otherwise the users
        # local work is unreferenced and will appear to have been lost.
        #
        with self.lock_tree_write():
            from .. import merge
            nb_conflicts = []
            try:
                last_rev = self.get_parent_ids()[0]
            except IndexError:
                # Tree has no parents yet.
                last_rev = _mod_revision.NULL_REVISION
            if revision is None:
                revision = self.branch.last_revision()

            old_tip = old_tip or _mod_revision.NULL_REVISION

            if not _mod_revision.is_null(old_tip) and old_tip != last_rev:
                # the branch we are bound to was updated
                # merge those changes in first
                base_tree = self.basis_tree()
                other_tree = self.branch.repository.revision_tree(old_tip)
                nb_conflicts = merge.merge_inner(self.branch, other_tree,
                                                 base_tree, this_tree=self,
                                                 change_reporter=change_reporter,
                                                 show_base=show_base)
                if nb_conflicts:
                    # Stop here so the user can resolve the conflicts
                    # before the second merge; record old_tip as a
                    # pending merge so the work is not lost.
                    self.add_parent_tree((old_tip, other_tree))
                    return len(nb_conflicts)

            if last_rev != _mod_revision.ensure_null(revision):
                to_tree = self.branch.repository.revision_tree(revision)

                # determine the branch point
                graph = self.branch.repository.get_graph()
                base_rev_id = graph.find_unique_lca(self.branch.last_revision(),
                                                    last_rev)
                base_tree = self.branch.repository.revision_tree(base_rev_id)

                nb_conflicts = merge.merge_inner(self.branch, to_tree, base_tree,
                                                 this_tree=self,
                                                 change_reporter=change_reporter,
                                                 show_base=show_base)
                self.set_last_revision(revision)
                # TODO - dedup parents list with things merged by pull ?
                # reuse the tree we've updated to to set the basis:
                parent_trees = [(revision, to_tree)]
                merges = self.get_parent_ids()[1:]
                # Ideally we ask the tree for the trees here, that way the working
                # tree can decide whether to give us the entire tree or give us a
                # lazy initialised tree. dirstate for instance will have the trees
                # in ram already, whereas a last-revision + basis-inventory tree
                # will not, but also does not need them when setting parents.
                for parent in merges:
                    parent_trees.append(
                        (parent, self.branch.repository.revision_tree(parent)))
                if not _mod_revision.is_null(old_tip):
                    parent_trees.append(
                        (old_tip, self.branch.repository.revision_tree(old_tip)))
                self.set_parent_trees(parent_trees)
                last_rev = parent_trees[0][0]
            return len(nb_conflicts)


class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):
    """Working tree format for Git working trees."""

    _tree_class = GitWorkingTree

    # Git tracks file contents, not directories, so empty directories
    # cannot be versioned.
    supports_versioned_directories = False

    # Git identifies content by path; file ids cannot be set.
    supports_setting_file_ids = False

    supports_store_uncommitted = False

    supports_leftmost_parent_id_as_ghost = False

    supports_righthand_parent_id_as_ghost = False

    requires_normalized_unicode_filenames = True

    supports_merge_modified = False

    ignore_filename = ".gitignore"

    @property
    def _matchingcontroldir(self):
        # Imported lazily to avoid a circular import at module load time.
        from .dir import LocalGitControlDirFormat
        return LocalGitControlDirFormat()

    def get_format_description(self):
        return "Git Working Tree"

    def initialize(self, a_controldir, revision_id=None, from_branch=None,
                   accelerator_tree=None, hardlink=False):
        """See WorkingTreeFormat.initialize()."""
        if not isinstance(a_controldir, LocalGitDir):
            raise errors.IncompatibleFormat(self, a_controldir)
        branch = a_controldir.open_branch(nascent_ok=True)
        if revision_id is not None:
            branch.set_last_revision(revision_id)
        wt = GitWorkingTree(
            a_controldir, a_controldir.open_repository(), branch)
        # Give post-build hooks (e.g. for content filtering) a chance to run.
        for hook in MutableTree.hooks['post_build_tree']:
            hook(wt)
        return wt