1from __future__ import print_function
2from __future__ import absolute_import
3# Written by Bram Cohen and Ross Cohen
4# Maintained by Chris Hutchinson
5# see LICENSE.txt for license information
6
7from future import standard_library
8standard_library.install_aliases()
9from builtins import input
10from builtins import str
11from builtins import range
12from builtins import object
13from future.utils import raise_
14from .bencode import bdecode, bencode
15import binascii
16from .cdv_glob import Glob
17from .client_helpers import new_handle, create_handle, unique_name, _set_name
18from .client_helpers import filename_to_handle
19from .client_helpers import handle_to_filename, _handle_to_filename, handle_name
20from .client_helpers import set_edit, unset_edit, clean_merge_point, gen_changeset
21from .client_helpers import _add_file, conflicts_in_file, CommitError
22from .client_helpers import name_use_count, rename_race, children_count, parent_loop_check, _rename_safe_check
23from .client_helpers import mark_modified_files, find_update_files, find_commit_files
24from .client_net import ClientHandler, ClientError, ServerError
25from .client_net import network_prep, authenticate
26from .db import db, ChangeDBs, write_format_version, write_rebuild_version
27from .DFS import DFS
28from .diff import unified_diff
29from getpass import getpass
30from .history import HistoryError, roothandle, rootnode
31from .history import dmerge, damerge, rename_conflict_check, db_get
32from .history import sync_history, is_ancestor, _is_ancestor
33from .history import handle_contents_at_point, handles_in_branch
34from .history import handle_name_at_point, fullpath_at_point
35from .history import handle_last_modified
36from .history import short_id, long_id, write_changeset, rebuild_from_points
37from .history import server_to_tuple, tuple_to_server, repo_head
38from .history import dump_changeinfo
39from .history import pretty_print_dag, pretty_print_big_dag
40from .history import simplify_precursors
41import locale
42from .merge import find_conflict, find_conflict_multiple_safe, find_annotation
43from . import merge
44from .network import NetworkError
45import os
46from os import path
47from .path import mdir, subpath, breakup, preserving_rename
48from random import randrange
49import re
50from sets import Set
51import sha
52import shlex
53import stat
54from sys import maxint, stdin, stdout, version_info, platform, stderr
55import tempfile
56from time import ctime, strftime, localtime
57
# Sanity check: the codebase relies on language features absent from older
# interpreters.  (NOTE(review): asserts are stripped under -O; a raise would
# be more robust — confirm before changing module import behavior.)
assert version_info >= (2,3), "Python 2.3 or greater is required"

# Encoding of the controlling terminal; NOTE(review): stdin.encoding may be
# None when stdin is piped — callers presumably handle that, confirm.
term_encoding = stdin.encoding
# Locale-derived encoding used for text handling elsewhere in the client.
text_encoding = locale.getpreferredencoding()
62
class CheckoutError(Exception):
    """Raised when a checkout's metadata directory cannot be set up."""
65
class Checkout(object):
    """A local working copy plus its metadata directory of BerkeleyDB files.

    Opening a Checkout optionally creates the metadata directory
    (init=True) and opens all repository and checkout-specific databases,
    either read-write inside a private transactional environment (rw=True)
    or read-only with no environment (rw=False).  At most one transaction
    is tracked at a time via self.txn.
    """

    def __init__(self, local, init=False, metadata_dir='.cdv', rw=True):
        # local:        root directory of the working copy
        # init:         create the metadata directory and database files
        # metadata_dir: name of the metadata directory under local
        # rw:           open read-write with a transactional DB environment
        self.local      = local
        self.conf_path  = path.join(local, metadata_dir)

        if init:
            try:
                os.mkdir(self.conf_path)
            except OSError as err:
                # err.strerror works on both Python 2 and 3; indexing the
                # exception (err[1]) was removed in Python 3.
                raise_(CheckoutError, 'Unable to create metadata directory: %s' % (err.strerror,))

        self.dbenv = None
        self.txn   = None
        txn        = None
        if rw:
            # private transactional environment with automatic recovery
            flags = db.DB_CREATE|db.DB_INIT_MPOOL|db.DB_INIT_TXN|db.DB_PRIVATE
            flags |= db.DB_RECOVER

            self.dbenv = db.DBEnv()
            self.dbenv.set_cachesize(0, 4 * 1024 * 1024)
            self.dbenv.set_lg_bsize(1024 * 1024)
            self.dbenv.set_get_returns_none(2)
            self.dbenv.open(self.conf_path, flags)
            txn = self.txn_begin()

        self._openDBs(txn, init, rw)

        # per-instance caches, cleared by operations that change names
        self.name_cache          = {}
        self.handle_name_cache   = {}
        self.db_cache            = {}

        # scratch area and content store under the metadata directory
        self.temppath    = path.join(self.conf_path, 'temp')
        self.cpath       = path.join(self.conf_path, 'contents')
        mdir(self.temppath)
        mdir(self.cpath)

        self.nopass = 0
        self.user   = None

        if rw:
            if init:
                write_format_version(self.conf_path)
                write_rebuild_version(self.conf_path)
                populate_local_repos(self, txn)
            self.txn_commit(txn)

        return

    def _openDBs(self, txn, init, rw):
        """Open every repository and checkout database.

        txn may be None (read-only mode).  init adds DB_CREATE and
        rw=False adds DB_RDONLY to the open flags.
        """
        flags = 0
        if not rw:
            flags |= db.DB_RDONLY

        if init:
            flags |= db.DB_CREATE

        self.lcrepo = db.DB(dbEnv=self.dbenv)
        self.lcrepo.open('changesets.db', dbtype=db.DB_BTREE, flags=flags, txn=txn)
        self.linforepo = db.DB(dbEnv=self.dbenv)
        self.linforepo.open('info.db', dbtype=db.DB_BTREE, flags=flags, txn=txn)
        self.changesdb = db.DB(dbEnv=self.dbenv)
        self.changesdb.open('changenums.db', dbtype=db.DB_BTREE, flags=flags, txn=txn)
        self.branchmapdb = db.DB(dbEnv=self.dbenv)
        self.branchmapdb.open('branchmap.db', dbtype=db.DB_BTREE, flags=flags, txn=txn)
        self.branchdb = db.DB(dbEnv=self.dbenv)
        self.branchdb.open('branch.db', dbtype=db.DB_RECNO, flags=flags, txn=txn)
        self.staticdb = db.DB(dbEnv=self.dbenv)
        self.staticdb.open('static.db', dbtype=db.DB_BTREE, flags=flags, txn=txn)

        # open the mini-dags and their indices
        self.contents = ChangeDBs(self.dbenv, 'content', flags, txn)
        self.names    = ChangeDBs(self.dbenv, 'name', flags, txn)

        self.allnamesdb = db.DB(dbEnv=self.dbenv)
        self.allnamesdb.set_flags(db.DB_DUPSORT)
        self.allnamesdb.open('allnames.db', dbtype=db.DB_BTREE, flags=flags, txn=txn)

        # checkout-specific dbs
        self.modtimesdb = db.DB(dbEnv=self.dbenv)
        self.modtimesdb.open('modtimes.db', dbtype=db.DB_BTREE, flags=flags, txn=txn)
        self.editsdb = db.DB(dbEnv=self.dbenv)
        self.editsdb.open('edits.db', dbtype=db.DB_BTREE, flags=flags, txn=txn)
        self.varsdb = db.DB(dbEnv=self.dbenv)
        self.varsdb.open('vars.db', dbtype=db.DB_BTREE, flags=flags, txn=txn)
        # filenames.db may be absent in checkouts made by older versions;
        # treat a missing file as "no filenames index".
        try:
            self.filenamesdb = db.DB(dbEnv=self.dbenv)
            self.filenamesdb.open('filenames.db', dbtype=db.DB_BTREE, flags=flags, txn=txn)
        except db.DBNoSuchFileError:
            self.filenamesdb = None

        return

    def close(self):
        """Abort any pending transaction, close all databases, checkpoint
        the environment and delete stale log files."""
        if self.txn is not None:
            self.txn_abort(self.txn)

        self.lcrepo.close()
        self.linforepo.close()
        self.changesdb.close()
        self.branchmapdb.close()
        self.branchdb.close()
        self.staticdb.close()
        self.contents.close()
        self.names.close()
        self.allnamesdb.close()
        self.modtimesdb.close()
        self.editsdb.close()
        self.varsdb.close()
        if self.filenamesdb is not None:
            self.filenamesdb.close()

        if self.dbenv is not None:
            # checkpoint so the archived logs are no longer needed
            self.dbenv.txn_checkpoint()
            for lfile in self.dbenv.log_archive():
                os.remove(path.join(self.dbenv.db_home, lfile))
            self.dbenv.close()

        return

    def txn_begin(self):
        """Start a transaction and remember it as the active one."""
        self.txn = self.dbenv.txn_begin()
        return self.txn

    def txn_abort(self, txn):
        """Abort the active transaction (must be the one tracked)."""
        assert self.txn == txn
        self.txn = None
        return txn.abort()

    def txn_commit(self, txn):
        """Commit the active transaction (must be the one tracked)."""
        assert self.txn == txn
        self.txn = None
        return txn.commit()
198
def cli_init(args):
    """Create a fresh checkout at args[0]; return 0 on success, 1 on error."""
    target = args[0]
    try:
        checkout = Checkout(target, init=True)
        checkout.close()
    except CheckoutError as msg:
        print('error - %s' % (msg,))
        return 1
    return 0
208
def add(co, files):
    """Add files (and every directory leading to them) to the checkout.

    Returns 0 on success, 1 on failure (transaction aborted).
    """
    txn = co.txn_begin()

    patterns = ignore_patterns(co)

    for fname, expanded in Glob(co, files).fs_walk():
        parts = breakup(fname)
        if '.cdv' in parts:
            print('warning - .cdv is a reserved name')
            continue
        # XXX: check for other forbidden names, i.e. '.' and '..'

        # user-specified ignore patterns apply only to glob expansions
        if expanded and any(pat.search(fname) is not None for pat in patterns):
            continue

        # walk down the path, adding each directory and finally the file
        sofar, parent = '', roothandle
        for component in parts:
            sofar = path.join(sofar, component)
            mandatory = (sofar == fname) and not expanded
            parent = _add_file(co, sofar, parent, mandatory, txn)
            if not parent:
                co.txn_abort(txn)
                return 1

    co.txn_commit(txn)
    return 0
246
def delete(co, files):
    """Remove files from the repository and from the filesystem.

    Directories may only be deleted when empty (apart from other entries
    deleted in this same call).  Returns 0 on success, 1 on error.
    """
    co.handle_name_cache = {}

    ltxn = co.txn_begin()

    fnames = []
    for handle, expanded in Glob(co, files).db_walk():
        file = handle_to_filename(co, handle)
        fnames.append((file, handle))

    # reverse the ordering so that dirs are deleted after their contents
    fnames.sort()
    fnames.reverse()

    # update the database
    for fname, handle in fnames:
        linfo = handle_name(co, handle, ltxn)
        if co.editsdb.has_key(handle, ltxn):
            co.editsdb.delete(handle, txn=ltxn)
        if 'add' in linfo:
            # never committed; just forget the pending add
            del co.handle_name_cache[handle]
        else:
            set_edit(co, handle, {'delete': 1}, ltxn)
            co.handle_name_cache[handle]['delete'] = 1

    # make sure the directories are empty
    for fname, handle in fnames:
        linfo = db_get(co, co.staticdb, handle, None)
        if linfo['type'] == 'dir':
            if len(children_count(co, handle, ltxn)):
                print('error - %s is not empty' % (fname,))
                co.txn_abort(ltxn)
                return 1

    # finally, do the deleting
    for fname, handle in fnames:
        print('deleting: ' + fname)
        linfo = db_get(co, co.staticdb, handle, None)
        if linfo['type'] == 'dir':
            try:
                os.rmdir(path.join(co.local, fname))
            except OSError as err:
                # err.strerror instead of err[1]: exceptions are not
                # indexable in Python 3
                print('warning - %s: %s' % (err.strerror, fname))

        elif linfo['type'] == 'file':
            try:
                os.remove(path.join(co.local, fname))
            except OSError as err:
                print('warning - %s: %s' % (err.strerror, fname))
            co.modtimesdb.delete(handle, txn=ltxn)
            co.filenamesdb.delete(fname, txn=ltxn)

    co.txn_commit(ltxn)
    return 0
302
def rename(co, oldname, newname):
    """Rename oldname to newname in both the database and the filesystem.

    newname may name an existing repository directory (or end in a path
    separator, '.' or '..'), in which case the file keeps its basename and
    is moved into that directory.  Returns 0 on success, 1 on error.
    """
    co.handle_name_cache = {}

    # validate the source: inside the repo, tracked, and present on disk
    try:
        loldfile, foldname = subpath(co.local, oldname)
    except ValueError:
        print('error - ' + oldname + ' is outside repository')
        return 1
    ohandle = filename_to_handle(co, foldname)
    if ohandle is None:
        print('error - ' + foldname + ' is not in repository')
        return 1
    if not path.exists(oldname):
        print('error - ' + oldname + ' does not exist')
        return 1
    try:
        lnewfile, fnewname = subpath(co.local, newname)
    except ValueError:
        print('error - ' + newname + ' is outside repository')
        return 1

    # a trailing slash (or '.'/'..') means newname is a directory to move
    # into rather than the destination name itself
    dirmove, foo = 0, path.split(newname)[1]
    if foo == '' or foo == '..' or foo == '.':
        dirmove = 1
        npath, nname = fnewname, ''
    else:
        npath, nname = path.split(fnewname)

    nhandle = filename_to_handle(co, fnewname)
    if nhandle is not None:
        #if not dirmove or bdecode(co.staticdb.get(nhandle))['type'] != 'dir':
        if not dirmove or db_get(co, co.staticdb, nhandle, None)['type'] != 'dir':
            print('error - ' + newname + ' already exists in repository')
            return 1
        # moving into an existing directory: keep the old basename
        phandle = nhandle
        nname = path.split(foldname)[1]
        fnewname = path.join(fnewname, nname)
        newname = path.join(newname, nname)
    else:
        phandle = filename_to_handle(co, npath)
        if phandle is None:
            print('error - cannot rename into directory not in repository')
            return 1

    # destination must be unused in both the repository and the filesystem
    if nname == '.cdv':
        print('error - .cdv is a reserved name')
        return 1
    if filename_to_handle(co, fnewname) is not None:
        print('error - ' + newname + ' already exists in repository')
        return 1
    if path.exists(path.join(co.local, fnewname)):
        print('error - ' + newname + ' already exists in filesystem')
        return 1

    print('renaming: ' + foldname + ' -> ' + fnewname)
    ltxn = co.txn_begin()
    _set_name(co, ohandle, phandle, nname, ltxn)
    os.rename(path.join(co.local, foldname), path.join(co.local, fnewname))
    _rebuild_fndb(co, ltxn)
    co.txn_commit(ltxn)
    return 0
364
def _rebuild_fndb(co, txn):
    """Regenerate the filename -> handle index from modtimesdb."""
    # XXX: crude, should do partial trees
    co.filenamesdb.truncate(txn=txn)
    for key in co.modtimesdb.keys(txn):
        name = handle_to_filename(co, key)
        co.filenamesdb.put(name, key, txn=txn)
    return
372
def edit(co, files, by_id=False):
    """Mark files as edited so later commits pick up their new contents.

    co:     the Checkout
    files:  filenames to glob, or hexlified handle ids when by_id is True
    by_id:  interpret files as handle ids instead of paths
    Returns 0.
    """
    txn = co.txn_begin()
    if by_id:
        generator = [(binascii.unhexlify(handle), 0) for handle in files]
    else:
        generator = Glob(co, files).db_walk()
    for handle, expanded in generator:
        file = handle_to_filename(co, handle)

        # only regular files can be edited
        sinfo = db_get(co, co.staticdb, handle, None)
        if sinfo['type'] != 'file':
            print('warning - %s is not a file' % (file,))
            continue

        # fix: message previously misspelled as 'editting'
        print('editing: %s' % (file,))
        set_edit(co, handle, {'hash': 1}, txn)
    co.txn_commit(txn)
    return 0
392
def set_password(co):
    """Change the user's password on the remote repository via SRP.

    Prompts twice for the new password; returns 0 on success, 1 on error.
    """
    network_prep(co)
    handler = ClientHandler(co)
    remote = server_to_tuple(co.repo)
    try:
        session = authenticate(co, handler, remote, None, srp=True)
        first = getpass('New password: ')
        second = getpass('Confirm new password: ')
        if first != second:
            print('Confirmation failed. Password not changed.')
            return 1
        handler.set_password(session, first)
    except (NetworkError, ServerError) as msg:
        # both failure modes are reported identically
        print('password failed: ' + str(msg))
        return 1
    print('Password changed')
    return 0
413
def merge_analyze(co, heads, rhead, named, modified, deleted, txn):
    """Record which handles need explicit merge bookkeeping after pulling
    in a new head.

    heads:    the new full list of heads (already including rhead)
    rhead:    the newly merged-in remote head
    named:    handles whose names changed locally relative to rhead
    modified: handles whose contents changed locally relative to rhead
    deleted:  handles deleted locally relative to rhead
    """
    # clear out the merges which the new head resolved for us
    old_heads = heads[:]
    old_heads.remove(rhead)
    for handle, binfo in co.editsdb.items(txn):
        info = bdecode(binfo)

        if 'nmerge' in info:
            merge = False
            for head in old_heads:
                change = handle_last_modified(co, co.names, handle, head, txn)
                if change is not None and not is_ancestor(co, change, rhead, txn):
                    merge = True
            if not merge:
                unset_edit(co, handle, ['nmerge'], txn)

        if 'cmerge' in info:
            merge = False
            for head in old_heads:
                change = handle_last_modified(co, co.contents, handle, head, txn)
                if change is not None and not is_ancestor(co, change, rhead, txn):
                    merge = True
            if not merge:
                unset_edit(co, handle, ['cmerge'], txn)

    # keep track of what's been merged because we have to generate explicit
    # merge information for them later.
    lnamed, lmodified, ladded, ldeleted = \
            handles_in_branch(co, [rhead], heads, txn)

    # builtin set instead of sets.Set: the sets module was removed in
    # Python 3
    sdeleted, sldeleted = set(deleted), set(ldeleted)
    s_all_deleted = sldeleted | sdeleted

    lnamed = damerge(lnamed, ladded)
    lmodified = damerge(lmodified, ladded)

    # renamed on both sides: needs an explicit name merge
    slnamed, snamed = set(lnamed), set(named)
    for handle in (slnamed & snamed) - s_all_deleted:
        set_edit(co, handle, {'nmerge': 1}, txn)

    # contents changed on both sides: needs an explicit content merge
    slmodified, smodified = set(lmodified), set(modified)
    for handle in (slmodified & smodified) - s_all_deleted:
        set_edit(co, handle, {'cmerge': 1}, txn)

    # changed on one side and deleted on the other: propagate the delete
    for handle in (snamed | smodified) & (sldeleted - sdeleted):
        set_edit(co, handle, {'delete': 1}, txn)

    for handle in (slnamed | slmodified) & (sdeleted - sldeleted):
        set_edit(co, handle, {'delete': 1}, txn)

    return
479
def _update_helper(co, uinfo, named, modified, added, deleted, txn):
    """Reconcile remote name/delete changes with local checkout state.

    Invoked as a callback from ClientHandler.update() with the remote head
    in uinfo['head'].  named/modified/added/deleted are handle lists pulled
    in from the remote branch.  Side effects: fills in uinfo['newfiles'],
    uinfo['deletes'], uinfo['names'] and uinfo['infofiles'], updates the
    'heads' entry in linforepo, and records merge/conflict markers via
    set_edit.  Returns (handles, nconflicts): handles is a list of
    (handle, update-needed flag) pairs, nconflicts maps handles that ended
    in a name conflict.  Raises ClientError on bad repository data or an
    inconsistent working directory.
    """
    uinfo['newfiles'] = newfiles = {}
    uinfo['deletes'] = deletes = {}
    uinfo['names'] = names = {}
    uinfo['infofiles'] = infofiles = []

    # when not merging, simply report every modified file
    if not co.merge:
        return ([(handle, 0) for handle in modified], {})

    local = co.local
    rhead = uinfo['head']
    heads = bdecode(co.linforepo.get('heads'))
    # nothing to do if the remote head is already an ancestor of ours
    if rhead in co.branchmapdb and _is_ancestor(co, rhead, heads, None):
        return ([], {})

    named = damerge(named, added, deleted)
    modified = damerge(modified, added, deleted)

    co.handle_name_cache = {}
    try:
        named_files = find_update_files(co, rhead, named, txn)
    except HistoryError as msg:
        raise_(ClientError, 'Got garbage from repository: ' + str(msg))

    # classify each renamed/added/deleted file and detect rename conflicts
    handles, nconflicts = {}, {}
    for handle, linfo, rinfo in named_files:
        if linfo is None:
            # file is new to us
            if handle == roothandle:
                continue
            if 'delete' not in rinfo:
                newfiles[handle] = 1
                handles[handle] = 1
            else:
                handles[handle] = 0
            continue
        if 'delete' in rinfo:
            if 'delete' not in linfo:
                names[handle] = linfo
                deletes[handle] = 1
                handles[handle] = 1
            else:
                deletes[handle] = 0
                handles[handle] = 0
            continue
        elif 'delete' in linfo:
            handles[handle] = 0
            continue
        conflict, rename_points = rename_conflict_check(linfo, rinfo)
        if conflict == 'remote':
            names[handle] = linfo
            handles[handle] = 1
        elif conflict == 'conflict':
            #lfile = _handle_to_filename(co, handle, names, txn)
            #print 'file ' + lfile + ' was renamed both locally and remotely'
            names[handle] = linfo
            uname = unique_name(co, linfo['parent'], linfo['name'] + '.nameconflict', txn)
            _set_name(co, handle, linfo['parent'], uname, txn)
            infofiles.append((handle, rinfo['parent'], rinfo['name']))
            nconflicts[handle] = 1
            handles[handle] = 1

    # create list of all the handles to be updated, will not include ones
    # which don't exist and are in a deleted state after being pulled in
    for handle in modified:
        if handle in handles:
            continue

        linfo = handle_name(co, handle, txn)
        if linfo is None:
            rinfo = handle_name_at_point(co, handle, rhead, txn)
            if rinfo is None or 'delete' in rinfo:
                handles[handle] = 0
                continue
        elif 'delete' in linfo:
            handles[handle] = 0
            continue

        handles[handle] = 1

    # children of remotely-deleted directories get orphaned
    orphans = []
    for handle in list(deletes.keys()):
        for chandle in children_count(co, handle, txn):
            if chandle in deletes:
                continue
            #file = _handle_to_filename(co, chandle, {}, txn)
            #print 'parent of ' + file + ' was deleted, orphaning'
            cinfo = handle_name(co, chandle, txn)
            names[chandle] = cinfo
            infofiles.append((chandle, cinfo['parent'], ''))
            nconflicts[chandle] = 1
            handles[chandle] = 1
            orphans.append((chandle, cinfo['name']))

    # generate the new list of heads, the new one might encompass zero, some
    # or all of our existing heads
    pre_heads, temp_heads = heads[:], []
    inserted_head = False
    for head in heads:
        if is_ancestor(co, head, rhead, txn):
            if not inserted_head:
                inserted_head = True
                temp_heads.append(rhead)
        else:
            temp_heads.append(head)
    if not inserted_head:
        temp_heads.append(rhead)
    heads = temp_heads
    co.linforepo.put('heads', bencode(heads), txn=txn)

    merge_analyze(co, heads, rhead, named, modified, deleted, txn)

    # clear the name cache
    co.handle_name_cache = {}

    # move orphans to uniquely-named entries under the root
    for handle, name in orphans:
        uname = unique_name(co, roothandle, name + '.orphaned', txn)
        _set_name(co, handle, roothandle, uname, txn)

    # files whose (possibly new) parent was deleted also get orphaned
    for handle, linfo, rinfo in named_files:
        if handle in deletes:
            continue
        if handle not in newfiles and linfo is None:
            continue
        linfo = handle_name(co, handle, txn)
        pinfo = handle_name(co, linfo['parent'], txn)
        if 'delete' in pinfo:
            #file = _handle_to_filename(co, handle, {}, txn)
            #print 'parent of ' + file + ' was deleted, orphaning'
            if handle not in newfiles:
                names[handle] = linfo
            uname = unique_name(co, roothandle, linfo['name'] + '.orphaned', txn)
            _set_name(co, handle, roothandle, uname, txn)
            infofiles.append((handle, linfo['parent'], ''))
            nconflicts[handle] = 1

    # break directory parent loops by renaming until none remain
    breaks = 1
    while breaks:
        breaks = 0
        for handle, info in list(names.items()):
            if handle in deletes:
                continue
            #if bdecode(co.staticdb.get(handle, txn=txn))['type'] != 'dir':
            if db_get(co, co.staticdb, handle, txn)['type'] != 'dir':
                continue
            lhandle = parent_loop_check(co, handle, txn)
            if lhandle == handle:
                breaks += 1
                linfo = handle_name(co, handle, txn)
                #file = _handle_to_filename(co, handle, names, txn)
                #rfile = handle_to_filename(co, linfo['parent'], txn)
                #rfile = path.join(rfile, linfo['name'])
                #print 'parent loop for ' + file + ' -> ' + rfile
                uname = unique_name(co, linfo['parent'], linfo['name'] + '.parentloop', txn)
                _set_name(co, handle, info['parent'], uname, txn)
                infofiles.append((handle, info['parent'], ''))
                nconflicts[handle] = 1

    # resolve name collisions between renamed/new files and existing ones
    newnames = list(names.keys())
    newnames.extend(list(newfiles.keys()))
    for handle in newnames:
        if handle in deletes:
            continue
        lhandles = name_use_count(co, handle, txn)
        if len(lhandles) == 1:
            continue
        lhandles.remove(handle)
        lhandle = lhandles[0]
        #print 'name conflict for ' + _handle_to_filename(co, handle, {}, txn)
        linfo = handle_name(co, lhandle, txn)
        names[lhandle] = linfo
        uname = unique_name(co, linfo['parent'], linfo['name'] + '.nameconflict.local', txn)
        _set_name(co, lhandle, linfo['parent'], uname, txn)
        nconflicts[lhandle] = 1
        handles[lhandle] = 1

        linfo = handle_name(co, handle, txn)
        uname = unique_name(co, linfo['parent'], linfo['name'] + '.nameconflict.remote', txn)
        _set_name(co, handle, linfo['parent'], uname, txn)
        nconflicts[handle] = 1

    # validate the working directory: target directories must exist and
    # destinations must not already be occupied
    for handle in newnames:
        if handle in deletes:
            continue
        linfo = handle_name(co, handle, txn)
        if linfo['parent'] in newfiles:
            continue
        lfile = _handle_to_filename(co, linfo['parent'], names, txn)
        if not path.exists(path.join(local, lfile)):
            raise_(ClientError, 'directory ' + lfile + ' does not exist, you must revert it before updating')
        mode = os.lstat(path.join(local, lfile)).st_mode
        if not stat.S_ISDIR(mode):
            raise_(ClientError, lfile + ' is not a directory, you must revert it before updating')
        chandle = rename_race(co, handle, names, txn)
        lfile = path.join(lfile, linfo['name'])
        if not chandle and path.exists(path.join(local, lfile)):
            raise_(ClientError, 'file ' + lfile + ' was added or renamed remotely but already exists, you must move or delete')

    # every file being renamed must still exist with the expected type
    for handle in list(names.keys()):
        lfile = _handle_to_filename(co, handle, names, txn)
        if not path.exists(path.join(local, lfile)):
            raise_(ClientError, 'file ' + lfile + ' does not exist, you must revert it before updating')
        #linfo = bdecode(co.staticdb.get(handle, txn=txn))
        linfo = db_get(co, co.staticdb, handle, txn)
        mode = os.lstat(path.join(local, lfile)).st_mode
        if linfo['type'] == 'file' and not stat.S_ISREG(mode):
            raise_(ClientError, 'file ' + lfile + ' is expected to be of type ' + linfo['type'])
        if linfo['type'] == 'dir' and not stat.S_ISDIR(mode):
            raise_(ClientError, 'file ' + lfile + ' is expected to be of type ' + linfo['type'])

    return list(handles.items()), nconflicts
690
def update(co, remote, merge=True):
    """Pull changes from the remote repository and optionally merge them
    into the working copy.

    co:     the Checkout
    remote: server tuple as produced by server_to_tuple
    merge:  when False, only fetch and record the remote head
    Returns 0 on success, 1 on failure.
    """
    try:
        network_prep(co)
    except NetworkError as msg:
        print(msg)
        return 1

    local = co.local

    # flag locally modified files before talking to the server
    txn = co.txn_begin()
    mark_modified_files(co, txn)
    co.txn_commit(txn)

    co.merge = merge
    ch = ClientHandler(co)
    txn = co.txn_begin()
    try:
        s = authenticate(co, ch, remote, txn)
        co.txn_commit(txn)
    except (NetworkError, ServerError) as msg:
        co.txn_abort(txn)
        print('update failed: ' + str(msg))
        return 1

    txn = co.txn_begin()
    try:
        updateinfo = ch.update(s, remote[2], _update_helper, txn)
    except NetworkError as msg:
        print('update failed: ' + str(msg))
        co.txn_abort(txn)
        return 1
    except (ClientError, ServerError) as msg:
        print('update failed: ' + str(msg))
        co.txn_abort(txn)
        ch.close(s)
        return 1
    ch.close(s)
    ch = None

    # record the repo head so we can do things relative to it later
    co.linforepo.put(tuple_to_server(remote), updateinfo['head'], txn=txn)

    if co.txn_commit(txn):
        print('Updating local database failed, aborting...')
        return 1

    if not co.merge:
        print('Repository head is ' + short_id(co, updateinfo['head']))
        print('Update succeeded')
        return 0

    modified_files = updateinfo['modified']
    if 'newfiles' not in updateinfo:
        print('Update succeeded')
        return 0
    newfiles = updateinfo['newfiles']
    deletes = updateinfo['deletes']
    names = updateinfo['names']
    infofiles = updateinfo['infofiles']

    # XXX: need to do something about making updates atomic
    txn = co.txn_begin()

    # create new directories and register new files in the filenames index
    for handle in list(newfiles.keys()):
        lfile = _handle_to_filename(co, handle, names, None)

        staticinfo = db_get(co, co.staticdb, handle, txn)
        if staticinfo['type'] == 'dir':
            mdir(path.join(local, lfile))
        else:
            co.filenamesdb.put(lfile, handle, txn=txn)

    do_fndb_rebuild = names != {}

    # first rename pass: move files to intermediate '.safename' names so
    # renames cannot collide; handles whose targets aren't safe yet are
    # requeued until they are
    renames = list(names.keys())
    while renames:
        handle = renames.pop()
        if handle in deletes or handle in newfiles:
            continue
        if not _rename_safe_check(co, handle, names, None):
            renames.insert(0, handle)
            continue
        spath = _handle_to_filename(co, handle, names, None)
        # sys.maxint was removed in Python 3; any large bound works here
        safename = '.safename.' + str(randrange(0, 2**63))
        names[handle]['name'] = names[handle]['name'] + safename
        info = handle_name(co, handle, None)
        names[handle]['parent'] = info['parent']
        dpath = _handle_to_filename(co, handle, names, None)
        os.rename(path.join(local, spath), path.join(local, dpath))

    # collect deletions, dropping their edit records as we go
    delete_files, delete_dirs = [], []
    for handle, present in list(deletes.items()):
        if not present:
            continue
        info = db_get(co, co.staticdb, handle, None)
        lfile = _handle_to_filename(co, handle, names, None)
        if info['type'] == 'dir':
            delete_dirs.append(lfile)
        elif info['type'] == 'file':
            delete_files.append((lfile, handle))
        if co.editsdb.has_key(handle, txn):
            co.editsdb.delete(handle, txn=txn)
        del names[handle]

    for lfile, handle in delete_files:
        os.remove(path.join(local, lfile))
        co.modtimesdb.delete(handle, txn=txn)
        co.filenamesdb.delete(lfile, txn=txn)

    if do_fndb_rebuild:
        _rebuild_fndb(co, txn)

    # delete directories deepest-first so children go before parents
    delete_dirs.sort()
    delete_dirs.reverse()
    for lfile in delete_dirs:
        try:
            os.rmdir(path.join(local, lfile))
        except OSError:
            print('warning - %s could not be deleted because it is not empty' % \
                  (lfile,))

    # second rename pass: move from safe names to the final names
    for handle in list(names.keys()):
        spath = _handle_to_filename(co, handle, names, None)
        del names[handle]
        dpath = _handle_to_filename(co, handle, names, None)
        os.rename(path.join(local, spath), path.join(local, dpath))

    # move merged file contents from the temp area into place
    for handle in modified_files:
        if handle in deletes:
            continue
        temppath = path.join(co.temppath, binascii.hexlify(handle))
        filename = path.join(co.local, _handle_to_filename(co, handle, names, txn))
        preserving_rename(temppath, filename)

    # write .info files recording the remote name of conflicted files
    for handle, rparent, rname in infofiles:
        assert handle not in deletes
        info = handle_name(co, handle, txn)
        file = unique_name(co, info['parent'], _handle_to_filename(co, handle, names, None) + '.info', None)
        rfile = path.join(handle_to_filename(co, rparent), rname)
        # NOTE(review): opened binary but rfile is a str; under Python 3
        # this write would need an explicit encode — confirm
        h = open(path.join(local, file), 'wb')
        h.write(rfile + '\n')
        h.close()

    if co.txn_commit(txn):
        print('Updating local database failed, aborting...')
        return 1

    print('Update succeeded')
    return 0
847
def cli_construct(co, spoint):
    """Recreate the entire working copy as of changeset *spoint*.

    Every currently-managed file is removed from the filesystem, the tree
    is reconstructed from history at the given point, and the local head
    is reset to that point.  Returns 0 on success.
    """
    point = long_id(co, spoint)

    # now create everything at the specified point
    adds, deletes = handles_in_branch(co, [rootnode], [point], None)[2:4]
    deletes_dict = {}.fromkeys(deletes)
    newfiles = []
    for handle in adds:
        # skip handles which were added and later deleted on this branch
        if handle in deletes_dict:
            continue

        hfile = fullpath_at_point(co, handle, point, None)
        htype = bdecode(co.staticdb.get(handle))['type']
        newfiles.append((hfile, handle, htype))

    # sorted so parent directories precede their contents
    newfiles.sort()
    for hfile, handle, htype in newfiles:
        print('preparing: %s' % (hfile,))
        if htype == 'file':
            # stage reconstructed contents in the temp area; moved into
            # place after the old tree is removed below
            cinfo = handle_contents_at_point(co, handle, point, None)
            # NOTE(review): binascii.hexlify returns bytes on Python 3 —
            # confirm path.join accepts it here (or that a compat shim exists)
            temppath = path.join(co.temppath, binascii.hexlify(handle))
            fd = open(temppath, 'w')
            fd.write('\n'.join(cinfo['lines']))
            fd.close()

    # put together a list of all the files we are managing
    handles = Glob(co, [path.join(co.local, '...')]).db_walk(deletes=0)
    sheep = []
    for handle, expanded in handles:
        hfile = handle_to_filename(co, handle, None)
        sheep.append((hfile, handle))

    # delete all of them
    # reverse-sorted so files go before their parent directories
    sheep.sort()
    sheep.reverse()
    for hfile, handle in sheep:
        print('removing: %s' % (hfile,))
        destpath = path.join(co.local, hfile)
        htype = bdecode(co.staticdb.get(handle))['type']
        try:
            if htype == 'dir':
                os.rmdir(destpath)
            else:
                os.unlink(destpath)
        except OSError as msg:
            # best-effort removal; report and keep going
            print('warning - %s' % (str(msg),))

    txn = co.txn_begin()

    # clear out whatever we were editing
    co.editsdb.truncate(txn)

    # rename everything to the right place
    co.modtimesdb.truncate(txn)
    co.filenamesdb.truncate(txn)
    for hfile, handle, htype in newfiles:
        print('creating: %s' % (hfile,))
        destpath = path.join(co.local, hfile)
        if htype == 'dir':
            try:
                os.mkdir(destpath)
            except OSError:
                # fine if the directory survived the removal pass above
                if not os.path.isdir(destpath):
                    raise
            continue

        elif htype == 'file':
            temppath = path.join(co.temppath, binascii.hexlify(handle))
            preserving_rename(temppath, destpath)
            co.modtimesdb.put(handle, bencode(path.getmtime(destpath)), txn=txn)
            co.filenamesdb.put(hfile, handle, txn=txn)

    # reset the local head to the constructed point
    co.linforepo.put('heads', bencode([point]), txn=txn)
    co.txn_commit(txn)

    return 0
924
def rebuild(co, uheads):
    """Rebuild the derived history databases from the given heads.

    uheads is a list of user-supplied head ids; an empty list means use
    the heads currently recorded for this client.  Returns 0 on success,
    1 if the history rebuild fails.
    """
    txn = co.txn_begin()

    if uheads == []:
        heads = bdecode(co.linforepo.get('heads'))
    else:
        heads = [long_id(co, head) for head in uheads]
    try:
        rebuild_from_points(co, heads, txn)
    except HistoryError as msg:
        print('error - ' + str(msg))
        co.txn_abort(txn)
        return 1

    # regenerate the filename -> handle index from scratch
    co.filenamesdb.truncate(txn)
    for handle in list(co.modtimesdb.keys()):
        hinfo = handle_name(co, handle, None)
        if hinfo is None or 'delete' in hinfo:
            # handle no longer exists or is deleted; drop its records
            co.modtimesdb.delete(handle, txn)
            if handle in co.editsdb:
                co.editsdb.delete(handle, txn)
        # NOTE(review): execution falls through here even for deleted
        # handles, re-inserting them into filenamesdb below — confirm a
        # 'continue' is not intended after the deletes above.

        lfile = handle_to_filename(co, handle, txn)
        co.filenamesdb.put(lfile, handle, txn=txn)

    # normalize delete records in the edits database
    for handle, value in co.editsdb.items(txn):
        if 'delete' in bdecode(value):
            co.editsdb.delete(handle, txn)
            set_edit(co, handle, {'delete': 1}, txn)

    # strip merge markers; they are recalculated below
    for handle, value in co.editsdb.items(txn):
        linfo = bdecode(value)

        merges = []
        if 'nmerge' in linfo:
            merges.append('nmerge')
        if 'cmerge' in linfo:
            merges.append('cmerge')

        unset_edit(co, handle, merges, txn)

    # redo the merge analysis pairwise across all heads
    for i in range(1, len(heads)):
        named, modified, added, deleted = \
               handles_in_branch(co, heads[:i], [heads[i]], txn)
        named    = damerge(named, added, deleted)
        modified = damerge(modified, added, deleted)

        merge_analyze(co, heads[:i+1], heads[i], named, modified, deleted, txn)

    print('Rebuild done.')
    co.txn_commit(txn)
    write_rebuild_version(co.conf_path)
    return 0
978
def cli_is_ancestor(co, point1, point2):
    """Report whether point1 is an ancestor of point2.

    Prints the answer and returns 0 when it is an ancestor, 1 otherwise.
    """
    ancestor = long_id(co, point1)
    descendent = long_id(co, point2)
    if not is_ancestor(co, ancestor, descendent, None):
        print(point1 + ' is not an ancestor of ' + point2)
        return 1
    print(point1 + ' is an ancestor of ' + point2)
    return 0
986
def cli_print_dag(co, uheads):
    """Pretty-print the full history DAG from the given heads.

    An empty uheads list means use the client's current local heads.
    """
    if uheads == []:
        heads = bdecode(co.linforepo.get('heads'))
    else:
        heads = []
        for head in uheads:
            heads.append(long_id(co, head))
    pretty_print_big_dag(co, heads)
    return 0
994
def cli_print_mini_dag(co, file, uheads, by_id):
    """Pretty-print the history DAG restricted to a single file.

    file is either a hex handle id (by_id true) or a local path.  An
    empty uheads list means use the client's current local heads.
    """
    if by_id:
        handle = binascii.unhexlify(file)
    else:
        fname = subpath(co.local, file)[1]
        handle = filename_to_handle(co, fname)

    heads = bdecode(co.linforepo.get('heads')) if uheads == [] \
            else [long_id(co, head) for head in uheads]
    pretty_print_dag(co, handle, heads)
    return 0
1009
def populate_local_repos(co, ltxn):
    """Seed a fresh client database with the canonical root changeset.

    Does nothing if the client already has a 'heads' entry.
    """
    if co.linforepo.has_key('heads', ltxn):
        return

    # the root changeset: no precursors, one root directory handle
    root = bencode({'precursors': [], 'handles': {roothandle: {'add': {'type': 'dir'}, 'name': ''}}})
    # NOTE(review): 'sha' is presumably a sha1 wrapper imported elsewhere
    # in this module — confirm (hashlib.sha1 under py3)
    head = sha.new(root).digest()
    assert head == rootnode
    co.lcrepo.put(head, root, txn=ltxn)
    co.linforepo.put('heads', bencode([head]), txn=ltxn)
    co.linforepo.put('branchmax', bencode(0), txn=ltxn)
    co.linforepo.put('lasthandle', bencode(0), txn=ltxn)

    sync_history(co, head, ltxn)
    return
1024
def _list_merge_files(co):
    """Return sorted (filename, two-letter status) pairs for files that
    differ between the repository head and the local heads.

    Returns [] when no repository is configured.
    """
    if co.repo is None:
        return []

    repohead = repo_head(co, co.repo)
    if repohead is None:
        repohead = rootnode
    heads = bdecode(co.linforepo.get('heads'))

    named, modified, added, deleted = \
           handles_in_branch(co, [repohead], heads, None)
    handles = damerge(named, modified, added, deleted)

    named, modified = Set(named), Set(modified)
    added, deleted = Set(added), Set(deleted)

    files = []
    for handle in handles:
        if handle in added:
            # added then deleted on the same branch: nothing to show
            if handle in deleted:
                continue
            letters = 'AA'
        elif handle in deleted:
            letters = 'DD'
        else:
            mletter = 'M' if handle in modified else ' '
            nletter = 'N' if handle in named else ' '
            letters = mletter + nletter
            assert letters != '  '

        files.append((handle_to_filename(co, handle), letters))

    files.sort()
    return files
1066
class GarbledCommitError(Exception):
    """Raised when an edited commit message cannot be parsed back."""
    pass
1068
def _commit_helper(co, commit_files):
    """Run the user's editor to collect a commit comment and file list.

    Writes a template (comment area, '### Files' list, optional
    '### Merge files' list) to a temporary file, spawns the editor, then
    parses the result; on a parse error the user may re-edit with an
    error banner prepended.  Returns (selected_commit_files, comment).
    Raises CommitError if the editor cannot be run or the user gives up.
    """
    output = []

    output.extend(['', '### Enter comment above', '### Files'])

    # map display names back to (handle, info) so the user's possibly
    # pruned file list can be turned into commit entries again
    files, name_map = [], {}
    for handle, info in commit_files:
        name = handle_to_filename(co, handle)
        files.append((name, ''.join(_letters(info))))
        name_map[name] = (handle, info)

    files.sort()

    for name, letters in files:
        output.append("%s\t%s" % (letters, name))

    files = _list_merge_files(co)
    if len(files):
        output.append('### Merge files')
        for name, letters in files:
            output.append('%s\t%s' % (letters, name))

    output.append(os.linesep)

    fd, fname = tempfile.mkstemp()
    fhandle = os.fdopen(fd, 'w+')

    # NOTE(review): py2-style str.decode/.encode round-trip; on Python 3
    # str has no .decode — confirm this path works under py3.
    # text_encoding is defined elsewhere in this module.
    out_str = os.linesep.join(output)
    out_ustr = out_str.decode('utf8')
    out_str = out_ustr.encode(text_encoding)
    fhandle.write(out_str)

    fhandle.close()

    # pick the editor: CDVEDITOR, then EDITOR, then a platform default
    if platform == 'win32':
        spawn = os.spawnv
        editor = os.environ['WINDIR'] + '\\notepad.exe'
    else:
        spawn = os.spawnvp
        editor = 'vi'

    if 'CDVEDITOR' in os.environ:
        editor = os.environ['CDVEDITOR']
    elif 'EDITOR' in os.environ:
        editor = os.environ['EDITOR']

    args = editor.split() + [fname]
    errored = True
    while errored:
        errored = False
        if spawn(os.P_WAIT, args[0], args):
            raise_(CommitError, 'Could not run editor "%s"' % (editor,))

        fhandle = open(fname, 'rU')
        text = fhandle.read()
        fhandle.close()

        try:
            try:
                utext = text.decode(text_encoding)
            except UnicodeDecodeError:
                raise_(GarbledCommitError, \
                      "Invalid %s characters in comment" % (text_encoding,))

            lines = utext.encode('utf8').splitlines()

            # drop any error banner lines left from a previous round
            cur_line = 0
            try:
                while lines[cur_line].startswith('### Error: '):
                    lines.pop(cur_line)
            except IndexError:
                pass

            # everything above the marker line is the comment
            while cur_line < len(lines):
                if lines[cur_line] == '### Enter comment above':
                    break
                cur_line += 1

            if cur_line == len(lines):
                raise GarbledCommitError("Could not find end of comment")

            comment = '\n'.join(lines[:cur_line])

            cur_line += 1
            if lines[cur_line] != '### Files':
                raise GarbledCommitError("Expected '### Files' line after end of comment")

            # read the (possibly pruned) file list back into commit entries
            fcommit_files = []

            cur_line += 1
            while cur_line < len(lines):
                line = lines[cur_line]
                cur_line += 1
                if line.strip() == '':
                    continue
                if line.startswith('### '):
                    break
                try:
                    tab = line.index('\t')
                except ValueError:
                    raise_(GarbledCommitError, "Bad commit file line:\n%s" % \
                          (line[:].rstrip(),))
                name = line[tab+1:].rstrip()
                fcommit_files.append(name_map[name])

        except GarbledCommitError as msg:
            error = msg.args[0]
            errored = True

        if errored:
            # offer to re-edit with the error prepended to the buffer
            answer = input("Error: %s\nReturn to editor? [Y/n]: " % (error,))
            if answer == 'n':
                raise_(CommitError, msg)

            output = ['### Error: %s' % (line,) for line in error.split('\n')]
            out_str = os.linesep.join(output)
            out_ustr = out_str.decode('utf8')
            out_str = out_ustr.encode(text_encoding)

            fhandle = open(fname, 'w')
            fhandle.write(out_str)
            fhandle.write(text)
            fhandle.close()

    os.remove(fname)

    return fcommit_files, comment
1196
def commit(co, remote, comment, tstamp=None, backup=False, files=None):
    """Create a new changeset from the current edits and optionally push it.

    remote is a (host, port, repository) tuple, or None for a purely
    local commit.  A comment of None means "collect one via the editor";
    backup pushes the existing head instead of generating a changeset.
    files restricts the commit to the given paths; None or [] means all
    edited files.  Returns 0 on success, 1 on failure.
    """
    # None instead of a shared mutable list default (classic Python
    # pitfall); normalize here, behavior for all callers is unchanged
    if files is None:
        files = []

    try:
        if remote is None:
            if co.user is None:
                co.user = co.varsdb.get('user')
            if co.user is None:
                raise NetworkError('You must set the "user" variable')
            repohead = None
        else:
            network_prep(co)
            repohead = repo_head(co, co.repo)
            if repohead is None:
                repohead = rootnode
    except NetworkError as msg:
        print(msg)
        return 1

    co.handle_name_cache = {}

    # pick up filesystem modifications before deciding what to commit
    ltxn = co.txn_begin()
    mark_modified_files(co, ltxn)
    co.txn_commit(ltxn)

    try:
        if files == []:
            handles = [(handle, 0) for handle in list(co.editsdb.keys())]
        else:
            handles = Glob(co, files).db_walk(deletes=1)

        commit_files = find_commit_files(co, handles)

        # get the comment from the user if it was supplied on the command line
        if not backup:
            if comment is None:
                commit_files, comment = _commit_helper(co, commit_files)

            # clean up the comment a bit
            comment = comment.rstrip()
            if comment == '' and not co.nopass:
                print('No comment given, aborting.')
                return 1
            comment = comment + '\n'

    except CommitError as msg:
        print('commit failed: ' + str(msg))
        return 1

    # create and verify the changeset
    ltxn = co.txn_begin()

    point = None
    try:
        if not backup:
            point = gen_changeset(co, commit_files, comment, repohead, ltxn, tstamp=tstamp)
    except HistoryError as msg:
        co.txn_abort(ltxn)
        print('error - ' + str(msg))
        return 1

    if point is not None:
        try:
            sync_history(co, point, ltxn)
        except HistoryError as msg:
            # the changeset we just generated does not apply cleanly
            print('commit failed: ' + str(msg))
            print('THIS IS REALLY BAD!!!')
            co.txn_abort(ltxn)
            return 1
    else:
        # nothing generated (backup); push the current head instead
        precursors = bdecode(co.linforepo.get('heads', txn=ltxn))
        point = precursors[0]

        if len(precursors) > 1 and backup is True:
            print('error - cannot use backup flag when merging')
            co.txn_abort(ltxn)
            return 1

    if remote is not None:
        ch = ClientHandler(co)

        try:
            s = authenticate(co, ch, remote, ltxn)
        except NetworkError as msg:
            co.txn_abort(ltxn)
            print('commit failed: ' + str(msg))
            return 1
        except ServerError as msg:
            co.txn_abort(ltxn)
            print('commit failed: ' + str(msg))
            return 1

        try:
            ch.commit(s, remote[2], point, ltxn)
        except NetworkError as msg:
            co.txn_abort(ltxn)
            print('commit failed: ' + str(msg))
            return 1
        except ServerError as msg:
            ch.close(s)
            co.txn_abort(ltxn)
            print('commit failed: ' + str(msg))
            return 1

        ch.close(s)
        ch = None
        # record what the server has now seen from us
        co.linforepo.put(tuple_to_server(remote), point, txn=ltxn)

    co.txn_commit(ltxn)
    print('commit succeeded')
    return 0
1306
def create_repo(co, remote):
    """Create the repository named remote[2] on the remote server.

    Returns 0 on success, 1 on failure.
    """
    try:
        network_prep(co)
    except NetworkError as msg:
        print(msg)
        return 1

    ch = ClientHandler(co)

    txn = co.txn_begin()
    try:
        s = authenticate(co, ch, remote, txn)
        co.txn_commit(txn)
    except NetworkError as msg:
        co.txn_abort(txn)
        print('creation failed: ' + str(msg))
        return 1
    except ServerError as msg:
        co.txn_abort(txn)
        print('creation failed: ' + str(msg))
        return 1

    try:
        ch.create_repo(s, remote[2])
    except NetworkError as msg:
        print('creation failed: ' + str(msg))
        retval = 1
    except ServerError as msg:
        print('creation failed: ' + str(msg))
        retval = 1
    else:
        print('creation succeeded')
        retval = 0

    # close the connection on every path; previously the NetworkError
    # branch returned without closing, leaking the socket
    ch.close(s)
    return retval
1343
def remove_repo(co, remote):
    """Delete the repository named remote[2] from the remote server.

    Returns 0 on success, 1 on failure.
    """
    try:
        network_prep(co)
    except NetworkError as msg:
        print(msg)
        return 1

    ch = ClientHandler(co)

    txn = co.txn_begin()
    try:
        s = authenticate(co, ch, remote, txn)
        co.txn_commit(txn)
    except NetworkError as msg:
        co.txn_abort(txn)
        print('destroy failed: ' + str(msg))
        return 1
    except ServerError as msg:
        co.txn_abort(txn)
        print('destroy failed: ' + str(msg))
        return 1

    try:
        ch.remove_repo(s, remote[2])
    except NetworkError as msg:
        print('destroy failed: ' + str(msg))
        retval = 1
    except ServerError as msg:
        print('destroy failed: ' + str(msg))
        retval = 1
    else:
        print('destroy succeeded')
        retval = 0

    # close the connection on every path; previously the NetworkError
    # branch returned without closing, leaking the socket
    ch.close(s)
    return retval
1380
def list_repos(co):
    """Print the list of repositories available on the configured server.

    Returns 0 on success, 1 on failure.
    """
    try:
        network_prep(co)
    except NetworkError as msg:
        print(msg)
        return 1

    ch = ClientHandler(co)

    txn = co.txn_begin()
    try:
        s = authenticate(co, ch, server_to_tuple(co.repo), txn)
        co.txn_commit(txn)
    except NetworkError as msg:
        co.txn_abort(txn)
        print('list failed: ' + str(msg))
        return 1
    except ServerError as msg:
        co.txn_abort(txn)
        print('list failed: ' + str(msg))
        return 1

    try:
        rlist = ch.list_repos(s)
    except NetworkError as msg:
        # close here too; previously this path leaked the socket
        ch.close(s)
        print('list failed: ' + str(msg))
        return 1
    except ServerError as msg:
        print('list failed: ' + str(msg))
        ch.close(s)
        return 1

    ch.close(s)
    print('Server has the following repositories:\n\t', end=' ')
    print('\n\t'.join(rlist))
    return 0
1417
1418def _letters(value):
1419    if 'add' in value:
1420        return ['A', 'A']
1421    if 'delete' in value:
1422        return ['D', 'D']
1423    letters = [' ', ' ']
1424    if 'hash' in value:
1425        letters[0] = 'M'
1426    if 'name' in value:
1427        letters[1] = 'N'
1428    return letters
1429
def describe(co, point, short, xml, dodiff, files):
    """Describe a changeset: XML dump, summary/verbose text, optional diff.

    Returns 0 on success, 1 on error (or the diff's return value when
    dodiff is set).
    """
    point = long_id(co, point)

    if xml:
        try:
            print(dump_changeinfo(co, point))
        except ValueError:
            print('error - XML can only be written for clean merges.')
            return 1
        return 0

    changeset = bdecode(co.lcrepo.get(point))
    _print_change(co, point, changeset, not short)

    if dodiff and point != rootnode:
        previous = short_id(co, changeset['precursors'][0])
        return diff(co, [previous, short_id(co, point)], files, True)

    return 0
1448
def status(co, files, verbose):
    """Print the status of client files, plus pending merge files.

    Returns 0 on success, 1 if the ignore list is malformed.
    """
    # pick up filesystem modifications first
    ltxn = co.txn_begin()
    mark_modified_files(co, ltxn)
    co.txn_commit(ltxn)

    if verbose:
        try:
            plist = _status_verbose(co, files)
        except re.error as msg:
            print('error - bad ignore list: %s' % (str(msg),))
            return 1
    else:
        plist = _status(co, files)

    # print the list of modified files
    if len(plist):
        print('### Files')
        plist.sort()
        rows = [entry[1][0] + entry[1][1] + '\t' + entry[0] for entry in plist]
        print(os.linesep.join(rows))

    # print the list of merged files
    plist = _list_merge_files(co)
    if len(plist):
        print('### Merge files')
        for name, letters in plist:
            print(letters + '\t' + name)

    return 0
1482
def _status(co, files):
    """Return (filename, letters) pairs for edited files matching *files*.

    An empty files list means report every edited file.
    """
    edited = Set(list(co.editsdb.keys()))

    if files == []:
        wanted = edited
    else:
        wanted = Set([h for h, expanded in Glob(co, files).db_walk(deletes=1)])

    # only files that are both requested and being edited
    plist = []
    for handle in (wanted & edited):
        info = bdecode(co.editsdb.get(handle))
        plist.append((handle_to_filename(co, handle), _letters(info)))

    return plist
1501
def _status_verbose(co, files):
    """Collect verbose status entries for *files*.

    Returns a list of (filename, [letter, letter]) pairs covering
    unmanaged files ('??'), managed files missing from the filesystem
    ('!!') and edited files (_letters codes).  Raises re.error if the
    ignore list contains a malformed pattern.
    """
    # Read ignore patterns
    cpats = ignore_patterns(co)

    # no args means search the whole client; rebind to a fresh list so
    # the caller's argument is not mutated (the old code appended to it)
    if files == []:
        files = [path.join(co.local, '...')]

    glob = Glob(co, files)

    # do the filename expansion
    fs_set = Set([file for file, expanded in glob.fs_walk()])

    # get the list of files we manage
    db_set = Set([handle_to_filename(co, handle) for handle, expanded in glob.db_walk(deletes=1)])

    # collect info on editted files
    ed_set, de_set = Set(), Set()
    for handle, value in list(co.editsdb.items()):
        file = handle_to_filename(co, handle)
        if 'delete' in bdecode(value):
            de_set.add(file)
        ed_set.add(file)

    plist = []

    # record unmanaged files, unless they match an ignore pattern
    for file in (fs_set - db_set):
        ignore = 0
        for cpat in cpats:
            if cpat.search(file) is not None:
                ignore = 1
                break

        if ignore:
            continue

        plist.append((file, ['?', '?']))

    # record files inconsistent in the filesystem
    for file in (db_set - fs_set - de_set):
        plist.append((file, ['!', '!']))

    # record all the modified files
    for file in ((ed_set & db_set) - (db_set - fs_set - de_set)):
        handle = filename_to_handle(co, file, deletes=1)
        hinfo = bdecode(co.editsdb.get(handle))
        plist.append((file, _letters(hinfo)))

    return plist
1552
def cli_heads(co):
    """Print the short ids of the client's current heads, comma-separated."""
    heads = bdecode(co.linforepo.get('heads'))
    print(', '.join([short_id(co, head) for head in heads]))
    return 0
1558
def cli_last_modified(co, lname, uhead, by_id):
    """Print the short id of the change that last modified a file.

    lname is a hex handle id (by_id true) or a local path; uhead is an
    optional head to search from, defaulting to the repository head or
    the first local head.  Returns 0 on success, 1 on a bad path.
    """
    if by_id:
        ohandle = binascii.unhexlify(lname)
    else:
        try:
            lfile, fname = subpath(co.local, lname)
        except ValueError:
            print('error - ' + lname + ' is outside repository')
            return 1
        ohandle = filename_to_handle(co, fname)
        if ohandle is None:
            print('error - ' + fname + ' is not in repository')
            return 1

    # resolve the head to search from
    if uhead is not None:
        repohead = long_id(co, uhead)
    else:
        repohead = repo_head(co, co.repo)
        if repohead is None:
            repohead = bdecode(co.linforepo.get('heads'))[0]

    point = handle_last_modified(co, co.contents, ohandle, repohead, None)
    print(short_id(co, point))

    return 0
1586
def ignore_patterns(co):
    """Load the client's ignore list as compiled regular expressions.

    Reads one regex per line from <conf_path>/ignore; a missing file
    means no patterns.  Each pattern is anchored with ^...$ so it must
    match the full path.  Raises re.error on a malformed pattern.
    """
    patterns = []
    try:
        # 'with' guarantees the descriptor is closed even if readlines
        # fails; mode 'r' replaces 'rU', which was removed in Python 3.11
        # (universal newlines are the default for text mode on py3)
        with open(path.join(co.conf_path, 'ignore'), 'r') as fd:
            patterns = fd.readlines()
    except IOError:
        pass

    # compile all the patterns and ensure they match full paths
    return [re.compile('^%s$' % pat.strip()) for pat in patterns]
1598
def diff(co, revs, files, print_new):
    """Print differences between two revisions of the client's files.

    revs is a two-element list whose entries are 'local' (working copy),
    'repo' (repository head), None (first local head) or a short id.
    files restricts the comparison; [] means everything that differs.
    print_new selects whether added/deleted files are shown in full.
    Honors the CDVDIFF environment variable as an external diff tool.
    Returns 0 on success, 1 if the external diff tool could not be run.
    """

    # status codes returned by file_lines()
    OK       = 0
    NEW_FILE = 1
    DELETED  = 2
    MISSING  = 3

    def file_lines(handle, rev, lfile):
        # Return (status, lines) for one side of the diff.  'local' reads
        # the working copy file; anything else reconstructs contents from
        # history.
        if rev == 'local':
            linfo = handle_name(co, handle, None)
            if linfo is None:
                return (NEW_FILE, [''])

            if 'delete' in linfo:
                return (DELETED, [''])

            try:
                h = open(path.join(co.local, lfile), 'rb')
                lines = h.read().split('\n')
                h.close()
            except IOError:
                return (MISSING, [''])

        else:
            linfo = handle_name_at_point(co, handle, rev, None)

            if linfo is None:
                return (NEW_FILE, [''])

            if 'delete' in linfo:
                return (DELETED, [''])

            pinfo = handle_contents_at_point(co, handle, rev, None)
            lines = pinfo['lines']

        return (OK, lines)

    def print_format(error, lfile):
        # Map a file_lines() status to (printable, display name, message
        # template); the printable flags from both sides are summed below
        # to decide whether anything is shown.
        if   error == OK:
            return (1, lfile, None)
        elif error == NEW_FILE:
            return (0, '(new file)', 'File "%s" added.')
        elif error == DELETED:
            return (0, '(deleted)', 'File "%s" deleted.')
        elif error == MISSING:
            print('WARNING - File not found: ' + lfile)
            return (0, '(File not found!)', None)

        assert 0
        return

    co.handle_name_cache = {}
    # pick up filesystem modifications first
    ltxn = co.txn_begin()
    mark_modified_files(co, ltxn)
    co.txn_commit(ltxn)
    editsdb = co.editsdb

    # resolve each rev spec into a path-lookup function and its argument
    pathfunc, patharg = [], []
    heads = bdecode(co.linforepo.get('heads'))
    for i in range(len(revs)):
        if revs[i] == 'repo':
            revs[i] = repo_head(co, co.repo)
            pathfunc.append(fullpath_at_point)
            patharg.append(revs[i])

        elif revs[i] == 'local':
            pathfunc.append(handle_to_filename)
            patharg.append(None)

        else:
            if revs[i] is None:
                revs[i] = heads[0]
            else:
                revs[i] = long_id(co, revs[i])

            pathfunc.append(fullpath_at_point)
            patharg.append(revs[i])

    # 'local' compares against all heads; a point compares as itself
    branch = []
    for i in range(len(revs)):
        if revs[i] == 'local':
            branch.append(heads)
        else:
            branch.append([revs[i]])

    if files == []:
        # no explicit files: take everything modified in either direction
        named, modified, added, deleted = \
               handles_in_branch(co, branch[0], branch[1], None)
        names2, modified2, added2, deleted2 = \
                handles_in_branch(co, branch[1], branch[0], None)
        handles = dmerge(modified, modified2)

        if print_new:
            handles = damerge(handles, added, deleted, added2, deleted2)

        if revs[0] == 'local' or revs[1] == 'local':
            handles = dmerge(handles, list(editsdb.keys()))

    else:
        handles = []
        for handle, expanded in Glob(co, files).db_walk():
            # glob-expanded (implicit) matches only count while edited
            if expanded and handle not in editsdb:
                continue
            handles.append(handle)

    # set up the external diff tool, if any; both versions of each file
    # are materialized under a scratch directory for it
    diffprog, diffpath = None, None
    if 'CDVDIFF' in os.environ:
        cdvdiff = shlex.split(os.environ['CDVDIFF'])
        if cdvdiff != []:
            diffargs = cdvdiff
            diffprog = diffargs[0]
            if platform == 'win32':
                # windows inteprets the argument, excitement abounds
                diffargs = ['"%s"' % (arg,) for arg in diffargs]
            diffpath = tempfile.mkdtemp('', 'cdv-')
            # keeps the scratch directory non-empty until cleanup
            fd = open(path.join(diffpath, 'holder'), 'a')
            fd.close()

    hlist = []
    for handle in handles:
        hlist.append((pathfunc[0](co, handle, patharg[0]),
                      pathfunc[1](co, handle, patharg[1]),
                      handle))
    hlist.sort()

    if platform == 'win32':
        spawn = os.spawnv
    else:
        spawn = os.spawnvp

    retval = 0
    for pre_lfile, lfile, handle in hlist:
        #linfo = bdecode(co.staticdb.get(handle))
        linfo = db_get(co, co.staticdb, handle, None)
        if linfo['type'] != 'file':
            continue

        error, pre_lines = file_lines(handle, revs[0], pre_lfile)
        printable, pre_lfile, msg0 = print_format(error, pre_lfile)

        error, lines = file_lines(handle, revs[1], lfile)
        printable2, lfile, msg1 = print_format(error, lfile)

        # both sides unprintable: nothing to show at all
        printable += printable2
        if printable == 0:
            continue

        # one side unprintable and not showing new files: just announce it
        if printable == 1 and not print_new:
            if msg0 is not None:
                print(msg0 % lfile)
            if msg1 is not None:
                print(msg1 % pre_lfile)
            continue

        if diffprog:
            # write both versions to disk and run the external tool
            file1 = path.join(diffpath, 'old', pre_lfile)
            file2 = path.join(diffpath, 'new', lfile)

            os.makedirs(path.split(file1)[0])
            foo = open(file1, 'w+')
            foo.write('\n'.join(pre_lines))
            foo.close()

            os.makedirs(path.split(file2)[0])
            foo = open(file2, 'w+')
            foo.write('\n'.join(lines))
            foo.close()

            fileargs = [file1, file2]
            if platform == 'win32':
                fileargs = ['"%s"' % (arg) for arg in fileargs]
            args = diffargs + fileargs

            try:
                ret = spawn(os.P_WAIT, diffprog, args)
            except OSError:
                ret = 127

            os.remove(file1)
            os.removedirs(path.split(file1)[0])
            os.remove(file2)
            os.removedirs(path.split(file2)[0])
            if ret == 127:
                print("error - Could not run diff program specified by CDVDIFF")
                retval = 1
                break

        else:
            # built-in unified diff output
            print('--- ' + pre_lfile)
            print('+++ ' + lfile)
            # the diff code assumes \n after each line, not between lines
            if pre_lines[-1] == '':
                pre_lines.pop()
            if lines[-1] == '':
                lines.pop()
            stdout.write(unified_diff(pre_lines, lines))

    if diffpath is not None:
        # clean up the scratch directory
        os.unlink(path.join(diffpath, 'holder'))
        os.rmdir(diffpath)
    return retval
1800
1801def _comment_compress(comment):
1802    try:
1803        offset = comment.index('\n')
1804        comment = comment[:offset]
1805    except ValueError:
1806        pass
1807
1808    if len(comment) > 76:
1809        comment = comment[:73] + '...'
1810
1811    return comment
1812
def _print_change(co, point, pinfo, v, owner=None, time=None):
    """Print a changeset description.

    pinfo is the decoded changeset dict.  v selects the verbose
    multi-line format; owner and time, when given, override the recorded
    commit owner and timestamp.
    """
    if not v:
        # one-line summary: id, optional owner, user, date, short comment
        print('Change %s' % (short_id(co, point),), end=' ')
        if owner is not None:
            print('(%s)' % (owner,), end=' ')
        print('by ' + pinfo['user'], end=' ')
        if time is not None:
            print('on ' + ctime(time))
        elif 'time' in pinfo:
            print('on ' + ctime(pinfo['time']))
        if 'comment' in pinfo:
            print('"' + _comment_compress(pinfo['comment']) + '"')

    else:
        # verbose '###'-sectioned format
        # NOTE(review): binascii.hexlify returns bytes on Python 3, which
        # would make this str concatenation fail — confirm py3 behavior
        # or that a compat wrapper is in place.
        print('### Change: ' + binascii.hexlify(point))
        print('### Short change: ' + short_id(co, point))
        if owner is not None:
            print('### Commit change: ' + owner)

        if pinfo['precursors'] != []:
            print('### Precursors:', end=' ')
            ps = []
            for p in pinfo['precursors']:
                ps.append(short_id(co, p))
            print(', '.join(ps))

        if 'user' in pinfo:
            print('### User: ' + pinfo['user'])

        if time is not None:
            print('### Date: ' + ctime(time))
        elif 'time' in pinfo:
            print('### Date: ' + ctime(pinfo['time']))

        if 'comment' in pinfo:
            print('### Comment')
            print(pinfo['comment'].rstrip())

        # file list, skipping the synthetic root directory handle
        plist = []
        for handle, value in list(pinfo['handles'].items()):
            if handle == roothandle:
                continue
            plist.append((fullpath_at_point(co, handle, point), _letters(value)))
        if len(plist):
            print('### Files')
            plist.sort()
            olist = []
            for value in plist:
                olist.append(value[1][0] + value[1][1] + '\t' + value[0])
            print(os.linesep.join(olist))

    return
1865
def _history_increment(co, handles, precursors, changes):
    """For each handle, record in changes the change which last modified it
    relative to each precursor.  changes maps change id -> {handle: 1}."""
    for pre in precursors:
        for handle in handles:
            change = handle_last_modified(co, co.contents, handle, pre, None)
            if change is None:
                continue
            changes.setdefault(change, {})[handle] = 1
    return
1873
def _history_deps(node, args):
    """DFS dependency callback: return the precursors of a changeset,
    stopping at nodes below any of the given cutoff points."""
    co, cutoffs = args
    if cutoffs and _is_ancestor(co, node, cutoffs, None):
        return []
    return bdecode(co.lcrepo.get(node))['precursors']
1880
def _owner_deps(node, args):
    """DFS dependency callback that follows only the first precursor.

    args is a mutable [co, countdown] pair; the countdown is a heuristic
    cap on how far back to walk.  A countdown starting at 0 never expires
    (it is decremented past zero on the first call).
    """
    co = args[0]

    # heuristic shortcut: give up once the countdown expires; note the
    # decremented value is only written back when we keep going
    remaining = args[1] - 1
    if remaining == 0:
        return []
    args[1] = remaining

    precursors = bdecode(co.lcrepo.get(node))['precursors']
    # the root changeset has no precursors
    if not precursors:
        return []
    return [precursors[0]]
1896
def history(co, head, limit, skip, v, by_id, files):
    """Print changeset history and return 0.

    co:     checkout object
    head:   optional changeset to start from; when None, uses the local
            heads plus the repository head
    limit:  maximum number of changes to print
    skip:   number of matching changes to skip before printing
    v:      verbose output flag
    by_id:  treat entries of files as hex handle ids instead of paths
    files:  restrict output to changes touching these files ([] for all)
    """
    repohead = None
    heads = None
    if head is not None:
        heads = [head]
    else:
        heads = bdecode(co.linforepo.get('heads'))
        repohead = repo_head(co, co.repo)
        if repohead is None:
            if co.repo is not None:
                repohead = rootnode
                heads.insert(0, repohead)
        else:
            heads.insert(0, repohead)

    head = heads[0]

    # the repository head may have more than we have merged locally
    if repohead is not None:
        while not _is_ancestor(co, head, heads, None):
            head = bdecode(co.lcrepo.get(head))['precursors'][0]

    # make an initial list of points to print based on user-specified files
    owner_cutoff = limit + skip
    changes = None
    if files != []:
        changes = {}
        if by_id:
            handles = [binascii.unhexlify(handle) for handle in files]
        else:
            handles = [handle for handle, expanded in Glob(co, files).db_walk()]
        _history_increment(co, handles, heads, changes)
        # the heuristic breaks if we're not printing everything
        owner_cutoff = -1

    # get a list of the clean merge heads for this repository
    dfs = DFS(_owner_deps, [co, owner_cutoff + 1])
    dfs.search(head)
    owners = dfs.result()

    # sort all the history points in reverse print order
    cutoffs = []
    if owners[0] != rootnode:
        cutoffs.append(owners[0])
    dfs = DFS(_history_deps, [co, cutoffs])
    dfs.search(rootnode)
    for head in heads:
        dfs.search(head)
    ordering = dfs.result()
    ordering.reverse()
    # pop off the root node
    assert ordering[-1] == rootnode
    ordering.pop()

    # walk newest-to-oldest, attributing changes to the server merge
    # ("owner") change which brought them in
    owner = 'local'
    time = None
    for point in ordering:
        hinfo = bdecode(co.lcrepo.get(point))

        clean = False
        if clean_merge_point(hinfo):
            clean = True

        if point == owners[-1]:
            # committed change, but we don't know the server merge change
            owners.pop()
            time  = hinfo['time']
            owner = '----'

            if clean:
                # this is the server merge change
                owner = short_id(co, point)
                assert changes is None or point not in changes
                continue

        # only display if it's not a clean merge and it was asked for
        if clean:
            continue

        if changes is not None and point not in changes:
            continue

        # figure out the next set of changes to print
        if changes is not None and point in changes:
            _history_increment(co, list(changes[point].keys()), hinfo['precursors'],
                               changes)
            del changes[point]

        # honour --skip before --limit
        if skip > 0:
            skip -= 1
            continue

        if limit == 0:
            break
        limit -=1

        _print_change(co, point, hinfo, v, owner=owner, time=time)
        if v:
            print('-' * 78, end=' ')
        print()

    return 0
1999
def revert(co, files, unmod_flag):
    """Restore files to their contents at the local head(s).

    co:         checkout object
    files:      patterns naming the files to revert
    unmod_flag: when set, only close the edit on files whose working copy
                already matches the committed contents
    Returns 0 on success, 1 on error.
    """
    co.handle_name_cache = {}
    txn = co.txn_begin()
    mark_modified_files(co, txn)

    heads = bdecode(co.linforepo.get('heads'))
    editsdb = co.editsdb
    #modified, names, deletes, newhandles = [], {}, {}, {}
    modified = []
    for handle, expanded in Glob(co, files).db_walk(deletes=1):
        filename = handle_to_filename(co, handle, txn)
        filepath = path.join(co.local, filename)

        editted  = False
        exists   = path.exists(filepath)

        if editsdb.has_key(handle, txn):
            info = bdecode(editsdb.get(handle, txn=txn))
            if     'add' in info or \
                   'name' in info or \
                   'delete' in info:
                print('warning - cannot revert name operation on %s' % (filename,))

            # XXX: hack until reverts on name ops work
            if 'add' in info:
                exists = True

            elif 'hash' in info:
                editted = True

        elif exists:
            if not expanded:
                file = handle_to_filename(co, handle, txn)
                print('warning - %s is not opened for edit' % (filename,))
            continue

        sinfo = bdecode(co.staticdb.get(handle, txn=txn))
        if sinfo['type'] == 'file' and (editted or not exists):
            # collect the committed contents of the file at each head
            file_points = []
            for point in heads:
                linfo = handle_contents_at_point(co, handle, point, None)
                if linfo is None:
                    continue
                file_points.append((linfo['lines'], linfo['line points'], linfo['points']))

            # XXX: hack until merge-through-conflict code is done
            if len(file_points) == 2:
                local = file_points[0]
                remote = file_points[1]
                lines = find_conflict(local[0],  local[1],  local[2],
                                      remote[0], remote[1], remote[2])

            else:
                lines = find_conflict_multiple_safe(file_points)[0]

            if lines is None:
                print('error - cannot revert %s' % (filename,))
                co.txn_abort(txn)
                return 1
            #modified.append((handle, linfo))

            # flatten the merge result, writing conflict markers where needed
            ls, conflict = [], 0
            for l in lines:
                if type(l) is str:
                    ls.append(l)
                else:
                    conflict = 1
                    ls.append('<<<<<<< local')
                    ls.extend(l[0])
                    ls.append('=======')
                    ls.extend(l[1])
                    ls.append('>>>>>>> remote')

            if unmod_flag:
                # only close the edit if the file on disk is unchanged
                if not exists:
                    continue

                h = open(filepath, 'rb')
                contents = h.read()
                h.close()
                # NOTE(review): under Python 3 this compares str to bytes and
                # would never match — confirm the intended runtime.
                if '\n'.join(ls) == contents:
                    unset_edit(co, handle, ['hash'], txn)
                    co.modtimesdb.put(handle, bencode(path.getmtime(filepath)), txn=txn)
                    print('reverting: %s' % (filename,))
                continue

            if not conflict and editted:
                unset_edit(co, handle, ['hash'], txn)
            # stage the reverted contents in the temp area first
            hfile = path.join(co.temppath, binascii.hexlify(handle))
            h = open(hfile, 'wb')
            h.write('\n'.join(ls))
            h.close()
            modified.append((handle, filepath))
            print('reverting: %s' % (filename,))

    # XXX: use update code

    # move staged files into place and record their new mtimes
    for handle, filename in modified:
        temppath = path.join(co.temppath, binascii.hexlify(handle))
        destpath = path.join(co.local, filename)
        preserving_rename(temppath, destpath)
        co.modtimesdb.put(handle, bencode(path.getmtime(destpath)), txn=txn)

    co.txn_commit(txn)
    print('revert succeeded')
    return 0
2106
def annotate(co, rev, files):
    """Print each line of files with information on the last modification"""
    # resolve the requested revision: 'repo', 'local', or an explicit id
    if rev == 'repo':
        rev = repo_head(co, co.repo)
    elif rev != 'local':
        rev = long_id(co, rev)

    # no files given means annotate everything
    if len(files) == 0:
        files = ['...']

    if rev == 'local':
        precursors = bdecode(co.linforepo.get('heads'))

        repohead = repo_head(co, co.repo)
        if repohead is None:
            repohead = rootnode

        # walk the repository head back until it is an ancestor of a local
        # head, then include it as a precursor if it is not one already
        if repohead not in precursors:
            while not _is_ancestor(co, repohead, precursors, None):
                info = bdecode(co.lcrepo.get(repohead))
                try:
                    repohead = info['precursors'][0]
                except IndexError:
                    repohead = rootnode

            if repohead not in precursors:
                precursors.insert(0, repohead)

    # cache of point -> (short id, user, date) to avoid repeated lookups
    cache = {}
    for handle, expanded in Glob(co, files).db_walk(deletes=1):
        sinfo = bdecode(co.staticdb.get(handle))
        if sinfo['type'] == 'file':
            if rev == 'local':
                # annotate the working copy against all simplified precursors
                filename = handle_to_filename(co, handle)
                file_points = []
                pres = simplify_precursors(co, handle, co.contents, precursors, None)[0]
                for pre, index in pres:
                    info = handle_contents_at_point(co, handle, pre, None, replayfunc=merge.annotate)
                    file_points.append((info['lines'], info['line points'], info['points']))
                lfile = path.join(co.local, filename)
                try:
                    h = open(lfile, 'rb')
                except IOError:
                    print('error - cannot open %s' % (filename,))
                    return 1
                lines = h.read().split('\n')
                h.close()

                lpoints = find_annotation(file_points, lines)
            else:
                # annotate the committed contents at the given revision
                filename = fullpath_at_point (co, handle, rev)
                cinfo = handle_contents_at_point(co, handle, rev, None, replayfunc=merge.annotate)
                if cinfo is None or 'delete' in cinfo:
                    if expanded:
                        continue
                    print('error - cannot find %s' % (filename,))
                    return 1

                lines = cinfo['lines']
                lpoints = cinfo['line points']

            # headers go to stderr so annotated output can be piped cleanly
            print('Annotations for %s' % (filename,), file=stderr)
            print('***************', file=stderr)
            for i in range(len(lines)):
                point = lpoints[i]
                if point is None:
                    # line is an uncommitted local modification
                    lrev = 'local'
                    user = '-----'
                    time = '-----'

                elif point in cache:
                    lrev, user, time = cache[point]

                else:
                    lrev = short_id(co, point)
                    pinfo = bdecode(co.lcrepo.get(point))
                    user = pinfo['user']
                    if 'time' in pinfo:
                        time = strftime('%Y-%m-%d', localtime(pinfo['time']))
                    else:
                        time = '-----'

                    cache[point] = (lrev, user, time)

                print('%s (%s %s):' % (lrev, user, time,), lines[i])

    return 0
2194
class PathError(Exception):
    """Raised when no checkout metadata directory can be found for a path."""
    pass
2197
def find_co(local, metadata_dir='.cdv'):
    """Walk up from local until a directory containing metadata_dir is found.

    Returns the directory holding the checkout metadata, or raises
    PathError when the filesystem root is reached without finding one.
    """
    if not path.exists(local):
        raise_(Exception, 'path ' + local + ' does not exist')
    while True:
        if path.exists(path.join(local, metadata_dir)):
            return local
        parent = path.split(local)[0]
        if parent == local:
            raise PathError('unable to find checkout, use "cdv init" to create one')
        local = parent
2207
2208#try:
2209#    import psyco
2210#    psyco.bind(diff, 0)
2211#except ImportError:
2212#    pass
2213
2214# Everything below here is for testing
2215
def file_contents(file):
    """Return the raw contents of the given file (opened in binary mode)."""
    with open(file, 'rb') as handle:
        return handle.read()
2221
def set_file_contents(file, contents):
    """Replace the contents of file (binary mode, created if missing)."""
    with open(file, 'wb') as handle:
        handle.write(contents)
2226
def append_file_contents(file, contents):
    """Append contents to file (binary mode, created if missing)."""
    with open(file, 'ab') as handle:
        handle.write(contents)
2231
def rm_rf(files):
    """Recursively delete the given regular files and directory trees.

    Entries that are neither regular files nor directories (e.g. symlinks)
    are left untouched.
    """
    for target in files:
        mode = os.lstat(target).st_mode
        if stat.S_ISREG(mode):
            os.unlink(target)
        elif stat.S_ISDIR(mode):
            # empty the directory before removing it
            children = [path.join(target, name) for name in os.listdir(target)]
            rm_rf(children)
            os.rmdir(target)
2241
# Server and agent fixtures shared by the test harness below.
sh = None   # ServerHandler instance created by init_test
sht = None  # Thread running the test server's listen loop
ag = None   # Agent instance created by init_test
agt = None  # Thread running the agent's listen loop
2246
def init_test(local, remote, remote2):
    """Build a scratch test environment: repository, server, agent, checkouts.

    local:   directory to build the environment in (wiped if it exists)
    remote:  server tuple for the first test repository
    remote2: server tuple for the second test repository
    Returns the two checkout objects.
    """
    global sh, sht
    global ag, agt
    # start from a clean slate
    if path.exists(local):
        rm_rf([local])
    os.makedirs(path.join(local, 'repo'))
    set_file_contents(path.join(local, 'repo', 'codeville_repository'), '')

    # password file with two test accounts (empty passwords)
    from .passwd import Passwd
    pw = Passwd(path.join(local, 'repo', 'passwd'), create=1)
    pw.add('unittest', '')
    pw.add('unittest2', '')

    # minimal server configuration pointing at the scratch repository
    from configparser import ConfigParser
    sconfig = ConfigParser()
    sconfig.add_section('control')
    sconfig.set('control', 'backup', 'False')
    sconfig.set('control', 'datadir', path.join(local, 'repo'))
    sconfig.add_section('post-commit')

    # run the test server in a background thread
    sh = ServerHandler(sconfig)
    sh.bind(remote[1])
    sh.db_init(init=True)
    sht = Thread(target = sh.listen_forever, args = [])
    sht.start()

    # run the auth agent in a background thread
    from .agent import Agent
    ag = Agent()
    auth_path = tempfile.mkdtemp('', 'cdv-')
    auth_file = path.join(auth_path, 'agent.test')
    ag.listen_sock(auth_path, auth_file)
    agt = Thread(target = ag.listen, args = [])
    agt.start()

    # first checkout, owned by 'unittest'
    os.makedirs(path.join(local, 'co'))
    co = Checkout(path.join(local, 'co'), init=True)
    txn = co.txn_begin()
    co.varsdb.put('user', 'unittest', txn=txn)
    co.txn_commit(txn)

    # second checkout, owned by 'unittest2'
    os.makedirs(path.join(local, 'co2'))
    co2 = Checkout(path.join(local, 'co2'), init=True)
    txn = co2.txn_begin()
    co2.varsdb.put('user', 'unittest2', txn=txn)
    co2.txn_commit(txn)

    co.nopass = co2.nopass = 1
    co.repo = co2.repo = tuple_to_server(remote)
    create_repo(co, remote)
    create_repo(co, remote2)
    return co, co2
2298
def shutdown_test(local, cos):
    """Stop the test server and agent and close the given checkouts.

    local: unused here; kept for symmetry with init_test
    cos:   list of checkout objects to close
    """
    if sh is not None:
        # wake the server loop with a throwaway connection so it can exit
        sh.rs.doneflag.set()
        sh.rs.start_connection(('localhost', 6602))
        sh.shutdown.wait()
        sh.close()

    from errno import ECONNRESET
    import socket
    if ag is not None:
        # same trick for the agent: connect once so its loop sees the flag
        ag.shutdown_flag = 1
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.connect(ag.auth_file)
        try:
            sock.recv(1)
        except socket.error as reason:
            # NOTE(review): indexing the exception (reason[0]) only works on
            # Python 2 — reason.errno is the Python 3 form; confirm runtime.
            assert reason[0] == ECONNRESET
        else:
            sock.close()

    for co in cos:
        co.close()
2321
def reset_co(co):
    """Return a checkout to a pristine, empty state.

    Resets the heads to the root node, clears the edit/mtime/filename
    tables, and removes everything in the working directory except the
    .cdv metadata directory.
    """
    txn = co.txn_begin()
    co.linforepo.put('heads', bencode([rootnode]), txn=txn)
    for table in (co.editsdb, co.modtimesdb, co.filenamesdb):
        table.truncate(txn=txn)
    co.txn_commit(txn)
    co.handle_name_cache = {}
    for entry in os.listdir(co.local):
        if entry != '.cdv':
            rm_rf([path.join(co.local, entry)])
2334
def reset_test(co, co2, remote, remote2=None):
    """Wipe and recreate the test repositories, then reset both checkouts."""
    repos = [remote]
    if remote2:
        repos.append(remote2)
    for repository in repos:
        remove_repo(co, repository)
        create_repo(co, repository)
    for checkout in (co, co2):
        reset_co(checkout)
2343
def test_client():
    """Set up the client test environment, run the suite, then tear down."""
    global ServerHandler, Thread
    from .server import ServerHandler
    from threading import Thread

    testdir = path.abspath('test')
    co_path = path.join(testdir, 'co')
    co2_path = path.join(testdir, 'co2')

    first_repo = server_to_tuple('cdv://localhost:6602/unittest')
    second_repo = server_to_tuple('cdv://localhost:6602/unittest2')

    checkout, checkout2 = init_test(testdir, first_repo, second_repo)
    try:
        _test_client(checkout, co_path, checkout2, co2_path,
                     first_repo, second_repo)
    except Exception:
        # tear down the server/agent before propagating the failure
        shutdown_test(testdir, [checkout, checkout2])
        raise

    shutdown_test(testdir, [checkout, checkout2])
    if path.exists(testdir):
        rm_rf([testdir])
    return
2367
2368def _test_client(co, cop, co2, co2p, repo, repo2):
2369    print('TESTING merge conflict')
2370    set_file_contents(path.join(cop, 'a'), "aaa\nbbb\nccc\nddd\neee\nfff\n")
2371    add(co, [path.join(cop, 'a')])
2372    assert commit(co, repo, '') == 0
2373    assert update(co2, repo) == 0
2374    set_file_contents(path.join(cop, 'a'), "aaa\nbbb\nccc\nfoo\nddd\neee\nfff\n")
2375    os.utime(path.join(cop, 'a'), (0, 0))
2376    assert commit(co, repo, '') == 0
2377    set_file_contents(path.join(co2p, 'a'), "aaa\nbbb\nccc\nbar\nddd\neee\nfff\n")
2378    os.utime(path.join(co2p, 'a'), (0, 0))
2379    assert commit(co2, repo, '') == 1
2380    assert update(co2, repo) == 0
2381    assert file_contents(path.join(co2p, 'a')) == "aaa\nbbb\nccc\n<<<<<<< local\nbar\n=======\nfoo\n>>>>>>> remote\nddd\neee\nfff\n"
2382    print('TESTING add conflict')
2383    reset_test(co, co2, repo)
2384    set_file_contents(path.join(cop, 'a'), 'foo')
2385    add(co, [path.join(cop, 'a')])
2386    assert commit(co, repo, '') == 0
2387    set_file_contents(path.join(co2p, 'a'), 'bar')
2388    add(co2, [path.join(co2p, 'a')])
2389    assert commit(co2, repo, '') == 1
2390    assert update(co2, repo) == 0
2391    assert path.exists(path.join(co2p, 'a.nameconflict.local'))
2392    assert path.exists(path.join(co2p, 'a.nameconflict.remote'))
2393
2394    # use the agent for the rest of the tests
2395    os.environ['CDV_AUTH_SOCK'] = ag.auth_file
2396    co.nopass = co2.nopass = 2
2397
2398    print('TESTING rename and add file of same name')
2399    reset_test(co, co2, repo)
2400    set_file_contents(path.join(cop, 'a'), '')
2401    add(co, [path.join(cop, 'a')])
2402    assert commit(co, repo, '') == 0
2403    assert update(co2, repo) == 0
2404    rename(co2, path.join(co2p, 'a'), path.join(co2p, 'b'))
2405    set_file_contents(path.join(co2p, 'a'), '')
2406    add(co2, [path.join(co2p, 'a')])
2407    assert commit(co2, repo, '') == 0
2408    assert update(co, repo) == 0
2409    assert path.exists(path.join(cop, 'a'))
2410    assert path.exists(path.join(cop, 'b'))
2411    print('TESTING add file conflicting with remote rename')
2412    reset_test(co, co2, repo)
2413    set_file_contents(path.join(cop, 'a'), '')
2414    add(co, [path.join(cop, 'a')])
2415    assert commit(co, repo, '') == 0
2416    assert update(co2, repo) == 0
2417    rename(co, path.join(cop, 'a'), path.join(cop, 'b'))
2418    assert commit(co, repo, '') == 0
2419    set_file_contents(path.join(co2p, 'b'), '')
2420    add(co2, [path.join(co2p, 'b')])
2421    assert commit(co2, repo, '') == 1
2422    assert update(co2, repo) == 0
2423    assert path.exists(path.join(co2p, 'b.nameconflict.local'))
2424    assert path.exists(path.join(co2p, 'b.nameconflict.remote'))
2425    print('TESTING add file conflicting with remote rename and merge conflict')
2426    reset_test(co, co2, repo)
2427    set_file_contents(path.join(cop, 'a'), "foo\n")
2428    add(co, [path.join(cop, 'a')])
2429    assert commit(co, repo, '') == 0
2430    assert update(co2, repo) == 0
2431    append_file_contents(path.join(cop, 'a'), "bar\n")
2432    edit(co, [path.join(cop, 'a')])
2433    rename(co, path.join(cop, 'a'), path.join(cop, 'b'))
2434    assert commit(co, repo, '') == 0
2435    set_file_contents(path.join(co2p, 'b'), '')
2436    add(co2, [path.join(co2p, 'b')])
2437    append_file_contents(path.join(co2p, 'a'), "baz\n")
2438    edit(co2, [path.join(co2p, 'a')])
2439    assert commit(co2, repo, '') == 1
2440    assert update(co2, repo) == 0
2441    assert path.exists(path.join(co2p, 'b.nameconflict.local'))
2442    assert path.exists(path.join(co2p, 'b.nameconflict.remote'))
2443    assert file_contents(path.join(co2p, 'b.nameconflict.remote')) == '<<<<<<< local\nfoo\nbaz\n\n=======\nfoo\nbar\n\n>>>>>>> remote'
2444    print('TESTING conflicting local and remote rename')
2445    reset_test(co, co2, repo)
2446    set_file_contents(path.join(cop, 'a'), '')
2447    add(co, [path.join(cop, 'a')])
2448    assert commit(co, repo, '') == 0
2449    assert update(co2, repo) == 0
2450    rename(co, path.join(cop, 'a'), path.join(cop, 'b'))
2451    assert commit(co, repo, '') == 0
2452    rename(co2, path.join(co2p, 'a'), path.join(co2p, 'c'))
2453    assert commit(co2, repo, '') == 1
2454    assert update(co2, repo) == 0
2455    assert path.exists(path.join(co2p, 'c.nameconflict'))
2456    assert path.exists(path.join(co2p, 'c.nameconflict.info'))
2457    print('TESTING multiple conflicting local and remote rename')
2458    reset_test(co, co2, repo)
2459    os.makedirs(path.join(cop, 'a'))
2460    set_file_contents(path.join(cop, 'a', 'x'), '')
2461    os.makedirs(path.join(cop, 'b'))
2462    set_file_contents(path.join(cop, 'b', 'y'), '')
2463    os.makedirs(path.join(cop, 'c'))
2464    set_file_contents(path.join(cop, 'c', 'z'), '')
2465    add(co, [path.join(cop, 'a', 'x'), path.join(cop, 'b', 'y'), path.join(cop, 'c', 'z')])
2466    assert commit(co, repo, '') == 0
2467    assert update(co2, repo) == 0
2468    rename(co, path.join(cop, 'b', 'y'), path.join(cop, 'a', 'y'))
2469    rename(co, path.join(cop, 'c', 'z'), path.join(cop, 'a', 'z'))
2470    assert commit(co, repo, '') == 0
2471    rename(co2, path.join(co2p, 'b', 'y'), path.join(co2p, 'b', 'x'))
2472    rename(co2, path.join(co2p, 'c', 'z'), path.join(co2p, 'c', 'x'))
2473    assert commit(co2, repo, '') == 1
2474    assert update(co2, repo) == 0
2475    assert path.exists(path.join(co2p, 'b', 'x.nameconflict'))
2476    assert path.exists(path.join(co2p, 'b', 'x.nameconflict.info'))
2477    assert path.exists(path.join(co2p, 'c', 'x.nameconflict'))
2478    assert path.exists(path.join(co2p, 'c', 'x.nameconflict.info'))
2479    print('TESTING rename and back again')
2480    reset_test(co, co2, repo)
2481    set_file_contents(path.join(cop, 'a'), 'a')
2482    add(co, [path.join(cop, 'a')])
2483    assert commit(co, repo, '') == 0
2484    assert update(co2, repo) == 0
2485    rename(co, path.join(cop, 'a'), path.join(cop, 'b'))
2486    assert commit(co, repo, '') == 0
2487    rename(co, path.join(cop, 'b'), path.join(cop, 'a'))
2488    assert commit(co, repo, '') == 0
2489    assert update(co2, repo) == 0
2490    assert path.exists(path.join(co2p, 'a'))
2491    print('TESTING rename swap')
2492    reset_test(co, co2, repo)
2493    set_file_contents(path.join(cop, 'a'), 'a')
2494    set_file_contents(path.join(cop, 'b'), 'b')
2495    add(co, [path.join(cop, 'a'), path.join(cop, 'b')])
2496    assert commit(co, repo, '') == 0
2497    assert update(co2, repo) == 0
2498    rename(co, path.join(cop, 'a'), path.join(cop, 'c'))
2499    rename(co, path.join(cop, 'b'), path.join(cop, 'a'))
2500    rename(co, path.join(cop, 'c'), path.join(cop, 'b'))
2501    assert commit(co, repo, '') == 0
2502    assert update(co2, repo) == 0
2503    assert file_contents(path.join(co2p, 'a')) == 'b'
2504    assert file_contents(path.join(co2p, 'b')) == 'a'
2505    print('TESTING rename circular')
2506    reset_test(co, co2, repo)
2507    set_file_contents(path.join(cop, 'a'), 'a')
2508    set_file_contents(path.join(cop, 'b'), 'b')
2509    set_file_contents(path.join(cop, 'c'), 'c')
2510    add(co, [path.join(cop, 'a'), path.join(cop, 'b'), path.join(cop, 'c')])
2511    assert commit(co, repo, '') == 0
2512    assert update(co2, repo) == 0
2513    rename(co, path.join(cop, 'a'), path.join(cop, 'd'))
2514    rename(co, path.join(cop, 'b'), path.join(cop, 'a'))
2515    rename(co, path.join(cop, 'c'), path.join(cop, 'b'))
2516    rename(co, path.join(cop, 'd'), path.join(cop, 'c'))
2517    assert commit(co, repo, '') == 0
2518    assert update(co2, repo) == 0
2519    assert file_contents(path.join(co2p, 'a')) == 'b'
2520    assert file_contents(path.join(co2p, 'b')) == 'c'
2521    assert file_contents(path.join(co2p, 'c')) == 'a'
2522    print('TESTING clean reparent with loops in intermediate rename stages')
2523    reset_test(co, co2, repo)
2524    os.makedirs(path.join(cop, 'a', 'b', 'c', 'd'))
2525    add(co, [path.join(cop, 'a', 'b', 'c', 'd')])
2526    assert commit(co, repo, '') == 0
2527    assert update(co2, repo) == 0
2528    rename(co, path.join(cop, 'a', 'b', 'c', 'd'), path.join(cop, 'd'))
2529    rename(co, path.join(cop, 'a', 'b', 'c'), path.join(cop, 'd', 'c'))
2530    rename(co, path.join(cop, 'a', 'b'), path.join(cop, 'd', 'c', 'b'))
2531    rename(co, path.join(cop, 'a'), path.join(cop, 'd', 'c', 'b', 'a'))
2532    assert commit(co, repo, '') == 0
2533    assert update(co2, repo) == 0
2534    assert path.exists(path.join(co2p, 'd', 'c', 'b', 'a'))
2535    print('TESTING reparent twisted conflict')
2536    reset_test(co, co2, repo)
2537    os.makedirs(path.join(cop, 'a'))
2538    os.makedirs(path.join(cop, 'b'))
2539    add(co, [path.join(cop, 'a'), path.join(cop, 'b')])
2540    assert commit(co, repo, '') == 0
2541    assert update(co2, repo) == 0
2542    rename(co, path.join(cop, 'a'), path.join(cop, 'b', 'a'))
2543    assert commit(co, repo, '') == 0
2544    rename(co2, path.join(co2p, 'b'), path.join(co2p, 'a', 'b'))
2545    assert commit(co2, repo, '') == 1
2546    assert update(co2, repo) == 0
2547    assert path.exists(path.join(co2p, 'a.parentloop', 'b'))
2548    assert path.exists(path.join(co2p, 'a.parentloop.info'))
2549    print('TESTING reparent twisted conflict')
2550    reset_test(co, co2, repo)
2551    os.makedirs(path.join(cop, 'c', 'a'))
2552    os.makedirs(path.join(cop, 'b'))
2553    add(co, [path.join(cop, 'c', 'a'), path.join(cop, 'b')])
2554    assert commit(co, repo, '') == 0
2555    assert update(co2, repo) == 0
2556    rename(co, path.join(cop, 'c', 'a'), path.join(cop, 'b', 'a'))
2557    rename(co, path.join(cop, 'c'), path.join(cop, 'b', 'a', 'c'))
2558    assert commit(co, repo, '') == 0
2559    rename(co2, path.join(co2p, 'b'), path.join(co2p, 'c', 'a', 'b'))
2560    assert commit(co2, repo, '') == 1
2561    assert update(co2, repo) == 0
2562    assert path.exists(path.join(co2p, 'c.parentloop', 'a.parentloop', 'b'))
2563    assert path.exists(path.join(co2p, 'c.parentloop.info'))
2564    assert path.exists(path.join(co2p, 'c.parentloop', 'a.parentloop.info'))
2565    print('TESTING rename incidental')
2566    reset_test(co, co2, repo)
2567    os.makedirs(path.join(cop, 'a'))
2568    add(co, [path.join(cop, 'a')])
2569    assert commit(co, repo, '') == 0
2570    assert update(co2, repo) == 0
2571    rename(co, path.join(cop, 'a'), path.join(cop, 'b'))
2572    assert commit(co, repo, '') == 0
2573    rename(co2, path.join(co2p, 'a'), path.join(co2p, 'b'))
    # NOTE: throughout this suite, commit()/update() return codes appear to
    # follow the convention 0 == success and 1 == refused/conflict (inferred
    # from how the asserts are paired below) — confirm against their defs.
    #
    # Tail of a test section that begins before this chunk: the first commit
    # from co2 is expected to be refused (1), an update resolves it, and the
    # retry succeeds; then a rename in co propagates to co2 via the repo.
    assert commit(co2, repo, '') == 1
    assert update(co2, repo) == 0
    assert commit(co2, repo, '') == 0
    rename(co, path.join(cop, 'b'), path.join(cop, 'c'))
    assert commit(co, repo, '') == 0
    assert update(co2, repo) == 0
    assert path.exists(path.join(co2p, 'c'))
    print('TESTING rename dependent')
    # Rename a deeply nested path bottom-up (a/a/a/a -> b at every level) in
    # one changeset; the update in co2 must apply the renames in a working
    # order and end with b/b/b/b present.
    reset_test(co, co2, repo)
    os.makedirs(path.join(cop, 'a', 'a', 'a'))
    set_file_contents(path.join(cop, 'a', 'a', 'a', 'a'), '')
    add(co, [path.join(cop, 'a', 'a', 'a', 'a')])
    assert commit(co, repo, '') == 0
    assert update(co2, repo) == 0
    rename(co, path.join(cop, 'a', 'a', 'a', 'a'), path.join(cop, 'a', 'a', 'a', 'b'))
    rename(co, path.join(cop, 'a', 'a', 'a'), path.join(cop, 'a', 'a', 'b'))
    rename(co, path.join(cop, 'a', 'a'), path.join(cop, 'a', 'b'))
    rename(co, path.join(cop, 'a'), path.join(cop, 'b'))
    assert commit(co, repo, '') == 0
    assert update(co2, repo) == 0
    assert path.exists(path.join(co2p, 'b', 'b', 'b', 'b'))
    print('TESTING update overrides coincidental name merge')
    # Both checkouts independently rename a -> b (a coincidental name match,
    # not a propagated rename); after cross-merging through both repos, a
    # later rename b -> c in co must still carry over to co2.
    # NOTE(review): this section commits co2 to `repo2` and updates co from
    # `repo2` — presumably a second repository set up earlier in this
    # function; verify repo2 is initialized before this point.
    reset_test(co, co2, repo)
    set_file_contents(path.join(cop, 'a'), '')
    add(co, [path.join(cop, 'a')])
    assert commit(co, repo, '') == 0
    assert update(co2, repo) == 0
    rename(co, path.join(cop, 'a'), path.join(cop, 'b'))
    assert commit(co, repo, '') == 0
    rename(co2, path.join(co2p, 'a'), path.join(co2p, 'b'))
    assert commit(co2, repo2, '') == 0
    assert update(co, repo2) == 0
    assert update(co2, repo) == 0
    assert commit(co2, repo, '') == 0
    assert update(co, repo) == 0
    rename(co, path.join(cop, 'b'), path.join(cop, 'c'))
    assert commit(co, repo, '') == 0
    assert update(co2, repo) == 0
    assert path.exists(path.join(co2p, 'c'))
    print('TESTING delete orphan')
    # co2 deletes directory 'a' while co has committed a new child a/a.
    # co2's commit is refused (1); updating leaves the now-parentless child
    # under the '.orphaned' convention with a companion '.info' file.
    reset_test(co, co2, repo)
    os.makedirs(path.join(cop, 'a'))
    add(co, [path.join(cop, 'a')])
    assert commit(co, repo, '') == 0
    assert update(co2, repo) == 0
    set_file_contents(path.join(cop, 'a', 'a'), '')
    add(co, [path.join(cop, 'a', 'a')])
    assert commit(co, repo, '') == 0
    delete(co2, [path.join(co2p, 'a')])
    assert commit(co2, repo, '') == 1
    assert update(co2, repo) == 0
    assert path.exists(path.join(co2p, 'a.orphaned'))
    assert path.exists(path.join(co2p, 'a.orphaned.info'))
    print('TESTING remote orphan')
    # Mirror of the previous case: the delete is committed first (by co2),
    # so it is co's commit of the new child that is refused, and the orphan
    # materializes in co's tree after its update.
    reset_test(co, co2, repo)
    os.makedirs(path.join(cop, 'a'))
    add(co, [path.join(cop, 'a')])
    assert commit(co, repo, '') == 0
    assert update(co2, repo) == 0
    delete(co2, [path.join(co2p, 'a')])
    assert commit(co2, repo, '') == 0
    set_file_contents(path.join(cop, 'a', 'a'), '')
    add(co, [path.join(cop, 'a', 'a')])
    assert commit(co, repo, '') == 1
    assert update(co, repo) == 0
    assert path.exists(path.join(cop, 'a.orphaned'))
    assert path.exists(path.join(cop, 'a.orphaned.info'))
    print('TESTING delete and reuse name')
    # Delete 'b' and, in the same changeset, rename 'a' to 'b': the freed
    # name must be reusable, and co2 ends with b/a and no 'a'.
    reset_test(co, co2, repo)
    os.makedirs(path.join(cop, 'a', 'a'))
    os.makedirs(path.join(cop, 'b'))
    add(co, [path.join(cop, 'a', 'a'), path.join(cop, 'b')])
    assert commit(co, repo, '') == 0
    assert update(co2, repo) == 0
    delete(co, [path.join(cop, 'b')])
    rename(co, path.join(cop, 'a'), path.join(cop, 'b'))
    assert commit(co, repo, '') == 0
    assert update(co2, repo) == 0
    assert path.exists(path.join(co2p, 'b', 'a'))
    assert not path.exists(path.join(co2p, 'a'))
    print('TESTING delete dependent')
    # Delete a nested chain bottom-up in one changeset; co2's update must
    # order the deletes correctly and remove the whole subtree.
    reset_test(co, co2, repo)
    os.makedirs(path.join(cop, 'a', 'a', 'a', 'a'))
    add(co, [path.join(cop, 'a', 'a', 'a', 'a')])
    assert commit(co, repo, '') == 0
    assert update(co2, repo) == 0
    delete(co, [path.join(cop, 'a', 'a', 'a', 'a')])
    delete(co, [path.join(cop, 'a', 'a', 'a')])
    delete(co, [path.join(cop, 'a', 'a')])
    delete(co, [path.join(cop, 'a')])
    assert commit(co, repo, '') == 0
    assert update(co2, repo) == 0
    assert not path.exists(path.join(co2p, 'a'))
    print('TESTING delete within parent loop')
    # co moves a under b (privately, commit to None), then deletes it; co2
    # moves b under a, forming a parent loop with co's move.  co2's commit
    # is refused (1) and its update orphans 'b'.
    reset_test(co, co2, repo)
    os.makedirs(path.join(cop, 'a'))
    os.makedirs(path.join(cop, 'b'))
    add(co, [path.join(cop, 'a'), path.join(cop, 'b')])
    assert commit(co, repo, '') == 0
    assert update(co2, repo) == 0
    rename(co, path.join(cop, 'a'), path.join(cop, 'b', 'a'))
    # commit with repo=None: a local-only commit (not pushed to the server)
    # — presumably so the rename and the delete land as separate changesets.
    assert commit(co, None, '') == 0
    assert update(co, repo) == 0
    delete(co, [path.join(cop, 'b', 'a')])
    assert commit(co, repo, '') == 0
    rename(co2, path.join(co2p, 'b'), path.join(co2p, 'a', 'b'))
    assert commit(co2, repo, '') == 1
    assert update(co2, repo) == 0
    assert path.exists(path.join(co2p, 'b.orphaned'))
    assert path.exists(path.join(co2p, 'b.orphaned.info'))
    print('TESTING delete entire parent loop')
    # Both sides create the loop (a under b, b under a) but each then
    # deletes the moved directory; merging the two delete changesets must
    # leave neither 'a' nor 'b' in either checkout.
    reset_test(co, co2, repo)
    os.makedirs(path.join(cop, 'a'))
    os.makedirs(path.join(cop, 'b'))
    add(co, [path.join(cop, 'a'), path.join(cop, 'b')])
    assert commit(co, repo, '') == 0
    assert update(co2, repo) == 0
    rename(co, path.join(cop, 'a'), path.join(cop, 'b', 'a'))
    assert commit(co, None, '') == 0
    delete(co, [path.join(cop, 'b', 'a')])
    assert commit(co, repo, '') == 0
    rename(co2, path.join(co2p, 'b'), path.join(co2p, 'a', 'b'))
    assert commit(co2, None, '') == 0
    delete(co2, [path.join(co2p, 'a', 'b')])
    assert commit(co2, repo, '') == 0
    assert update(co, repo) == 0
    assert update(co2, repo) == 0
    assert not path.exists(path.join(cop, 'a'))
    assert not path.exists(path.join(cop, 'b'))
    assert not path.exists(path.join(co2p, 'a'))
    assert not path.exists(path.join(co2p, 'b'))
    print('TESTING remote add and delete')
    # A file added and then deleted remotely before co2 ever updates: the
    # update must net out to the file never appearing in co2.
    reset_test(co, co2, repo)
    set_file_contents(path.join(cop, 'a'), '')
    add(co, [path.join(cop, 'a')])
    assert commit(co, repo, '') == 0
    delete(co, [path.join(cop, 'a')])
    assert commit(co, repo, '') == 0
    assert update(co2, repo) == 0
    assert not path.exists(path.join(co2p, 'a'))
    print('TESTING unique name mangling')
    # co renames a -> a1 while co2 renames a -> a2 and already has an
    # unversioned-then-added file at 'a2.nameconflict'; the update must
    # mangle the conflicting name uniquely (suffix '2') and emit '.info'.
    reset_test(co, co2, repo)
    set_file_contents(path.join(cop, 'a'), '')
    add(co, [path.join(cop, 'a')])
    assert commit(co, repo, '') == 0
    assert update(co2, repo) == 0
    rename(co, path.join(cop, 'a'), path.join(cop, 'a1'))
    assert commit(co, repo, '') == 0
    set_file_contents(path.join(co2p, 'a2.nameconflict'), '')
    add(co2, [path.join(co2p, 'a2.nameconflict')])
    rename(co2, path.join(co2p, 'a'), path.join(co2p, 'a2'))
    assert update(co2, repo) == 0
    assert path.exists(path.join(co2p, 'a2.nameconflict'))
    assert path.exists(path.join(co2p, 'a2.nameconflict2'))
    assert path.exists(path.join(co2p, 'a2.nameconflict2.info'))
    # XXX: had to relax this restriction for now, fix next history rewrite
    #print 'TESTING deleted file modified remotely'
    #reset_test(co, co2, repo)
    #set_file_contents(path.join(cop, 'a'), '')
    #add(co, [path.join(cop, 'a')])
    #assert commit(co, repo, '') == 0
    #assert update(co2, repo) == 0
    #delete(co, [path.join(cop, 'a')])
    #assert commit(co, repo, '') == 0
    #set_file_contents(path.join(co2p, 'a'), 'foo')
    #edit(co2, [path.join(co2p, 'a')])
    #assert commit(co2, repo, '') == 1
    #assert update(co2, repo) == 0
    #assert commit(co2, repo, '') == 0
    #assert update(co, repo) == 0
    #assert not path.exists(path.join(cop, 'a'))
    print('TESTING independent local and remote deletes')
    # Both checkouts delete the same file independently; merging the two
    # deletes must succeed and the file stays gone on both sides.
    # NOTE(review): this section has no reset_test() call, unlike every
    # other section — it runs on the state left by the previous test.
    # Possibly an oversight; confirm before relying on the isolation.
    set_file_contents(path.join(cop, 'a'), '')
    add(co, [path.join(cop, 'a')])
    assert commit(co, repo, '') == 0
    assert update(co2, repo) == 0
    delete(co, [path.join(cop, 'a')])
    assert commit(co, repo, '') == 0
    delete(co2, [path.join(co2p, 'a')])
    assert commit(co2, repo, '') == 0
    assert update(co2, repo) == 0
    assert not path.exists(path.join(cop, 'a'))
    assert not path.exists(path.join(co2p, 'a'))
    print('TESTING local filesystem conflicts with repository')
    # The repository says 'a' is a directory, but co2's working tree has an
    # unversioned file at that path: update must fail (1) and leave the
    # local file untouched.
    reset_test(co, co2, repo)
    os.makedirs(path.join(cop, 'a'))
    add(co, [path.join(cop, 'a')])
    assert commit(co, repo, '') == 0
    set_file_contents(path.join(co2p, 'a'), '')
    assert update(co2, repo) == 1
    assert path.isfile(path.join(co2p, 'a'))
    print('TESTING non-existent parents needed for update')
    # An update that moves a into b requires both to exist locally; with
    # 'a' removed, then 'b' removed, then 'b' replaced by a plain file, the
    # update must keep failing (1) rather than corrupt the tree.
    reset_test(co, co2, repo)
    os.makedirs(path.join(cop, 'a'))
    os.makedirs(path.join(cop, 'b'))
    add(co, [path.join(cop, 'a',), path.join(cop, 'b',)])
    assert commit(co, repo, '') == 0
    assert update(co2, repo) == 0
    rename(co, path.join(cop, 'a'), path.join(cop, 'b', 'a'))
    assert commit(co, repo, '') == 0
    os.rmdir(path.join(co2p, 'a'))
    assert update(co2, repo) == 1
    os.rmdir(path.join(co2p, 'b'))
    assert update(co2, repo) == 1
    set_file_contents(path.join(co2p, 'b'), '')
    assert update(co2, repo) == 1
    assert not path.exists(path.join(co2p, 'a'))
    assert path.isfile(path.join(co2p, 'b'))
2782