from __future__ import print_function
from __future__ import absolute_import
# Written by Bram Cohen and Ross Cohen
# Maintained by Chris Hutchinson
# see LICENSE.txt for license information

from builtins import next
from builtins import str
from builtins import range
from future.utils import raise_
import binascii
import hashlib
import struct
import zlib
from os import path
from time import localtime, strftime
from xml.sax import saxutils

from sets import Set
from sha import sha

from Codeville.bencode import bdecode, bencode
from Codeville.db import db
from .merge import find_conflict, find_conflict_multiple_safe, MergeError, replay
22
23roothandle = '\xdfp\xce\xbaNt5\xf9\xa1\xabd\xf2\xc4\x87\xc5\xd1\x0bI\x8d\xb4'
24rootnode = '\xb3L\xac\x1f\x98B\x15\\\x8c\t0&\xd7m\xecK\xdd\n\x81\xc4'
25
class HistoryError(Exception):
    """Raised when stored history is malformed or violates an invariant."""
    pass
28
def dmerge(da, db):
    """Merge two lists into a single list with duplicates removed.

    (The second parameter shadows the module-level `db` import inside
    this function only; the name is kept for caller compatibility.)
    """
    merged = dict.fromkeys(da)
    merged.update(dict.fromkeys(db))
    return list(merged)
33
def rename_conflict_check(linfo, rinfo):
    """Decide how two name states for the same handle relate.

    Returns one of:
      ('local',   rename points) -- local state wins (or both agree)
      ('remote',  rename points) -- remote state wins
      ('conflict', None)         -- neither side dominates the other
    A side wins when the other side's rename point is already among the
    points it has seen.
    """
    if linfo['name'] == rinfo['name']:
        try:
            same_parent = linfo['parent'] == rinfo['parent']
        except KeyError:
            # only the root lacks a parent; both sides must be the root
            assert linfo['rename point'] == rinfo['rename point'] == [rootnode]
            return ('local', linfo['rename point'])
        if same_parent:
            return ('local', dmerge(linfo['rename point'], rinfo['rename point']))
    if any(p in linfo['points'] for p in rinfo['rename point']):
        return ('local', linfo['rename point'])
    if any(p in rinfo['points'] for p in linfo['rename point']):
        return ('remote', rinfo['rename point'])
    return ('conflict', None)
49
def _name_use_count(co, state, point, func, txn):
    """Return the handles occupying the (parent, name) slot described by
    *state* as of *point*.

    Candidates come from allnamesdb (parent handle + name -> handle,
    with duplicates); each is resolved with *func* and kept only if it
    is live and still carries this exact parent and name.
    """
    cursor = co.allnamesdb.cursor(txn=txn)
    lookup = state['parent'] + state['name']
    try:
        key, value = cursor.set(lookup)
    except (db.DBNotFoundError, TypeError):
        # no entry for this parent+name at all
        # NOTE(review): the cursor is left unclosed on this early return
        return []
    named = []
    while key == lookup:
        vinfo = func(co, value, point, txn)
        if vinfo is not None:
            # count only live handles still under this name and parent
            if 'delete' not in vinfo and vinfo['parent'] == state['parent'] and vinfo['name'] == state['name']:
                named.append(value)
        # next_dup() is broken
        foo = next(cursor)
        if foo is None:
            break
        key, value = foo
    cursor.close()
    return named
70
def name_use_count(co, state, point, txn):
    """Handles using state's (parent, name) as of *point*, resolved via
    the cached name-at-point helper (see _name_use_count)."""
    return _name_use_count(co, state, point, __handle_name_at_point, txn)
73
def _children_count(co, handle, point, func, txn):
    """Return the handles that are live children of directory *handle*
    as of *point*, resolving each candidate's name state with *func*.
    """
    cursor = co.allnamesdb.cursor(txn=txn)
    try:
        foo = cursor.set_range(handle)
    except (db.DBNotFoundError, TypeError):
        foo = None
    children = {}
    while foo is not None:
        key, value = foo
        # allnamesdb keys are 20-byte parent handle + name
        parent = key[:20]
        if parent != handle:
            break
        vinfo = func(co, value, point, txn)
        if vinfo is not None:
            # keep only handles that still live under this directory
            if 'delete' not in vinfo and vinfo['parent'] == handle:
                children[value] = None
        foo = next(cursor)
    cursor.close()
    return list(children.keys())
93
def children_count(co, handle, point, txn):
    """Live children of directory *handle* at *point* (see _children_count)."""
    return _children_count(co, handle, point, __handle_name_at_point, txn)
96
def parent_loop_check(co, handle, point, txn):
    """Walk parents from *handle* toward the root as of *point*.

    Returns the offending handle if a directory cycle is found or a
    parent link cannot be resolved; returns None when the walk reaches
    the root cleanly.
    """
    hseen = {}
    while handle != roothandle:
        if handle in hseen:
            # visited twice: directory cycle
            return handle
        hseen[handle] = 1
        change = handle_last_modified(co, co.names, handle, point, txn)
        if change is None:
            # no name history for this handle at this point
            return handle
        handle = _handle_name_at_point(co, handle, change, txn)['parent']
    return None
108
def is_ancestor(co, ancestor, point, txn):
    """Return 1 when *ancestor* is an ancestor of (or equal to) *point*
    in the changeset DAG; a None point has no ancestors."""
    return 0 if point is None else _is_ancestor(co, ancestor, [point], txn)
113
def _is_ancestor(co, ancestor, points, txn):
    """Return 1 if *ancestor* is one of, or an ancestor of any of,
    *points* in the changeset DAG; else 0.

    Prunes using the 'generation' (distance-from-root) number stored in
    branchmapdb: nothing at a generation <= the ancestor's can have it
    as a strict ancestor.
    """
    ainfo = db_get(co, co.branchmapdb, ancestor, txn)
    # work on a copy; the search mutates the worklist
    points = points[:]
    state = {}
    while len(points):
        pnext = points.pop()
        if pnext == ancestor:
            return 1
        if pnext in state:
            continue
        state[pnext] = 1
        pinfo = db_get(co, co.branchmapdb, pnext, txn)
        if pinfo['generation'] <= ainfo['generation']:
            continue
        points.extend(pinfo['precursors'])
    return 0
132
def read_diff(co, handle, point, txn):
    """Read the raw (compressed) diff stored for *handle* at *point*.

    The dag entry for handle+point records the offset/length of the diff
    inside the per-handle diff file; returns None if none is recorded.
    """
    hinfo = bdecode(co.contents.dagdb.get(handle + point, txn=txn))
    # with-statement so the file is closed even if _read_diff raises
    with open(path.join(co.cpath, binascii.hexlify(handle)), 'rb') as hfile:
        return _read_diff(hinfo, hfile)
139
140def _read_diff(index, hfile):
141    if 'handle' not in index:
142        return None
143    try:
144        hfile.seek(index['handle']['offset'])
145    except IOError:
146        print(index)
147        raise
148    return hfile.read(index['handle']['length'])
149
def write_diff(co, handle, diff, txn):
    """Append *diff* to the end of the handle's diff file.

    The current end-of-file offset for each handle is tracked in the
    contents dagdb under the bare handle key and updated here.  Returns
    the {'offset', 'length'} index describing where the diff landed.
    """
    cdagdb = co.contents.dagdb
    try:
        fend = struct.unpack('<I', cdagdb.get(handle, txn=txn))[0]
        mode = 'r+b'
    except TypeError:
        # no offset record yet -- first diff for this handle
        fend = 0
        mode = 'wb'
    # with-statement so the file is closed even if the write fails
    with open(path.join(co.cpath, binascii.hexlify(handle)), mode) as hfile:
        hfile.seek(fend)
        hfile.write(diff)
    index = {'offset': fend, 'length': len(diff)}
    fend += index['length']
    cdagdb.put(handle, struct.pack('<I', fend), txn=txn)
    return index
166
167def _write_diff(co, hfile, handle, diff, txn):
168    cdagdb = co.contents.dagdb
169    try:
170        fend = struct.unpack('<I', cdagdb.get(handle, txn=txn))[0]
171    except (db.DBNotFoundError, TypeError):
172        fend = 0
173    hfile.seek(fend)
174    hfile.write(diff)
175    index = {'offset': fend, 'length': len(diff)}
176    fend += index['length']
177    cdagdb.put(handle, struct.pack('<I', fend), txn=txn)
178    return index
179
def write_index(co, point, handle, index, txn):
    """Store *index* (diff offset/length) under handle+point, merging it
    into any existing dag entry for that key."""
    cdagdb = co.contents.dagdb
    key = handle + point
    try:
        entry = bdecode(cdagdb.get(key, txn=txn))
    except (db.DBNotFoundError, TypeError):
        # no prior entry for this handle at this point
        entry = {}
    entry['handle'] = index
    cdagdb.put(key, bencode(entry), txn=txn)
188
def write_changeset(co, point, cset, txn):
    """Store the raw (bencoded) changeset *cset* under its id *point*."""
    co.lcrepo.put(point, cset, txn=txn)
191
def verify_history(co, point, named, modified, txn):
    """Run full consistency checks on the name state of every handle in
    *named* as of *point* (raises HistoryError on violations).

    *modified* is accepted for interface symmetry but not used here.
    """
    for handle in named:
        handle_name_at_point(co, handle, point, txn, dochecks=1)
195
def sync_history(co, point, txn, cache=dict()):
    """Bring the derived databases up to date with changeset *point* and
    all of its not-yet-synced ancestors.

    Precursors are always processed before their descendants.  Returns
    the (named, modified) handle lists of the last changeset synced.

    NOTE(review): the mutable default *cache* persists across calls and
    acts as a module-lifetime decoded-changeset cache -- it appears
    deliberate, but confirm before changing it.
    """
    named, modified = [], []

    points = [point]
    while len(points):
        npoint = points[-1]

        if npoint in cache:
            pinfo = cache[npoint]
        else:
            pinfo = bdecode(co.lcrepo.get(npoint, txn=txn))
            cache[npoint] = pinfo

        # collect precursors that have not yet been synced
        pres = []
        for pre in pinfo['precursors']:
            if co.changesdb.has_key(binascii.hexlify(pre), txn):
                continue
            pres.append(pre)
        pres.reverse()

        points.extend(pres)
        if pres != []:
            # sync the precursors first, then revisit npoint
            continue

        npoint = points.pop()

        if co.changesdb.has_key(binascii.hexlify(npoint), txn):
            continue

        named, modified = _sync_history(co, npoint, txn, cache=cache)
        # mark as synced
        co.changesdb.put(binascii.hexlify(npoint), '', txn=txn)

    return named, modified
229
def _sync_history(co, point, txn, cache=dict()):
    """Sync a single changeset *point* (whose precursors are already
    synced) into the derived databases: branch bookkeeping in
    branchdb/branchmapdb, staticdb entries for added files, and the
    per-handle name/contents mini-dags.

    Returns (named, modified): handles whose name or contents changed.

    NOTE(review): the mutable default *cache* is shared across calls;
    callers normally pass the cache owned by sync_history.
    """
    pinfo = cache[point]

    # see if we can extend an existing branch
    pre, prebminfo = None, None
    generations, pre_important = [], []
    bminfo = {'precursors': pinfo['precursors']}
    for pre in bminfo['precursors']:
        prebminfo = db_get(co, co.branchmapdb, pre, txn)
        generations.append(prebminfo['generation'])

        if 'branch' in bminfo:
            continue

        binfo = db_get(co, co.branchdb, prebminfo['branch'], txn)
        if prebminfo['branchnum'] == binfo['last']:
            # precursor is the tip of its branch: extend that branch
            bminfo['branch'] = prebminfo['branch']
            bminfo['branchnum'] = prebminfo['branchnum'] + 1
            binfo['last'] += 1
            db_put(co, co.branchdb, bminfo['branch'], binfo, txn)

            pre_important.append(pre)

    # generation == 'distance to root node', the exception is for the root
    try:
        bminfo['generation'] = max(generations) + 1
    except ValueError:
        bminfo['generation'] = 0

    # if we couldn't extend a branch, start a new one
    if 'branch' not in bminfo:
        bminfo['branch'] = bdecode(co.linforepo.get('branchmax', txn=txn)) + 1
        co.linforepo.put('branchmax', bencode(bminfo['branch']), txn=txn)
        bminfo['branchnum'] = 0

        try:
            # using the last precursor for this did the best empirically,
            # beating out both first precursor and largest branch number.
            binfo = {'last': 0,
                     'parent': prebminfo['branch'],
                     'parentnum': prebminfo['branchnum']}
            pre_important.append(pre)

        except TypeError:
            # special stuff for the rootnode
            assert bminfo['branch'] == 1
            binfo = {'last': 0}

        db_put(co, co.branchdb, bminfo['branch'], binfo, txn)

    db_put(co, co.branchmapdb, point, bminfo, txn)

    # put new files into staticdb
    for (handle, value) in list(pinfo['handles'].items()):
        if 'add' in value:
            validate_handle(handle, pinfo['precursors'], value)
            db_put(co, co.staticdb, handle, {'type': value['add']['type']}, txn)

    # figure out which files were modified here and hand off to helpers
    named, modified = handles_in_branch(co, pre_important, [point], txn, cache=cache)

    pinfo['point'] = point
    _update_mini_dag(co, co.names, _update_helper_name,
                     named, pinfo, txn)
    _update_mini_dag(co, co.contents, _update_helper_content,
                     modified, pinfo, txn)

    return (named, modified)
298
def validate_handle(handle, precursors, hinfo):
    """Check that *handle* is the SHA-1 of the change that created it.

    A handle id is defined as sha1(bencode({'precursors': ...,
    'handle': ...})).  Raises HistoryError on mismatch.

    Ported off the Python-2-only `sha` module onto hashlib.sha1.
    """
    encinfo = bencode({'precursors': precursors, 'handle': hinfo})
    if hashlib.sha1(encinfo).digest() != handle:
        raise HistoryError('bad identifier')
303
def pretty_print_big_dag(co, heads):
    """Print the whole changeset DAG in graphviz 'dot' format.

    Clean merge points and the root are drawn solid lightblue; other
    changesets dashed orange.
    """
    print('digraph {')

    points = heads[:]
    seen = {}
    while len(points):
        point = points.pop()
        if point in seen:
            continue
        seen[point] = 1

        info = bdecode(co.lcrepo.get(point))
        sid = short_id(co, point)

        color = "orange"
        style = "dashed"
        if clean_merge_point(info) or point == rootnode:
            color = "lightblue"
            style = "solid"

        label = sid
        if point == rootnode:
            label = "root"

        print('c_%s [label="%s",style=filled,color=%s]' % (sid, label, color))

        for pre in info['precursors']:
            print('c_%s -> c_%s [style=%s]' % (short_id(co, pre), sid, style))
            # only the first parent edge keeps the solid style
            style = "dashed"

        # fixed: extend directly instead of a no-op comprehension that
        # also shadowed the loop variable `point`
        points.extend(info['precursors'])

    print('}')
337
def pretty_print_dag(co, handle, heads):
    """Print the per-handle contents mini-DAG in graphviz 'dot' format,
    starting from the last-modified points of the given *heads*."""
    print('digraph {')

    points = []
    for point in heads:
        head = handle_last_modified(co, co.contents, handle, point, None)
        if head is not None:
            points.append(head)

    cdagdb = co.contents.dagdb
    seen = {}
    while len(points):
        point = points.pop()
        if point in seen:
            continue
        seen[point] = 1

        sid = short_id(co, point)
        print('c_%s [label="%s"]' % (sid, sid))

        info = bdecode(cdagdb.get(handle + point))
        for pre, _index in info['precursors']:
            print('c_%s -> c_%s' % (sid, short_id(co, pre)))

        # fixed: pull precursor points out without shadowing `point`
        points.extend(pre for pre, _index in info['precursors'])

    print('}')
365
def _update_mini_dag(co, changedbs, helper, handles, cset, txn):
    """Record changeset *cset* in the per-handle mini-dag for each handle
    in *handles*, using *helper* to extract the per-handle payload.

    *changedbs* is co.names or co.contents.  Also maintains the indexdb
    (handle + packed branch/branchnum -> point) and, for handles merged
    cleanly with no explicit change, the mergedb.
    """
    indexdb = changedbs.indexdb
    dagdb   = changedbs.dagdb

    pres = cset['precursors']
    point = cset['point']
    bminfo = db_get(co, co.branchmapdb, point, txn)
    bnum = struct.pack('>II', bminfo['branch'], bminfo['branchnum'])

    clean_merges = []
    for handle in handles:
        precursors = simplify_precursors(co, handle, changedbs, pres, txn)[0]

        mdinfo = {'handle': {}}
        if handle in cset['handles']:
            mdinfo['handle'] = helper(co, handle, point,
                                      cset['handles'][handle], txn)
        if mdinfo['handle'] == {}:
            # no explicit change; multiple precursors means this
            # changeset merged the handle's history cleanly
            del mdinfo['handle']
            if len(precursors) > 1:
                clean_merges.append(handle)
        mdinfo['precursors'] = precursors
        if precursors == []:
            # only a newly-added handle may have no precursors
            assert 'add' in cset['handles'][handle]

        dagdb.put(handle + point, bencode(mdinfo), txn=txn)
        indexdb.put(handle + bnum, point, txn=txn)

    if len(clean_merges) > 0:
        changedbs.mergedb.put(point, bencode(clean_merges), txn=txn)
    return
398
def simplify_precursors(co, handle, changedbs, pres, txn):
    """Map the big-DAG precursors *pres* to precursor points in this
    handle's little DAG, dropping precursors with no history for the
    handle and precursors that are ancestors of other precursors.

    Returns ([(point, original_index), ...], len(pres)).
    """
    # map big DAG precursors to little DAG
    dagdb = changedbs.dagdb
    precursors, indices = [], []
    for i in range(len(pres)):
        last = handle_last_modified(co, changedbs, handle, pres[i], txn)
        if last is None:
            continue

        # XXX: this is correct, but breaks old history
        if 0:
            pinfo = bdecode(dagdb.get(handle + last, txn=txn))
            if 'handle' not in pinfo and len(pinfo['precursors']) == 1:
                last = handle_last_modified(co, changedbs, handle,
                                            pinfo['precursors'][0][0], txn)
                if last is None:
                    continue

        precursors.append(last)
        indices.append(i)

    # second pass to eliminate precursors which are ancestors of others
    retval = []
    for i in range(len(precursors)):
        pre = precursors.pop(0)
        index = indices.pop(0)
        if _is_ancestor(co, pre, precursors, txn):
            continue
        precursors.append(pre)
        retval.append((pre, index))

    return retval, len(pres)
431
def _update_helper_content(co, handle, point, hinfo, txn):
    """Build the contents mini-dag payload for *handle* at *point* from
    the changeset's handle info *hinfo*.

    Returns {} when the changeset carries no content information for
    this handle.
    """
    oinfo = {}
    if 'hash' in hinfo:
        # the diff was written out earlier; copy its offset/length from
        # the existing dag entry if there is one
        if co.contents.dagdb.has_key(handle + point, txn):
            dinfo = bdecode(co.contents.dagdb.get(handle + point, txn=txn))
            oinfo['offset'] = dinfo['handle']['offset']
            oinfo['length'] = dinfo['handle']['length']
        else:
            # XXX: ugly in general
            oinfo['offset'] = -1
    if 'add' in hinfo:
        oinfo['add'] = 1
    elif 'delete' in hinfo:
        oinfo['delete'] = 1
    return oinfo
447
def _update_helper_name(co, handle, point, hinfo, txn):
    """Build the name mini-dag payload for *handle* at *point* from the
    changeset's handle info *hinfo*, registering the (parent, name) pair
    in allnamesdb as a side effect.

    Returns {} when the changeset carries no name information.
    """
    oinfo = {}
    if 'name' in hinfo:
        oinfo['name'] = hinfo['name']
        try:
            oinfo['parent'] = hinfo['parent']
            co.allnamesdb.put(hinfo['parent'] + hinfo['name'],
                              handle, flags=db.DB_NODUPDATA, txn=txn)
        except KeyError:
            # only the root handle has a name but no parent
            assert handle == roothandle
            assert 'add' in hinfo
        except db.DBKeyExistError:
            # (parent, name) -> handle already registered
            pass

    if 'add' in hinfo:
        oinfo['add'] = 1
    elif 'delete' in hinfo:
        oinfo['delete'] = 1
    return oinfo
467
def changes_in_branch(co, lpoints, bpoints, txn, cache=None):
    """List the changesets reachable from *bpoints* which are not
    ancestors of any of *lpoints*.

    *cache*, if supplied, maps point -> decoded changeset and is both
    consulted and populated.
    """
    points = bpoints[:]
    seen, changes = {}, []
    while len(points):
        pnext = points.pop()
        if pnext in seen:
            continue
        seen[pnext] = 1
        if _is_ancestor(co, pnext, lpoints, txn):
            continue
        # fixed: test `is not None` so an initially-empty cache dict is
        # populated instead of being silently ignored
        if cache is not None:
            if pnext in cache:
                pinfo = cache[pnext]
            else:
                pinfo = bdecode(co.lcrepo.get(pnext, txn=txn))
                cache[pnext] = pinfo
        else:
            pinfo = bdecode(co.lcrepo.get(pnext, txn=txn))

        points.extend(pinfo['precursors'])

        changes.append(pnext)

    return changes
492
def handles_in_branch(co, lpoints, bpoints, txn, cache=None, deleted_modified=False):
    """Find the handles touched between *lpoints* and *bpoints*.

    Walks every changeset reachable from *bpoints* that is not an
    ancestor of *lpoints*, collecting handles with name changes and
    content changes.  Unless *deleted_modified* is set, deleted handles
    are dropped from the modified list.

    Returns (named_handles, modified_handles).
    """
    points = bpoints[:]
    seen, named, modified, deleted = {}, {}, {}, {}
    while len(points):
        pnext = points.pop()
        if pnext in seen:
            continue
        seen[pnext] = 1
        if _is_ancestor(co, pnext, lpoints, txn):
            continue
        # NOTE(review): a falsy (empty dict) cache is treated as "no
        # cache" here -- confirm callers always pass a pre-warmed dict
        if cache:
            if pnext in cache:
                pinfo = cache[pnext]
            else:
                pinfo = bdecode(co.lcrepo.get(pnext, txn=txn))
                cache[pnext] = pinfo
        else:
            pinfo = bdecode(co.lcrepo.get(pnext, txn=txn))
        for handle, hinfo in list(pinfo['handles'].items()):
            if 'name' in hinfo:
                named[handle] = 1
            if 'hash' in hinfo:
                modified[handle] = 1
            if 'delete' in hinfo:
                named[handle] = 1
                if not deleted_modified:
                    deleted[handle] = 1

        # XXX: afaik, this is only an issue for ambiguous clean merges, which
        # don't happen with name operations. requires more thought.
        if co.contents.mergedb.has_key(pnext, txn):
            clean_merges = bdecode(co.contents.mergedb.get(pnext, txn=txn))
            for handle in clean_merges:
                modified[handle] = 1
            # XXX: check for deletes?

        points.extend(pinfo['precursors'])
    # deleted handles don't count as modified (unless requested above)
    for handle in list(deleted.keys()):
        if handle in modified:
            del modified[handle]
    return (list(named.keys()), list(modified.keys()))
534
def handle_last_modified(co, changedbs, handle, change, txn):
    """Find the last point at or before *change* where *handle* was
    modified in the given mini-dag (*changedbs* is co.names or
    co.contents).

    Walks the branch containing *change* and then its parent branches,
    using the indexdb records (handle + packed branch/branchnum ->
    point).  Returns the point, or None if the handle was never touched.
    """
    indexdb = changedbs.indexdb
    dagdb   = changedbs.dagdb

    # trivial case: the handle was modified in this very changeset
    if dagdb.has_key(handle + change, txn):
        return change
    hinfo = db_get(co, co.branchmapdb, change, txn)
    bbranch, bnum = int(hinfo['branch']), hinfo['branchnum']
    retval = None
    cursor = indexdb.cursor(txn=txn)
    while 1:
        branchpoint = struct.pack('>20sII', handle, bbranch, bnum)
        try:
            foo = cursor.set_range(branchpoint)
        except db.DBNotFoundError:
            foo = None
        if foo is None:
            foo = cursor.last()
            if foo is None:
                break
            key, value = foo
        else:
            key, value = foo
            if key != branchpoint:
                try:
                    # fixed: the bsddb exception class is DBNotFoundError;
                    # the old `db.DBNotFound` raised AttributeError instead
                    foo = cursor.prev_nodup()
                except db.DBNotFoundError:
                    foo = None
                if foo is None:
                    break
                key, value = foo
        rhandle, rbranch, rnum = struct.unpack('>20sII', key)
        if rhandle == handle and rbranch == bbranch:
            retval = value
            break
        # not on this branch: climb to the parent branch and retry
        pinfo = db_get(co, co.branchdb, bbranch, txn)
        try:
            bbranch, bnum = int(pinfo['parent']), pinfo['parentnum']
        except KeyError:
            # branch has no parent (root reached): handle never modified
            #_print_indexdb(co, indexdb, handle, txn)
            break
    cursor.close()

    return retval
581
def _print_indexdb(co, indexdb, handle, txn):
    """Debugging helper: dump the (branch, num) index entries for *handle*."""
    print('indexdb')
    cursor = indexdb.cursor(txn=txn)
    foo = cursor.set_range(handle)
    while foo is not None:
        key, value = foo
        # fixed: index keys are packed big-endian ('>20sII', as written in
        # _update_mini_dag / read in handle_last_modified); '<20sII'
        # decoded branch and num incorrectly
        nhandle, branch, num = struct.unpack('>20sII', key)
        if handle != nhandle:
            break
        print("%d, %d" % (branch, num))
        foo = next(cursor)
    cursor.close()
594
def handle_name_at_point(co, handle, point, txn, dochecks=0):
    """Public name-at-point lookup: return the name state dict for
    *handle* as of *point*, or None if it has no name history there.

    Resets the per-query name cache before delegating.
    """
    change = handle_last_modified(co, co.names, handle, point, txn)
    if change is None:
        return None
    co.name_cache = {}
    return _handle_name_at_point(co, handle, change, txn, dochecks=dochecks)
601
def __handle_name_at_point(co, handle, point, txn, dochecks=0):
    """Cached variant of the name-at-point lookup used internally;
    consults co.name_cache (filled by _handle_name_at_point with
    dochecks set) before recomputing."""
    key = handle + point
    if not dochecks and key in co.name_cache:
        return co.name_cache[key]

    change = handle_last_modified(co, co.names, handle, point, txn)
    if change is None:
        return None
    return _handle_name_at_point(co, handle, change, txn, dochecks=dochecks)
611
def _handle_name_from_precursors(precursors, resolved):
    """Merge the name states of a node's precursors into one state.

    *resolved* indicates the conflict is being resolved at this node
    (e.g. by a delete), so rename conflicts are tolerated instead of
    raising HistoryError.  A deleted precursor wins outright.
    """
    state = {}
    for pre in precursors:
        if state == {}:
            # first precursor seeds the state
            state = pre.copy()
            continue
        if 'delete' in pre:
            return pre
        outcome, rename_point = rename_conflict_check(state, pre)
        if not resolved and outcome == 'conflict':
            raise HistoryError('double name conflict')
        elif outcome == 'remote':
            # the other side's rename dominates
            state['name'] = pre['name']
            state['parent'] = pre['parent']
        state['rename point'] = rename_point
        state['points'] = dmerge(state['points'], pre['points'])
    return state
629
def _handle_name_at_point(co, handle, point, txn, dochecks=0):
    """Compute the name state of *handle* at name-mini-dag node *point*
    by merging precursor states and applying any change stored there.

    With *dochecks* set, also validates the result (legal name, parent
    exists and is a directory, no parent loops, no name collisions),
    raising HistoryError on violations, and fills co.name_cache.
    """
    def walk_precursors(cset, dochecks):
        # resolve all precursor states and union their points
        precursors, points = [], [point]
        for pre, index in cset['precursors']:
            foo = _handle_name_at_point(co, handle, pre, txn, dochecks=dochecks)
            if foo is None:
                continue
            points = dmerge(points, foo['points'])
            precursors.append(foo)
        return precursors, points

    cset = bdecode(co.names.dagdb.get(handle + point, txn=txn))
    if 'handle' not in cset:
        # no explicit change here: pure merge of the precursors
        precursors, points = walk_precursors(cset, dochecks)
        state = _handle_name_from_precursors(precursors, 0)
    elif 'delete' in cset['handle']:
        precursors, points = walk_precursors(cset, dochecks)
        state = _handle_name_from_precursors(precursors, 1)
        state['delete'] = 1
    else:
        # explicit create/rename recorded at this point
        precursors, points = walk_precursors(cset, dochecks)
        state = {}
        state['name'] = cset['handle']['name']
        try:
            state['parent'] = cset['handle']['parent']
        except KeyError:
            # only the root handle has no parent
            assert handle == roothandle
            assert 'add' in cset['handle']
        state['rename point'] = [point]

    state['points'] = points

    if dochecks == 0:
        return state

    co.name_cache[handle + point] = state

    if state['name'] == '' and handle != roothandle:
        raise HistoryError('illegal name')
    if state['name'] == '.' or state['name'] == '..':
        raise HistoryError('illegal name')

    if 'delete' in state:
        if len(children_count(co, handle, point, txn)):
            raise HistoryError('non-empty directory can\'t be deleted')
        return state

    staticinfo = db_get(co, co.staticdb, handle, txn)
    if staticinfo['type'] == 'dir':
        try:
            if parent_loop_check(co, state['parent'], point, txn):
                raise HistoryError('parent loop')
        except KeyError:
            pass

    try:
        parentinfo = db_get(co, co.staticdb, state['parent'], txn)
        if parentinfo['type'] != 'dir':
            raise HistoryError('parent not a directory')

        parentstate = __handle_name_at_point(co, state['parent'], point, txn)
        if parentstate is None:
            raise HistoryError('parent not in repository')
        if 'delete' in parentstate:
            raise HistoryError('file committed with deleted parent')
        if len(name_use_count(co, state, point, txn)) != 1:
            raise HistoryError('name already in use')
        if state['name'] == 'CVILLE':
            raise HistoryError('illegal name')
    except KeyError:
        # the root handle has no parent to check against
        assert handle == roothandle

    return state
704
def fullpath_at_point(co, handle, point, txn=None):
    """Reconstruct the repository-relative path of *handle* as of
    *point*, or None if any path component is missing from history
    (also None for the root handle itself)."""
    parts = []
    while handle != roothandle:
        pinfo = handle_name_at_point(co, handle, point, txn)
        if pinfo is None:
            return None
        parts.append(pinfo['name'])
        handle = pinfo['parent']
    if not parts:
        return None
    parts.reverse()
    return path.join(*parts)
717
def _mini_dag_refcount(co, handle, point, txn, cache=None, info_cache=None):
    """Walk the per-handle contents mini-dag backwards from *point*,
    counting how many successors reference each node.

    Decoded dag entries are stashed in *info_cache* (required) for the
    caller to reuse; returns the refcount cache mapping
    point -> {'refcount': n}.
    """
    assert info_cache is not None
    cache = {} if cache is None else cache
    pending = [point]
    while pending:
        cur = pending.pop()
        if cur in cache:
            cache[cur]['refcount'] += 1
            continue
        cache[cur] = {'refcount': 1}

        pinfo = bdecode(co.contents.dagdb.get(handle + cur, txn=txn))
        info_cache[cur] = pinfo

        pending.extend(pre for pre, _index in pinfo['precursors'])
    return cache
736
def handle_contents_at_point(co, handle, point, txn, dcache=None):
    """Reconstruct the full contents state (lines, line points, points)
    of file *handle* as of changeset *point*.

    Replays the handle's diff mini-dag bottom-up, reading raw diffs from
    the per-handle diff file (optionally pre-seeded via *dcache*, a
    point -> raw diff mapping).  Returns None if the handle has no
    content history at *point*; raises ValueError for non-files.
    """
    if dcache is None:
        dcache = {}
    staticinfo = db_get(co, co.staticdb, handle, txn)
    if staticinfo['type'] != 'file':
        raise ValueError('no contents for non-file')

    change = handle_last_modified(co, co.contents, handle, point, txn)
    if change is None:
        return None

    # refcounts let intermediate states be dropped as soon as their last
    # dependent has consumed them
    hcache = {}
    cache = _mini_dag_refcount(co, handle, change, txn, info_cache=hcache)
    hfile = open(path.join(co.cpath, binascii.hexlify(handle)), 'rb')

    points = [change]
    while len(points):
        point = points[-1]

        # we may have already done this one
        if point in cache and 'info' in cache[point]:
            points.pop()
            continue

        # cache this, since we visit most nodes twice
        if point in hcache:
            hinfo = hcache[point]
        else:
            hinfo = bdecode(co.contents.dagdb.get(handle + point, txn=txn))
            hcache[point] = hinfo

        # check if we've got the precursors
        dirty = False
        for pre, foo in hinfo['precursors']:
            if 'info' not in cache[pre]:
                dirty = True
                points.append(pre)
        if dirty:
            continue
        points.pop()

        # read the diff
        if point in dcache:
            diff = dcache[point]
        else:
            diff = _read_diff(hinfo, hfile)
        if diff is not None:
            diff = bdecode(zlib.decompress(diff))

        # put together the precursor list and decrement refcounts
        precursors = []
        for pre, foo in hinfo['precursors']:
            precursors.append(cache[pre]['info'])

            cache[pre]['refcount'] -= 1
            if cache[pre]['refcount'] == 0:
                del cache[pre]

        # finally, get the contents
        cache[point]['info'] = _handle_contents_at_point(point, hinfo,
                                                         precursors, diff)

    hfile.close()

    cache[change]['info']['type'] = staticinfo['type']
    return cache[change]['info']
804
def _handle_contents_at_point(point, hinfo, precursors, diff):
    """Compute the contents state for one node of a handle's contents
    mini-dag, given the already-computed states of its *precursors* and
    the decoded *diff* stored at this node (None for clean merges).

    Returns a dict with 'points' and, unless deleted, 'lines' and
    'line points'.  Raises HistoryError on malformed or unmergeable
    history.
    """
    state = {}
    points = []
    for pre in precursors:
        points = dmerge(points, pre['points'])
    # NOTE: state['points'] aliases the local list; the append below
    # deliberately shows up in the returned state
    state['points'] = points

    matches = []
    if diff is not None:
        for pre, index in hinfo['precursors']:
            matches.append(diff['matches'][index])

        if 'add' in diff:
            if precursors != []:
                raise HistoryError('handle already exists')
        elif precursors == []:
            raise HistoryError('cannot modify non-existent file')

        if 'delete' in diff:
            state['delete'] = diff['delete']

    if 'delete' not in state:
        fpre = []
        for pre in precursors:
            fpre.append((pre['lines'], pre['line points'], pre['points']))
        if diff is not None:
            try:
                lines, line_points = replay(fpre, matches,
                                            diff['newlines'], point)
            except MergeError as msg:
                raise_(HistoryError, 'merge error: ' + str(msg))
            except KeyError:
                raise HistoryError('malformed change')

            points.append(point)
        else:
            lines, line_points, points = find_conflict_multiple_safe(fpre)
            if lines is None:
                # XXX: this is a pretty gross hack
                if len(fpre) == 2:
                    # ported from the removed Python 2 `sets` module to
                    # the builtin set type (identical semantics here)
                    s0 = set(fpre[0][2])
                    s1 = set(fpre[1][2])
                    if s0 == s1:
                        raise HistoryError('merge error')
                    elif s0.issubset(s1):
                        lines, line_points, points = fpre[1]
                    elif s0.issuperset(s1):
                        lines, line_points, points = fpre[0]
                    else:
                        raise HistoryError('merge error')
                else:
                    raise HistoryError('merge error')
        state['lines'] = lines
        state['line points'] = line_points

    return state
861
def print_file_with_points(pre):
    """Render one (lines, line points, points) triple as text, with the
    hex ids of each line's points interleaved between the lines.

    Python 3 port fix: binascii.hexlify() returns bytes, which cannot be
    joined into the str output -- decode to ASCII str first.
    """
    def _print_points(line_points):
        return ', '.join(binascii.hexlify(lp).decode('ascii')
                         for lp in line_points)

    lines, line_points, points = pre
    out = [_print_points(line_points[0])]
    for i, line in enumerate(lines):
        out.append(line)
        out.append(_print_points(line_points[i + 1]))
    return '\n'.join(out)
875
876from .merge import _find_conflict
877
def print_conflict(co, fpre):
    """Render a two-way line conflict between fpre[0] (local) and
    fpre[1] (remote) with conflict markers, annotating each line with
    the short ids of its origin points.  Returns the marked-up text;
    hunk headers are printed to stdout as a side effect."""
    p1, p2 = fpre[0], fpre[1]
    olines, oline_points = _find_conflict(fpre[0][0], fpre[0][1], fpre[0][2],
                                          fpre[1][0], fpre[1][1], fpre[1][2])

    ls = []
    offset = [0, 0]
    for i in range(len(olines)):
        l = olines[i]

        # plain strings are lines common to both sides; just advance
        if type(l) is str:
            offset[0] += 1
            offset[1] += 1
            continue

        # l is a (local_lines, remote_lines) conflict pair
        print('@@ -%d,%d +%d,%d @@' % (offset[0], len(l[0]), offset[0], len(l[1])))

        offset[0] += len(l[0])
        offset[1] += len(l[1])

        lp = oline_points[i]
        ls.append('<<<<<<< local')

        ps = ', '.join([short_id(co, p) for p in lp[0][0]])
        ls.append(ps)
        for j in range(len(l[0])):
            ls.append(l[0][j])
            ps = ', '.join([short_id(co, p) for p in lp[0][j+1]])
            ls.append(ps)

        ls.append('=======')

        ps = ', '.join([short_id(co, p) for p in lp[1][0]])
        ls.append(ps)
        for j in range(len(l[1])):
            ls.append(l[1][j])
            ps = ', '.join([short_id(co, p) for p in lp[1][j+1]])
            ls.append(ps)

        ls.append('>>>>>>> remote')

    return '\n'.join(ls)
920
def rebuild_from_points(co, points, txn):
    """Wipe all derived indices and replay history from the given points."""
    derived = (co.changesdb, co.branchdb, co.branchmapdb,
               co.names.indexdb, co.names.dagdb, co.names.mergedb,
               co.contents.indexdb, co.contents.mergedb)
    for table in derived:
        table.truncate(txn)
    # we don't truncate the cdagdb because it contains the offsets and lengths
    # for the diffs in the files, which we can't recreate. the sync below will
    # read those parameters out and rewrite the cdagdb, anyway.
    co.linforepo.put('branchmax', bencode(0), txn=txn)

    # Drop the 40-byte (handle+point) entries that lack a 'handle' field;
    # the offset/length records are kept for the sync below.
    cdagdb = co.contents.dagdb
    for key, value in cdagdb.items(txn):
        if len(key) == 40 and 'handle' not in bdecode(value):
            cdagdb.delete(key, txn=txn)

    for point in points:
        sync_history(co, point, txn)
944
def clean_merge_point(info):
    """Return 1 if the changeset is a pure automatic merge, else 0.

    A clean merge point touches no handles, has exactly two precursors
    and carries no comment.
    """
    is_clean = (info['handles'] == {} and
                len(info['precursors']) == 2 and
                'comment' not in info)
    return 1 if is_clean else 0
953
def short_id(co, point):
    """Return the shortest unambiguous hex prefix (at least 3 chars) of a
    binary changeset id, judged against the keys in co.changesdb.
    """
    apoint = binascii.hexlify(point)
    length = 3
    # Seed key with the prefix itself so the while test passes on entry.
    key = apoint[:length]
    cursor = co.changesdb.cursor()
    # Grow the prefix until the nearest db key no longer shares it (or the
    # whole id has been used).
    while key.startswith(apoint[:length]) and length < len(apoint):
        length += 1
        try:
            key, value = cursor.set_range(apoint[:length])
        except (db.DBNotFoundError, TypeError):
            # Nothing at/after the prefix: it is unambiguous.
            break
        if key != apoint:
            continue
        # set_range landed on this changeset itself; the colliding
        # candidate (if any) is the following key.
        foo = next(cursor)
        if foo is None:
            break
        key, value = foo
    cursor.close()
    return apoint[:length]
973
class ChangeNotKnown(Exception):
    """No changeset matches the given id or prefix."""
    pass
975
class ChangeNotUnique(Exception):
    """More than one changeset matches the given id prefix."""
    pass
977
class ChangeBadRepository(Exception):
    """A cdv:// repository URL could not be resolved to a head changeset."""
    pass
979
def long_id(co, point):
    """Expand a hex changeset id prefix (or a cdv:// repository URL) to the
    full binary changeset id.

    Raises ChangeBadRepository for an unresolvable repository URL,
    ChangeNotKnown when nothing matches the prefix, and ChangeNotUnique
    (carrying all matching keys) when the prefix is ambiguous.
    """
    if point.startswith('cdv://'):
        # Repository URL: resolve to that repo's last-known head.
        key = repo_head(co, tuple_to_server(server_to_tuple(point)))
        if key is None:
            raise_(ChangeBadRepository, point)
        return key

    cursor = co.changesdb.cursor()
    try:
        key, value = cursor.set_range(point)
    except (db.DBNotFoundError, TypeError):
        cursor.close()
        raise_(ChangeNotKnown, point)
    if not key.startswith(point):
        cursor.close()
        raise_(ChangeNotKnown, point)
    try:
        key2, value = next(cursor)
    except (db.DBNotFoundError, TypeError):
        # No key after the match, so the prefix is trivially unique.
        cursor.close()
        return binascii.unhexlify(key)
    if key2.startswith(point):
        # At least two keys share the prefix; collect them all for the error.
        keys = [key]
        while key2.startswith(point):
            keys.append(key2)
            try:
                key2, value = next(cursor)
            except (db.DBNotFoundError, TypeError):
                break
        cursor.close()
        raise ChangeNotUnique(point, keys)
    cursor.close()
    return binascii.unhexlify(key)
1013
def repo_head(co, repo):
    """Return the last-known head changeset for a repository URL, or None."""
    if repo is None:
        return None
    # Canonicalize the URL before the lookup so equivalent spellings match.
    canonical = tuple_to_server(server_to_tuple(repo))
    return co.linforepo.get(canonical)
1019
def server_to_tuple(server_string):
    """Parse a 'cdv://host[:port][/repo[/]]' URL into (host, port, repo).

    repo is None when missing or empty; one trailing '/' is stripped from
    it.  The port defaults to 6601; returns None if the port is not an
    integer.  (Fix: removed the dead 'server_string = temp[0]' assignment.)
    """
    if server_string.startswith('cdv://'):
        server_string = server_string[6:]
    parts = server_string.split('/', 1)

    if len(parts) != 2 or parts[1] == '':
        repo = None
    else:
        repo = parts[1]
        if repo.endswith('/'):
            repo = repo[:-1]

    hostport = parts[0].split(':', 1)
    if len(hostport) == 1:
        port = 6601
    else:
        try:
            port = int(hostport[1])
        except ValueError:
            return None
    return (hostport[0], port, repo)
1042
def tuple_to_server(tuple):
    """Format a (host, port, repo) triple as a 'cdv://host:port/repo' URL."""
    host, port, repo = tuple
    return 'cdv://%s:%d/%s' % (host, port, repo)
1045
def print_change(co, change, changeset):
    """Render one changeset as a list of XML fragment strings.

    change is the binary changeset id, changeset its decoded dict.  The
    fragments cover the change header plus added/deleted/renamed/modified
    file sections.  (Fix: the three identical file sections are produced
    by one helper instead of three copied loops; the needless list()
    copies of dict views were dropped.)
    """
    escape = saxutils.escape

    def _file_name(handle):
        # <name> element for a handle at this change point.
        return "<name>" + escape(fullpath_at_point(co, handle, change)) + "</name>"

    def _file_section(tag, handles):
        # <tag><file><name>...</name></file>...</tag> — the shape shared by
        # the deleted/renamed/modified sections.
        section = ["<%s>" % tag]
        for handle in handles:
            section.append("<file>")
            section.append(_file_name(handle))
            section.append("</file>")
        section.append("</%s>" % tag)
        return section

    output = []
    output.append("<change>")

    # NOTE(review): binascii.hexlify returns bytes on Python 3, so this
    # concatenation relies on Python 2 str semantics — confirm when porting.
    output.append("<changenumber>" + binascii.hexlify(change) + "</changenumber>")
    output.append("<short-changenumber>" + short_id(co, change) + "</short-changenumber>")
    output.append("<user>" + escape(changeset['user']) + "</user>")
    try:
        output.append("<comment>" + escape(changeset['comment']) + "</comment>")
    except KeyError:
        pass
    output.append("<date>" + str(changeset['time']) + "</date>")
    output.append("<pretty-date>" + strftime('%a %b %d %H:%M:%S %Y %Z', localtime(changeset['time'])) + "</pretty-date>")

    # Classify each handle: an 'add' wins outright; otherwise a handle may
    # be any combination of deleted, renamed and modified.
    adds, deletes, renames, mods = {}, {}, {}, {}
    for handle, value in changeset['handles'].items():
        if 'add' in value:
            adds[handle] = value['add']
        else:
            if 'delete' in value:
                deletes[handle] = 1
            if 'name' in value:
                renames[handle] = 1
            if 'hash' in value:
                mods[handle] = 1

    # Added files carry an extra <type> element, so they stay inline.
    output.append("<added-files>")
    for handle, info in adds.items():
        output.append("<file>")
        output.append(_file_name(handle))
        output.append("<type>" + info['type'] + "</type>")
        output.append("</file>")
    output.append("</added-files>")

    output.extend(_file_section("deleted-files", deletes.keys()))
    output.extend(_file_section("renamed-files", renames.keys()))
    output.extend(_file_section("modified-files", mods.keys()))

    output.append("</change>")
    return output
1105
def dump_changeinfo(co, change, repo=None):
    """Render a clean merge changeset and its branch contents as one XML
    document string.

    Raises ValueError if change is not a clean merge point.
    """
    changeset = bdecode(co.lcrepo.get(change))
    if not clean_merge_point(changeset):
        raise ValueError

    frags = ["<root>"]
    if repo:
        frags.append("<repository>" + saxutils.escape(repo) + "</repository>")
    frags.append("<changenumber>" + binascii.hexlify(change) + "</changenumber>")
    frags.append("<short-changenumber>" + short_id(co, change) + "</short-changenumber>")
    frags.append("<committer>" + saxutils.escape(changeset['user']) + "</committer>")
    frags.append("<date>" + str(changeset['time']) + "</date>")

    # Walk everything merged in by this point; emit only the real changes,
    # skipping nested clean merge points.
    head = [changeset['precursors'][0]]
    exclude = changeset['precursors'][1:]
    for change in changes_in_branch(co, head, exclude, None):
        changeset = bdecode(co.lcrepo.get(change))
        if not clean_merge_point(changeset):
            frags.extend(print_change(co, change, changeset))

    frags.append("</root>\n")
    return '\n'.join(frags)
1129
def db_get(co, cdb, key, txn):
    """Fetch and bdecode cdb[key], memoizing the decoded value.

    BUG FIX: the cache was previously keyed on the imported `db` *module*
    object rather than on `cdb`, so every database shared a single cache
    and identical keys from different databases returned each other's
    values.  The cache is now kept per database handle.  (The dead
    commented-out code was also removed.)
    """
    try:
        cache = co.db_cache[cdb]
    except KeyError:
        cache = co.db_cache[cdb] = {}
    if key in cache:
        return cache[key]
    cache[key] = bdecode(cdb.get(key, txn=txn))
    return cache[key]

def db_put(co, cdb, key, value, txn):
    """bencode value, store it at cdb[key], and update that database's cache.

    BUG FIX: same module-vs-handle cache-key fix as db_get, so writes land
    in the cache that reads consult.
    """
    cdb.put(key, bencode(value), txn=txn)
    try:
        cache = co.db_cache[cdb]
    except KeyError:
        cache = co.db_cache[cdb] = {}
    cache[key] = value
1151
# Optional acceleration: psyco is a Python 2 JIT; when it is installed,
# bind the hot ancestry-walk function, otherwise carry on without it.
try:
    import psyco
    psyco.bind(_is_ancestor, 0)
except ImportError:
    pass