1# Copyright (C) 2006-2012, 2016 Canonical Ltd
2#
3# This program is free software; you can redistribute it and/or modify
4# it under the terms of the GNU General Public License as published by
5# the Free Software Foundation; either version 2 of the License, or
6# (at your option) any later version.
7#
8# This program is distributed in the hope that it will be useful,
9# but WITHOUT ANY WARRANTY; without even the implied warranty of
10# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
11# GNU General Public License for more details.
12#
13# You should have received a copy of the GNU General Public License
14# along with this program; if not, write to the Free Software
15# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16
17"""Tests for the Repository facility that are not interface tests.
18
19For interface tests see tests/per_repository/*.py.
20
21For concrete class tests see this file, and for storage formats tests
22also see this file.
23"""
24
25from stat import S_ISDIR
26
27import breezy
28from breezy.errors import (
29    UnknownFormatError,
30    )
31from breezy import (
32    tests,
33    transport,
34    )
35from breezy.bzr import (
36    bzrdir,
37    btree_index,
38    inventory,
39    repository as bzrrepository,
40    versionedfile,
41    vf_repository,
42    vf_search,
43    )
44from breezy.bzr.btree_index import BTreeBuilder, BTreeGraphIndex
45from breezy.bzr.index import GraphIndex
46from breezy.repository import RepositoryFormat
47from breezy.tests import (
48    TestCase,
49    TestCaseWithTransport,
50    )
51from breezy import (
52    controldir,
53    errors,
54    osutils,
55    repository,
56    revision as _mod_revision,
57    upgrade,
58    workingtree,
59    )
60from breezy.bzr import (
61    groupcompress_repo,
62    knitrepo,
63    knitpack_repo,
64    pack_repo,
65    )
66
67
68class TestDefaultFormat(TestCase):
69
70    def test_get_set_default_format(self):
71        old_default = controldir.format_registry.get('default')
72        old_default_help = controldir.format_registry.get_help('default')
73        private_default = old_default().repository_format.__class__
74        old_format = repository.format_registry.get_default()
75        self.assertTrue(isinstance(old_format, private_default))
76
77        def make_sample_bzrdir():
78            my_bzrdir = bzrdir.BzrDirMetaFormat1()
79            my_bzrdir.repository_format = SampleRepositoryFormat()
80            return my_bzrdir
81        controldir.format_registry.remove('default')
82        controldir.format_registry.register('sample', make_sample_bzrdir, '')
83        controldir.format_registry.set_default('sample')
84        # creating a repository should now create an instrumented dir.
85        try:
86            # the default branch format is used by the meta dir format
87            # which is not the default bzrdir format at this point
88            dir = bzrdir.BzrDirMetaFormat1().initialize('memory:///')
89            result = dir.create_repository()
90            self.assertEqual(result, 'A bzr repository dir')
91        finally:
92            controldir.format_registry.remove('default')
93            controldir.format_registry.remove('sample')
94            controldir.format_registry.register(
95                'default', old_default, old_default_help)
96        self.assertIsInstance(repository.format_registry.get_default(),
97                              old_format.__class__)
98
99
100class SampleRepositoryFormat(bzrrepository.RepositoryFormatMetaDir):
101    """A sample format
102
103    this format is initializable, unsupported to aid in testing the
104    open and open(unsupported=True) routines.
105    """
106
107    @classmethod
108    def get_format_string(cls):
109        """See RepositoryFormat.get_format_string()."""
110        return b"Sample .bzr repository format."
111
112    def initialize(self, a_controldir, shared=False):
113        """Initialize a repository in a BzrDir"""
114        t = a_controldir.get_repository_transport(self)
115        t.put_bytes('format', self.get_format_string())
116        return 'A bzr repository dir'
117
118    def is_supported(self):
119        return False
120
121    def open(self, a_controldir, _found=False):
122        return "opened repository."
123
124
125class SampleExtraRepositoryFormat(repository.RepositoryFormat):
126    """A sample format that can not be used in a metadir
127
128    """
129
130    def get_format_string(self):
131        raise NotImplementedError
132
133
134class TestRepositoryFormat(TestCaseWithTransport):
135    """Tests for the Repository format detection used by the bzr meta dir facility.BzrBranchFormat facility."""
136
137    def test_find_format(self):
138        # is the right format object found for a repository?
139        # create a branch with a few known format objects.
140        # this is not quite the same as
141        self.build_tree(["foo/", "bar/"])
142
143        def check_format(format, url):
144            dir = format._matchingcontroldir.initialize(url)
145            format.initialize(dir)
146            found_format = bzrrepository.RepositoryFormatMetaDir.find_format(
147                dir)
148            self.assertIsInstance(found_format, format.__class__)
149        check_format(repository.format_registry.get_default(), "bar")
150
151    def test_find_format_no_repository(self):
152        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
153        self.assertRaises(errors.NoRepositoryPresent,
154                          bzrrepository.RepositoryFormatMetaDir.find_format,
155                          dir)
156
157    def test_from_string(self):
158        self.assertIsInstance(
159            SampleRepositoryFormat.from_string(
160                b"Sample .bzr repository format."),
161            SampleRepositoryFormat)
162        self.assertRaises(AssertionError,
163                          SampleRepositoryFormat.from_string,
164                          b"Different .bzr repository format.")
165
166    def test_find_format_unknown_format(self):
167        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
168        SampleRepositoryFormat().initialize(dir)
169        self.assertRaises(UnknownFormatError,
170                          bzrrepository.RepositoryFormatMetaDir.find_format,
171                          dir)
172
173    def test_find_format_with_features(self):
174        tree = self.make_branch_and_tree('.', format='2a')
175        tree.branch.repository.update_feature_flags({b"name": b"necessity"})
176        found_format = bzrrepository.RepositoryFormatMetaDir.find_format(
177            tree.controldir)
178        self.assertIsInstance(
179            found_format, bzrrepository.RepositoryFormatMetaDir)
180        self.assertEqual(found_format.features.get(b"name"), b"necessity")
181        self.assertRaises(
182            bzrdir.MissingFeature, found_format.check_support_status, True)
183        self.addCleanup(
184            bzrrepository.RepositoryFormatMetaDir.unregister_feature, b"name")
185        bzrrepository.RepositoryFormatMetaDir.register_feature(b"name")
186        found_format.check_support_status(True)
187
188
189class TestRepositoryFormatRegistry(TestCase):
190
191    def setUp(self):
192        super(TestRepositoryFormatRegistry, self).setUp()
193        self.registry = repository.RepositoryFormatRegistry()
194
195    def test_register_unregister_format(self):
196        format = SampleRepositoryFormat()
197        self.registry.register(format)
198        self.assertEqual(format, self.registry.get(
199            b"Sample .bzr repository format."))
200        self.registry.remove(format)
201        self.assertRaises(KeyError, self.registry.get,
202                          b"Sample .bzr repository format.")
203
204    def test_get_all(self):
205        format = SampleRepositoryFormat()
206        self.assertEqual([], self.registry._get_all())
207        self.registry.register(format)
208        self.assertEqual([format], self.registry._get_all())
209
210    def test_register_extra(self):
211        format = SampleExtraRepositoryFormat()
212        self.assertEqual([], self.registry._get_all())
213        self.registry.register_extra(format)
214        self.assertEqual([format], self.registry._get_all())
215
216    def test_register_extra_lazy(self):
217        self.assertEqual([], self.registry._get_all())
218        self.registry.register_extra_lazy(__name__,
219                                          "SampleExtraRepositoryFormat")
220        formats = self.registry._get_all()
221        self.assertEqual(1, len(formats))
222        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
223
224
225class TestFormatKnit1(TestCaseWithTransport):
226
227    def test_attribute__fetch_order(self):
228        """Knits need topological data insertion."""
229        repo = self.make_repository(
230            '.', format=controldir.format_registry.get('knit')())
231        self.assertEqual('topological', repo._format._fetch_order)
232
233    def test_attribute__fetch_uses_deltas(self):
234        """Knits reuse deltas."""
235        repo = self.make_repository(
236            '.', format=controldir.format_registry.get('knit')())
237        self.assertEqual(True, repo._format._fetch_uses_deltas)
238
239    def test_disk_layout(self):
240        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
241        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
242        # in case of side effects of locking.
243        repo.lock_write()
244        repo.unlock()
245        # we want:
246        # format 'Bazaar-NG Knit Repository Format 1'
247        # lock: is a directory
248        # inventory.weave == empty_weave
249        # empty revision-store directory
250        # empty weaves directory
251        t = control.get_repository_transport(None)
252        with t.get('format') as f:
253            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
254                                 f.read())
255        # XXX: no locks left when unlocked at the moment
256        # self.assertEqualDiff('', t.get('lock').read())
257        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
258        self.check_knits(t)
259        # Check per-file knits.
260        control.create_branch()
261        tree = control.create_workingtree()
262        tree.add(['foo'], [b'Nasty-IdC:'], ['file'])
263        tree.put_file_bytes_non_atomic('foo', b'')
264        tree.commit('1st post', rev_id=b'foo')
265        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
266                           b'\nfoo fulltext 0 81  :')
267
268    def assertHasKnit(self, t, knit_name, extra_content=b''):
269        """Assert that knit_name exists on t."""
270        self.assertEqualDiff(b'# bzr knit index 8\n' + extra_content,
271                             t.get(knit_name + '.kndx').read())
272
273    def check_knits(self, t):
274        """check knit content for a repository."""
275        self.assertHasKnit(t, 'inventory')
276        self.assertHasKnit(t, 'revisions')
277        self.assertHasKnit(t, 'signatures')
278
279    def test_shared_disk_layout(self):
280        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
281        knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
282        # we want:
283        # format 'Bazaar-NG Knit Repository Format 1'
284        # lock: is a directory
285        # inventory.weave == empty_weave
286        # empty revision-store directory
287        # empty weaves directory
288        # a 'shared-storage' marker file.
289        t = control.get_repository_transport(None)
290        with t.get('format') as f:
291            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
292                                 f.read())
293        # XXX: no locks left when unlocked at the moment
294        # self.assertEqualDiff('', t.get('lock').read())
295        self.assertEqualDiff(b'', t.get('shared-storage').read())
296        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
297        self.check_knits(t)
298
299    def test_shared_no_tree_disk_layout(self):
300        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
301        repo = knitrepo.RepositoryFormatKnit1().initialize(
302            control, shared=True)
303        repo.set_make_working_trees(False)
304        # we want:
305        # format 'Bazaar-NG Knit Repository Format 1'
306        # lock ''
307        # inventory.weave == empty_weave
308        # empty revision-store directory
309        # empty weaves directory
310        # a 'shared-storage' marker file.
311        t = control.get_repository_transport(None)
312        with t.get('format') as f:
313            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
314                                 f.read())
315        # XXX: no locks left when unlocked at the moment
316        # self.assertEqualDiff('', t.get('lock').read())
317        self.assertEqualDiff(b'', t.get('shared-storage').read())
318        self.assertEqualDiff(b'', t.get('no-working-trees').read())
319        repo.set_make_working_trees(True)
320        self.assertFalse(t.has('no-working-trees'))
321        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
322        self.check_knits(t)
323
324    def test_deserialise_sets_root_revision(self):
325        """We must have a inventory.root.revision
326
327        Old versions of the XML5 serializer did not set the revision_id for
328        the whole inventory. So we grab the one from the expected text. Which
329        is valid when the api is not being abused.
330        """
331        repo = self.make_repository(
332            '.', format=controldir.format_registry.get('knit')())
333        inv_xml = b'<inventory format="5">\n</inventory>\n'
334        inv = repo._deserialise_inventory(b'test-rev-id', [inv_xml])
335        self.assertEqual(b'test-rev-id', inv.root.revision)
336
337    def test_deserialise_uses_global_revision_id(self):
338        """If it is set, then we re-use the global revision id"""
339        repo = self.make_repository(
340            '.', format=controldir.format_registry.get('knit')())
341        inv_xml = (b'<inventory format="5" revision_id="other-rev-id">\n'
342                   b'</inventory>\n')
343        # Arguably, the deserialise_inventory should detect a mismatch, and
344        # raise an error, rather than silently using one revision_id over the
345        # other.
346        self.assertRaises(AssertionError, repo._deserialise_inventory,
347                          b'test-rev-id', [inv_xml])
348        inv = repo._deserialise_inventory(b'other-rev-id', [inv_xml])
349        self.assertEqual(b'other-rev-id', inv.root.revision)
350
351    def test_supports_external_lookups(self):
352        repo = self.make_repository(
353            '.', format=controldir.format_registry.get('knit')())
354        self.assertFalse(repo._format.supports_external_lookups)
355
356
357class DummyRepository(object):
358    """A dummy repository for testing."""
359
360    _format = None
361    _serializer = None
362
363    def supports_rich_root(self):
364        if self._format is not None:
365            return self._format.rich_root_data
366        return False
367
368    def get_graph(self):
369        raise NotImplementedError
370
371    def get_parent_map(self, revision_ids):
372        raise NotImplementedError
373
374
375class InterDummy(repository.InterRepository):
376    """An inter-repository optimised code path for DummyRepository.
377
378    This is for use during testing where we use DummyRepository as repositories
379    so that none of the default regsitered inter-repository classes will
380    MATCH.
381    """
382
383    @staticmethod
384    def is_compatible(repo_source, repo_target):
385        """InterDummy is compatible with DummyRepository."""
386        return (isinstance(repo_source, DummyRepository) and
387                isinstance(repo_target, DummyRepository))
388
389
390class TestInterRepository(TestCaseWithTransport):
391
392    def test_get_default_inter_repository(self):
393        # test that the InterRepository.get(repo_a, repo_b) probes
394        # for a inter_repo class where is_compatible(repo_a, repo_b) returns
395        # true and returns a default inter_repo otherwise.
396        # This also tests that the default registered optimised interrepository
397        # classes do not barf inappropriately when a surprising repository type
398        # is handed to them.
399        dummy_a = DummyRepository()
400        dummy_a._format = RepositoryFormat()
401        dummy_a._format.supports_full_versioned_files = True
402        dummy_b = DummyRepository()
403        dummy_b._format = RepositoryFormat()
404        dummy_b._format.supports_full_versioned_files = True
405        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
406
407    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
408        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
409
410        The effective default is now InterSameDataRepository because there is
411        no actual sane default in the presence of incompatible data models.
412        """
413        inter_repo = repository.InterRepository.get(repo_a, repo_b)
414        self.assertEqual(vf_repository.InterSameDataRepository,
415                         inter_repo.__class__)
416        self.assertEqual(repo_a, inter_repo.source)
417        self.assertEqual(repo_b, inter_repo.target)
418
419    def test_register_inter_repository_class(self):
420        # test that a optimised code path provider - a
421        # InterRepository subclass can be registered and unregistered
422        # and that it is correctly selected when given a repository
423        # pair that it returns true on for the is_compatible static method
424        # check
425        dummy_a = DummyRepository()
426        dummy_a._format = RepositoryFormat()
427        dummy_b = DummyRepository()
428        dummy_b._format = RepositoryFormat()
429        repo = self.make_repository('.')
430        # hack dummies to look like repo somewhat.
431        dummy_a._serializer = repo._serializer
432        dummy_a._format.supports_tree_reference = (
433            repo._format.supports_tree_reference)
434        dummy_a._format.rich_root_data = repo._format.rich_root_data
435        dummy_a._format.supports_full_versioned_files = (
436            repo._format.supports_full_versioned_files)
437        dummy_b._serializer = repo._serializer
438        dummy_b._format.supports_tree_reference = (
439            repo._format.supports_tree_reference)
440        dummy_b._format.rich_root_data = repo._format.rich_root_data
441        dummy_b._format.supports_full_versioned_files = (
442            repo._format.supports_full_versioned_files)
443        repository.InterRepository.register_optimiser(InterDummy)
444        try:
445            # we should get the default for something InterDummy returns False
446            # to
447            self.assertFalse(InterDummy.is_compatible(dummy_a, repo))
448            self.assertGetsDefaultInterRepository(dummy_a, repo)
449            # and we should get an InterDummy for a pair it 'likes'
450            self.assertTrue(InterDummy.is_compatible(dummy_a, dummy_b))
451            inter_repo = repository.InterRepository.get(dummy_a, dummy_b)
452            self.assertEqual(InterDummy, inter_repo.__class__)
453            self.assertEqual(dummy_a, inter_repo.source)
454            self.assertEqual(dummy_b, inter_repo.target)
455        finally:
456            repository.InterRepository.unregister_optimiser(InterDummy)
457        # now we should get the default InterRepository object again.
458        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
459
460
461class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):
462
463    @classmethod
464    def get_format_string(cls):
465        return b"Test Format 1"
466
467
468class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):
469
470    @classmethod
471    def get_format_string(cls):
472        return b"Test Format 2"
473
474
475class TestRepositoryConverter(TestCaseWithTransport):
476
477    def test_convert_empty(self):
478        source_format = TestRepositoryFormat1()
479        target_format = TestRepositoryFormat2()
480        repository.format_registry.register(source_format)
481        self.addCleanup(repository.format_registry.remove,
482                        source_format)
483        repository.format_registry.register(target_format)
484        self.addCleanup(repository.format_registry.remove,
485                        target_format)
486        t = self.get_transport()
487        t.mkdir('repository')
488        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
489        repo = TestRepositoryFormat1().initialize(repo_dir)
490        converter = repository.CopyConverter(target_format)
491        with breezy.ui.ui_factory.nested_progress_bar() as pb:
492            converter.convert(repo, pb)
493        repo = repo_dir.open_repository()
494        self.assertTrue(isinstance(target_format, repo._format.__class__))
495
496
497class TestRepositoryFormatKnit3(TestCaseWithTransport):
498
499    def test_attribute__fetch_order(self):
500        """Knits need topological data insertion."""
501        format = bzrdir.BzrDirMetaFormat1()
502        format.repository_format = knitrepo.RepositoryFormatKnit3()
503        repo = self.make_repository('.', format=format)
504        self.assertEqual('topological', repo._format._fetch_order)
505
506    def test_attribute__fetch_uses_deltas(self):
507        """Knits reuse deltas."""
508        format = bzrdir.BzrDirMetaFormat1()
509        format.repository_format = knitrepo.RepositoryFormatKnit3()
510        repo = self.make_repository('.', format=format)
511        self.assertEqual(True, repo._format._fetch_uses_deltas)
512
513    def test_convert(self):
514        """Ensure the upgrade adds weaves for roots"""
515        format = bzrdir.BzrDirMetaFormat1()
516        format.repository_format = knitrepo.RepositoryFormatKnit1()
517        tree = self.make_branch_and_tree('.', format)
518        tree.commit("Dull commit", rev_id=b"dull")
519        revision_tree = tree.branch.repository.revision_tree(b'dull')
520        with revision_tree.lock_read():
521            self.assertRaises(
522                errors.NoSuchFile, revision_tree.get_file_lines, u'')
523        format = bzrdir.BzrDirMetaFormat1()
524        format.repository_format = knitrepo.RepositoryFormatKnit3()
525        upgrade.Convert('.', format)
526        tree = workingtree.WorkingTree.open('.')
527        revision_tree = tree.branch.repository.revision_tree(b'dull')
528        with revision_tree.lock_read():
529            revision_tree.get_file_lines(u'')
530        tree.commit("Another dull commit", rev_id=b'dull2')
531        revision_tree = tree.branch.repository.revision_tree(b'dull2')
532        revision_tree.lock_read()
533        self.addCleanup(revision_tree.unlock)
534        self.assertEqual(b'dull', revision_tree.get_file_revision(u''))
535
536    def test_supports_external_lookups(self):
537        format = bzrdir.BzrDirMetaFormat1()
538        format.repository_format = knitrepo.RepositoryFormatKnit3()
539        repo = self.make_repository('.', format=format)
540        self.assertFalse(repo._format.supports_external_lookups)
541
542
543class Test2a(tests.TestCaseWithMemoryTransport):
544
545    def test_chk_bytes_uses_custom_btree_parser(self):
546        mt = self.make_branch_and_memory_tree('test', format='2a')
547        mt.lock_write()
548        self.addCleanup(mt.unlock)
549        mt.add([''], [b'root-id'])
550        mt.commit('first')
551        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
552        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
553        # It should also work if we re-open the repo
554        repo = mt.branch.repository.controldir.open_repository()
555        repo.lock_read()
556        self.addCleanup(repo.unlock)
557        index = repo.chk_bytes._index._graph_index._indices[0]
558        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
559
560    def test_fetch_combines_groups(self):
561        builder = self.make_branch_builder('source', format='2a')
562        builder.start_series()
563        builder.build_snapshot(None, [
564            ('add', ('', b'root-id', 'directory', '')),
565            ('add', ('file', b'file-id', 'file', b'content\n'))],
566            revision_id=b'1')
567        builder.build_snapshot([b'1'], [
568            ('modify', ('file', b'content-2\n'))],
569            revision_id=b'2')
570        builder.finish_series()
571        source = builder.get_branch()
572        target = self.make_repository('target', format='2a')
573        target.fetch(source.repository)
574        target.lock_read()
575        self.addCleanup(target.unlock)
576        details = target.texts._index.get_build_details(
577            [(b'file-id', b'1',), (b'file-id', b'2',)])
578        file_1_details = details[(b'file-id', b'1')]
579        file_2_details = details[(b'file-id', b'2')]
580        # The index, and what to read off disk, should be the same for both
581        # versions of the file.
582        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
583
584    def test_fetch_combines_groups(self):
585        builder = self.make_branch_builder('source', format='2a')
586        builder.start_series()
587        builder.build_snapshot(None, [
588            ('add', ('', b'root-id', 'directory', '')),
589            ('add', ('file', b'file-id', 'file', b'content\n'))],
590            revision_id=b'1')
591        builder.build_snapshot([b'1'], [
592            ('modify', ('file', b'content-2\n'))],
593            revision_id=b'2')
594        builder.finish_series()
595        source = builder.get_branch()
596        target = self.make_repository('target', format='2a')
597        target.fetch(source.repository)
598        target.lock_read()
599        self.addCleanup(target.unlock)
600        details = target.texts._index.get_build_details(
601            [(b'file-id', b'1',), (b'file-id', b'2',)])
602        file_1_details = details[(b'file-id', b'1')]
603        file_2_details = details[(b'file-id', b'2')]
604        # The index, and what to read off disk, should be the same for both
605        # versions of the file.
606        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
607
608    def test_fetch_combines_groups(self):
609        builder = self.make_branch_builder('source', format='2a')
610        builder.start_series()
611        builder.build_snapshot(None, [
612            ('add', ('', b'root-id', 'directory', '')),
613            ('add', ('file', b'file-id', 'file', b'content\n'))],
614            revision_id=b'1')
615        builder.build_snapshot([b'1'], [
616            ('modify', ('file', b'content-2\n'))],
617            revision_id=b'2')
618        builder.finish_series()
619        source = builder.get_branch()
620        target = self.make_repository('target', format='2a')
621        target.fetch(source.repository)
622        target.lock_read()
623        self.addCleanup(target.unlock)
624        details = target.texts._index.get_build_details(
625            [(b'file-id', b'1',), (b'file-id', b'2',)])
626        file_1_details = details[(b'file-id', b'1')]
627        file_2_details = details[(b'file-id', b'2')]
628        # The index, and what to read off disk, should be the same for both
629        # versions of the file.
630        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
631
632    def test_format_pack_compresses_True(self):
633        repo = self.make_repository('repo', format='2a')
634        self.assertTrue(repo._format.pack_compresses)
635
636    def test_inventories_use_chk_map_with_parent_base_dict(self):
637        tree = self.make_branch_and_memory_tree('repo', format="2a")
638        tree.lock_write()
639        tree.add([''], [b'TREE_ROOT'])
640        revid = tree.commit("foo")
641        tree.unlock()
642        tree.lock_read()
643        self.addCleanup(tree.unlock)
644        inv = tree.branch.repository.get_inventory(revid)
645        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
646        inv.parent_id_basename_to_file_id._ensure_root()
647        inv.id_to_entry._ensure_root()
648        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
649        self.assertEqual(
650            65536, inv.parent_id_basename_to_file_id._root_node.maximum_size)
651
652    def test_autopack_unchanged_chk_nodes(self):
653        # at 20 unchanged commits, chk pages are packed that are split into
654        # two groups such that the new pack being made doesn't have all its
655        # pages in the source packs (though they are in the repository).
656        # Use a memory backed repository, we don't need to hit disk for this
657        tree = self.make_branch_and_memory_tree('tree', format='2a')
658        tree.lock_write()
659        self.addCleanup(tree.unlock)
660        tree.add([''], [b'TREE_ROOT'])
661        for pos in range(20):
662            tree.commit(str(pos))
663
664    def test_pack_with_hint(self):
665        tree = self.make_branch_and_memory_tree('tree', format='2a')
666        tree.lock_write()
667        self.addCleanup(tree.unlock)
668        tree.add([''], [b'TREE_ROOT'])
669        # 1 commit to leave untouched
670        tree.commit('1')
671        to_keep = tree.branch.repository._pack_collection.names()
672        # 2 to combine
673        tree.commit('2')
674        tree.commit('3')
675        all = tree.branch.repository._pack_collection.names()
676        combine = list(set(all) - set(to_keep))
677        self.assertLength(3, all)
678        self.assertLength(2, combine)
679        tree.branch.repository.pack(hint=combine)
680        final = tree.branch.repository._pack_collection.names()
681        self.assertLength(2, final)
682        self.assertFalse(combine[0] in final)
683        self.assertFalse(combine[1] in final)
684        self.assertSubset(to_keep, final)
685
686    def test_stream_source_to_gc(self):
687        source = self.make_repository('source', format='2a')
688        target = self.make_repository('target', format='2a')
689        stream = source._get_source(target._format)
690        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
691
692    def test_stream_source_to_non_gc(self):
693        source = self.make_repository('source', format='2a')
694        target = self.make_repository('target', format='rich-root-pack')
695        stream = source._get_source(target._format)
696        # We don't want the child GroupCHKStreamSource
697        self.assertIs(type(stream), vf_repository.StreamSource)
698
699    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
700        source_builder = self.make_branch_builder('source',
701                                                  format='2a')
702        # We have to build a fairly large tree, so that we are sure the chk
703        # pages will have split into multiple pages.
704        entries = [('add', ('', b'a-root-id', 'directory', None))]
705        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
706            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
707                fname = i + j
708                fid = fname.encode('utf-8') + b'-id'
709                content = b'content for %s\n' % (fname.encode('utf-8'),)
710                entries.append(('add', (fname, fid, 'file', content)))
711        source_builder.start_series()
712        source_builder.build_snapshot(None, entries, revision_id=b'rev-1')
713        # Now change a few of them, so we get a few new pages for the second
714        # revision
715        source_builder.build_snapshot([b'rev-1'], [
716            ('modify', ('aa', b'new content for aa-id\n')),
717            ('modify', ('cc', b'new content for cc-id\n')),
718            ('modify', ('zz', b'new content for zz-id\n')),
719            ], revision_id=b'rev-2')
720        source_builder.finish_series()
721        source_branch = source_builder.get_branch()
722        source_branch.lock_read()
723        self.addCleanup(source_branch.unlock)
724        target = self.make_repository('target', format='2a')
725        source = source_branch.repository._get_source(target._format)
726        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
727
728        # On a regular pass, getting the inventories and chk pages for rev-2
729        # would only get the newly created chk pages
730        search = vf_search.SearchResult({b'rev-2'}, {b'rev-1'}, 1,
731                                        {b'rev-2'})
732        simple_chk_records = set()
733        for vf_name, substream in source.get_stream(search):
734            if vf_name == 'chk_bytes':
735                for record in substream:
736                    simple_chk_records.add(record.key)
737            else:
738                for _ in substream:
739                    continue
740        # 3 pages, the root (InternalNode), + 2 pages which actually changed
741        self.assertEqual({(b'sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
742                          (b'sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
743                          (b'sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
744                          (b'sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)},
745                         set(simple_chk_records))
746        # Now, when we do a similar call using 'get_stream_for_missing_keys'
747        # we should get a much larger set of pages.
748        missing = [('inventories', b'rev-2')]
749        full_chk_records = set()
750        for vf_name, substream in source.get_stream_for_missing_keys(missing):
751            if vf_name == 'inventories':
752                for record in substream:
753                    self.assertEqual((b'rev-2',), record.key)
754            elif vf_name == 'chk_bytes':
755                for record in substream:
756                    full_chk_records.add(record.key)
757            else:
758                self.fail('Should not be getting a stream of %s' % (vf_name,))
759        # We have 257 records now. This is because we have 1 root page, and 256
760        # leaf pages in a complete listing.
761        self.assertEqual(257, len(full_chk_records))
762        self.assertSubset(simple_chk_records, full_chk_records)
763
764    def test_inconsistency_fatal(self):
765        repo = self.make_repository('repo', format='2a')
766        self.assertTrue(repo.revisions._index._inconsistency_fatal)
767        self.assertFalse(repo.texts._index._inconsistency_fatal)
768        self.assertFalse(repo.inventories._index._inconsistency_fatal)
769        self.assertFalse(repo.signatures._index._inconsistency_fatal)
770        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
771
772
773class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
774
775    def test_source_to_exact_pack_092(self):
776        source = self.make_repository('source', format='pack-0.92')
777        target = self.make_repository('target', format='pack-0.92')
778        stream_source = source._get_source(target._format)
779        self.assertIsInstance(
780            stream_source, knitpack_repo.KnitPackStreamSource)
781
782    def test_source_to_exact_pack_rich_root_pack(self):
783        source = self.make_repository('source', format='rich-root-pack')
784        target = self.make_repository('target', format='rich-root-pack')
785        stream_source = source._get_source(target._format)
786        self.assertIsInstance(
787            stream_source, knitpack_repo.KnitPackStreamSource)
788
789    def test_source_to_exact_pack_19(self):
790        source = self.make_repository('source', format='1.9')
791        target = self.make_repository('target', format='1.9')
792        stream_source = source._get_source(target._format)
793        self.assertIsInstance(
794            stream_source, knitpack_repo.KnitPackStreamSource)
795
796    def test_source_to_exact_pack_19_rich_root(self):
797        source = self.make_repository('source', format='1.9-rich-root')
798        target = self.make_repository('target', format='1.9-rich-root')
799        stream_source = source._get_source(target._format)
800        self.assertIsInstance(
801            stream_source, knitpack_repo.KnitPackStreamSource)
802
803    def test_source_to_remote_exact_pack_19(self):
804        trans = self.make_smart_server('target')
805        trans.ensure_base()
806        source = self.make_repository('source', format='1.9')
807        target = self.make_repository('target', format='1.9')
808        target = repository.Repository.open(trans.base)
809        stream_source = source._get_source(target._format)
810        self.assertIsInstance(
811            stream_source, knitpack_repo.KnitPackStreamSource)
812
813    def test_stream_source_to_non_exact(self):
814        source = self.make_repository('source', format='pack-0.92')
815        target = self.make_repository('target', format='1.9')
816        stream = source._get_source(target._format)
817        self.assertIs(type(stream), vf_repository.StreamSource)
818
819    def test_stream_source_to_non_exact_rich_root(self):
820        source = self.make_repository('source', format='1.9')
821        target = self.make_repository('target', format='1.9-rich-root')
822        stream = source._get_source(target._format)
823        self.assertIs(type(stream), vf_repository.StreamSource)
824
825    def test_source_to_remote_non_exact_pack_19(self):
826        trans = self.make_smart_server('target')
827        trans.ensure_base()
828        source = self.make_repository('source', format='1.9')
829        target = self.make_repository('target', format='1.6')
830        target = repository.Repository.open(trans.base)
831        stream_source = source._get_source(target._format)
832        self.assertIs(type(stream_source), vf_repository.StreamSource)
833
834    def test_stream_source_to_knit(self):
835        source = self.make_repository('source', format='pack-0.92')
836        target = self.make_repository('target', format='dirstate')
837        stream = source._get_source(target._format)
838        self.assertIs(type(stream), vf_repository.StreamSource)
839
840
841class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
842    """Tests for _find_parent_ids_of_revisions."""
843
844    def setUp(self):
845        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
846        self.builder = self.make_branch_builder('source')
847        self.builder.start_series()
848        self.builder.build_snapshot(
849            None,
850            [('add', ('', b'tree-root', 'directory', None))],
851            revision_id=b'initial')
852        self.repo = self.builder.get_branch().repository
853        self.addCleanup(self.builder.finish_series)
854
855    def assertParentIds(self, expected_result, rev_set):
856        self.assertEqual(
857            sorted(expected_result),
858            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
859
860    def test_simple(self):
861        self.builder.build_snapshot(None, [], revision_id=b'revid1')
862        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
863        rev_set = [b'revid2']
864        self.assertParentIds([b'revid1'], rev_set)
865
866    def test_not_first_parent(self):
867        self.builder.build_snapshot(None, [], revision_id=b'revid1')
868        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
869        self.builder.build_snapshot([b'revid2'], [], revision_id=b'revid3')
870        rev_set = [b'revid3', b'revid2']
871        self.assertParentIds([b'revid1'], rev_set)
872
873    def test_not_null(self):
874        rev_set = [b'initial']
875        self.assertParentIds([], rev_set)
876
877    def test_not_null_set(self):
878        self.builder.build_snapshot(None, [], revision_id=b'revid1')
879        rev_set = [_mod_revision.NULL_REVISION]
880        self.assertParentIds([], rev_set)
881
882    def test_ghost(self):
883        self.builder.build_snapshot(None, [], revision_id=b'revid1')
884        rev_set = [b'ghost', b'revid1']
885        self.assertParentIds([b'initial'], rev_set)
886
887    def test_ghost_parent(self):
888        self.builder.build_snapshot(None, [], revision_id=b'revid1')
889        self.builder.build_snapshot(
890            [b'revid1', b'ghost'], [], revision_id=b'revid2')
891        rev_set = [b'revid2', b'revid1']
892        self.assertParentIds([b'ghost', b'initial'], rev_set)
893
894    def test_righthand_parent(self):
895        self.builder.build_snapshot(None, [], revision_id=b'revid1')
896        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2a')
897        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2b')
898        self.builder.build_snapshot([b'revid2a', b'revid2b'], [],
899                                    revision_id=b'revid3')
900        rev_set = [b'revid3', b'revid2a']
901        self.assertParentIds([b'revid1', b'revid2b'], rev_set)
902
903
904class TestWithBrokenRepo(TestCaseWithTransport):
905    """These tests seem to be more appropriate as interface tests?"""
906
907    def make_broken_repository(self):
908        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
909        # parent references" branch which is due to land in bzr.dev soon.  Once
910        # it does, this duplication should be removed.
911        repo = self.make_repository('broken-repo')
912        cleanups = []
913        try:
914            repo.lock_write()
915            cleanups.append(repo.unlock)
916            repo.start_write_group()
917            cleanups.append(repo.commit_write_group)
918            # make rev1a: A well-formed revision, containing 'file1'
919            inv = inventory.Inventory(revision_id=b'rev1a')
920            inv.root.revision = b'rev1a'
921            self.add_file(repo, inv, 'file1', b'rev1a', [])
922            repo.texts.add_lines((inv.root.file_id, b'rev1a'), [], [])
923            repo.add_inventory(b'rev1a', inv, [])
924            revision = _mod_revision.Revision(
925                b'rev1a',
926                committer='jrandom@example.com', timestamp=0,
927                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
928            repo.add_revision(b'rev1a', revision, inv)
929
930            # make rev1b, which has no Revision, but has an Inventory, and
931            # file1
932            inv = inventory.Inventory(revision_id=b'rev1b')
933            inv.root.revision = b'rev1b'
934            self.add_file(repo, inv, 'file1', b'rev1b', [])
935            repo.add_inventory(b'rev1b', inv, [])
936
937            # make rev2, with file1 and file2
938            # file2 is sane
939            # file1 has 'rev1b' as an ancestor, even though this is not
940            # mentioned by 'rev1a', making it an unreferenced ancestor
941            inv = inventory.Inventory()
942            self.add_file(repo, inv, 'file1', b'rev2', [b'rev1a', b'rev1b'])
943            self.add_file(repo, inv, 'file2', b'rev2', [])
944            self.add_revision(repo, b'rev2', inv, [b'rev1a'])
945
946            # make ghost revision rev1c
947            inv = inventory.Inventory()
948            self.add_file(repo, inv, 'file2', b'rev1c', [])
949
950            # make rev3 with file2
951            # file2 refers to 'rev1c', which is a ghost in this repository, so
952            # file2 cannot have rev1c as its ancestor.
953            inv = inventory.Inventory()
954            self.add_file(repo, inv, 'file2', b'rev3', [b'rev1c'])
955            self.add_revision(repo, b'rev3', inv, [b'rev1c'])
956            return repo
957        finally:
958            for cleanup in reversed(cleanups):
959                cleanup()
960
961    def add_revision(self, repo, revision_id, inv, parent_ids):
962        inv.revision_id = revision_id
963        inv.root.revision = revision_id
964        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
965        repo.add_inventory(revision_id, inv, parent_ids)
966        revision = _mod_revision.Revision(
967            revision_id,
968            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
969            timezone=0, message='foo', parent_ids=parent_ids)
970        repo.add_revision(revision_id, revision, inv)
971
972    def add_file(self, repo, inv, filename, revision, parents):
973        file_id = filename.encode('utf-8') + b'-id'
974        content = [b'line\n']
975        entry = inventory.InventoryFile(file_id, filename, b'TREE_ROOT')
976        entry.revision = revision
977        entry.text_sha1 = osutils.sha_strings(content)
978        entry.text_size = 0
979        inv.add(entry)
980        text_key = (file_id, revision)
981        parent_keys = [(file_id, parent) for parent in parents]
982        repo.texts.add_lines(text_key, parent_keys, content)
983
984    def test_insert_from_broken_repo(self):
985        """Inserting a data stream from a broken repository won't silently
986        corrupt the target repository.
987        """
988        broken_repo = self.make_broken_repository()
989        empty_repo = self.make_repository('empty-repo')
990        try:
991            empty_repo.fetch(broken_repo)
992        except (errors.RevisionNotPresent, errors.BzrCheckError):
993            # Test successful: compression parent not being copied leads to
994            # error.
995            return
996        empty_repo.lock_read()
997        self.addCleanup(empty_repo.unlock)
998        text = next(empty_repo.texts.get_record_stream(
999            [(b'file2-id', b'rev3')], 'topological', True))
1000        self.assertEqual(b'line\n', text.get_bytes_as('fulltext'))
1001
1002
1003class TestRepositoryPackCollection(TestCaseWithTransport):
1004
1005    def get_format(self):
1006        return controldir.format_registry.make_controldir('pack-0.92')
1007
1008    def get_packs(self):
1009        format = self.get_format()
1010        repo = self.make_repository('.', format=format)
1011        return repo._pack_collection
1012
1013    def make_packs_and_alt_repo(self, write_lock=False):
1014        """Create a pack repo with 3 packs, and access it via a second repo."""
1015        tree = self.make_branch_and_tree('.', format=self.get_format())
1016        tree.lock_write()
1017        self.addCleanup(tree.unlock)
1018        rev1 = tree.commit('one')
1019        rev2 = tree.commit('two')
1020        rev3 = tree.commit('three')
1021        r = repository.Repository.open('.')
1022        if write_lock:
1023            r.lock_write()
1024        else:
1025            r.lock_read()
1026        self.addCleanup(r.unlock)
1027        packs = r._pack_collection
1028        packs.ensure_loaded()
1029        return tree, r, packs, [rev1, rev2, rev3]
1030
1031    def test__clear_obsolete_packs(self):
1032        packs = self.get_packs()
1033        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
1034        obsolete_pack_trans.put_bytes('a-pack.pack', b'content\n')
1035        obsolete_pack_trans.put_bytes('a-pack.rix', b'content\n')
1036        obsolete_pack_trans.put_bytes('a-pack.iix', b'content\n')
1037        obsolete_pack_trans.put_bytes('another-pack.pack', b'foo\n')
1038        obsolete_pack_trans.put_bytes('not-a-pack.rix', b'foo\n')
1039        res = packs._clear_obsolete_packs()
1040        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
1041        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
1042
1043    def test__clear_obsolete_packs_preserve(self):
1044        packs = self.get_packs()
1045        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
1046        obsolete_pack_trans.put_bytes('a-pack.pack', b'content\n')
1047        obsolete_pack_trans.put_bytes('a-pack.rix', b'content\n')
1048        obsolete_pack_trans.put_bytes('a-pack.iix', b'content\n')
1049        obsolete_pack_trans.put_bytes('another-pack.pack', b'foo\n')
1050        obsolete_pack_trans.put_bytes('not-a-pack.rix', b'foo\n')
1051        res = packs._clear_obsolete_packs(preserve={'a-pack'})
1052        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
1053        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
1054                         sorted(obsolete_pack_trans.list_dir('.')))
1055
1056    def test__max_pack_count(self):
1057        """The maximum pack count is a function of the number of revisions."""
1058        # no revisions - one pack, so that we can have a revision free repo
1059        # without it blowing up
1060        packs = self.get_packs()
1061        self.assertEqual(1, packs._max_pack_count(0))
1062        # after that the sum of the digits, - check the first 1-9
1063        self.assertEqual(1, packs._max_pack_count(1))
1064        self.assertEqual(2, packs._max_pack_count(2))
1065        self.assertEqual(3, packs._max_pack_count(3))
1066        self.assertEqual(4, packs._max_pack_count(4))
1067        self.assertEqual(5, packs._max_pack_count(5))
1068        self.assertEqual(6, packs._max_pack_count(6))
1069        self.assertEqual(7, packs._max_pack_count(7))
1070        self.assertEqual(8, packs._max_pack_count(8))
1071        self.assertEqual(9, packs._max_pack_count(9))
1072        # check the boundary cases with two digits for the next decade
1073        self.assertEqual(1, packs._max_pack_count(10))
1074        self.assertEqual(2, packs._max_pack_count(11))
1075        self.assertEqual(10, packs._max_pack_count(19))
1076        self.assertEqual(2, packs._max_pack_count(20))
1077        self.assertEqual(3, packs._max_pack_count(21))
1078        # check some arbitrary big numbers
1079        self.assertEqual(25, packs._max_pack_count(112894))
1080
1081    def test_repr(self):
1082        packs = self.get_packs()
1083        self.assertContainsRe(repr(packs),
1084                              'RepositoryPackCollection(.*Repository(.*))')
1085
1086    def test__obsolete_packs(self):
1087        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1088        names = packs.names()
1089        pack = packs.get_pack_by_name(names[0])
1090        # Schedule this one for removal
1091        packs._remove_pack_from_memory(pack)
1092        # Simulate a concurrent update by renaming the .pack file and one of
1093        # the indices
1094        packs.transport.rename('packs/%s.pack' % (names[0],),
1095                               'obsolete_packs/%s.pack' % (names[0],))
1096        packs.transport.rename('indices/%s.iix' % (names[0],),
1097                               'obsolete_packs/%s.iix' % (names[0],))
1098        # Now trigger the obsoletion, and ensure that all the remaining files
1099        # are still renamed
1100        packs._obsolete_packs([pack])
1101        self.assertEqual([n + '.pack' for n in names[1:]],
1102                         sorted(packs._pack_transport.list_dir('.')))
1103        # names[0] should not be present in the index anymore
1104        self.assertEqual(names[1:],
1105                         sorted({osutils.splitext(n)[0] for n in
1106                                 packs._index_transport.list_dir('.')}))
1107
1108    def test__obsolete_packs_missing_directory(self):
1109        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1110        r.control_transport.rmdir('obsolete_packs')
1111        names = packs.names()
1112        pack = packs.get_pack_by_name(names[0])
1113        # Schedule this one for removal
1114        packs._remove_pack_from_memory(pack)
1115        # Now trigger the obsoletion, and ensure that all the remaining files
1116        # are still renamed
1117        packs._obsolete_packs([pack])
1118        self.assertEqual([n + '.pack' for n in names[1:]],
1119                         sorted(packs._pack_transport.list_dir('.')))
1120        # names[0] should not be present in the index anymore
1121        self.assertEqual(names[1:],
1122                         sorted({osutils.splitext(n)[0] for n in
1123                                 packs._index_transport.list_dir('.')}))
1124
1125    def test_pack_distribution_zero(self):
1126        packs = self.get_packs()
1127        self.assertEqual([0], packs.pack_distribution(0))
1128
1129    def test_ensure_loaded_unlocked(self):
1130        packs = self.get_packs()
1131        self.assertRaises(errors.ObjectNotLocked,
1132                          packs.ensure_loaded)
1133
1134    def test_pack_distribution_one_to_nine(self):
1135        packs = self.get_packs()
1136        self.assertEqual([1],
1137                         packs.pack_distribution(1))
1138        self.assertEqual([1, 1],
1139                         packs.pack_distribution(2))
1140        self.assertEqual([1, 1, 1],
1141                         packs.pack_distribution(3))
1142        self.assertEqual([1, 1, 1, 1],
1143                         packs.pack_distribution(4))
1144        self.assertEqual([1, 1, 1, 1, 1],
1145                         packs.pack_distribution(5))
1146        self.assertEqual([1, 1, 1, 1, 1, 1],
1147                         packs.pack_distribution(6))
1148        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
1149                         packs.pack_distribution(7))
1150        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
1151                         packs.pack_distribution(8))
1152        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
1153                         packs.pack_distribution(9))
1154
1155    def test_pack_distribution_stable_at_boundaries(self):
1156        """When there are multi-rev packs the counts are stable."""
1157        packs = self.get_packs()
1158        # in 10s:
1159        self.assertEqual([10], packs.pack_distribution(10))
1160        self.assertEqual([10, 1], packs.pack_distribution(11))
1161        self.assertEqual([10, 10], packs.pack_distribution(20))
1162        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
1163        # 100s
1164        self.assertEqual([100], packs.pack_distribution(100))
1165        self.assertEqual([100, 1], packs.pack_distribution(101))
1166        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
1167        self.assertEqual([100, 100], packs.pack_distribution(200))
1168        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
1169        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
1170
1171    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
1172        packs = self.get_packs()
1173        existing_packs = [(2000, "big"), (9, "medium")]
1174        # rev count - 2009 -> 2x1000 + 9x1
1175        pack_operations = packs.plan_autopack_combinations(
1176            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
1177        self.assertEqual([], pack_operations)
1178
1179    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
1180        packs = self.get_packs()
1181        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
1182        # rev count - 2010 -> 2x1000 + 1x10
1183        pack_operations = packs.plan_autopack_combinations(
1184            existing_packs, [1000, 1000, 10])
1185        self.assertEqual([], pack_operations)
1186
1187    def test_plan_pack_operations_2010_combines_smallest_two(self):
1188        packs = self.get_packs()
1189        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
1190                          (1, "single1")]
1191        # rev count - 2010 -> 2x1000 + 1x10 (3)
1192        pack_operations = packs.plan_autopack_combinations(
1193            existing_packs, [1000, 1000, 10])
1194        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)
1195
1196    def test_plan_pack_operations_creates_a_single_op(self):
1197        packs = self.get_packs()
1198        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
1199                          (10, 'e'), (6, 'f'), (4, 'g')]
1200        # rev count 150 -> 1x100 and 5x10
1201        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
1202        # be combined into a single 120 size pack, and the 6 & 4 would
1203        # becombined into a size 10 pack. However, if we have to rewrite them,
1204        # we save a pack file with no increased I/O by putting them into the
1205        # same file.
1206        distribution = packs.pack_distribution(150)
1207        pack_operations = packs.plan_autopack_combinations(existing_packs,
1208                                                           distribution)
1209        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)
1210
1211    def test_all_packs_none(self):
1212        format = self.get_format()
1213        tree = self.make_branch_and_tree('.', format=format)
1214        tree.lock_read()
1215        self.addCleanup(tree.unlock)
1216        packs = tree.branch.repository._pack_collection
1217        packs.ensure_loaded()
1218        self.assertEqual([], packs.all_packs())
1219
1220    def test_all_packs_one(self):
1221        format = self.get_format()
1222        tree = self.make_branch_and_tree('.', format=format)
1223        tree.commit('start')
1224        tree.lock_read()
1225        self.addCleanup(tree.unlock)
1226        packs = tree.branch.repository._pack_collection
1227        packs.ensure_loaded()
1228        self.assertEqual([
1229            packs.get_pack_by_name(packs.names()[0])],
1230            packs.all_packs())
1231
1232    def test_all_packs_two(self):
1233        format = self.get_format()
1234        tree = self.make_branch_and_tree('.', format=format)
1235        tree.commit('start')
1236        tree.commit('continue')
1237        tree.lock_read()
1238        self.addCleanup(tree.unlock)
1239        packs = tree.branch.repository._pack_collection
1240        packs.ensure_loaded()
1241        self.assertEqual([
1242            packs.get_pack_by_name(packs.names()[0]),
1243            packs.get_pack_by_name(packs.names()[1]),
1244            ], packs.all_packs())
1245
1246    def test_get_pack_by_name(self):
1247        format = self.get_format()
1248        tree = self.make_branch_and_tree('.', format=format)
1249        tree.commit('start')
1250        tree.lock_read()
1251        self.addCleanup(tree.unlock)
1252        packs = tree.branch.repository._pack_collection
1253        packs.reset()
1254        packs.ensure_loaded()
1255        name = packs.names()[0]
1256        pack_1 = packs.get_pack_by_name(name)
1257        # the pack should be correctly initialised
1258        sizes = packs._names[name]
1259        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
1260        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
1261        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
1262        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
1263        self.assertEqual(
1264            pack_repo.ExistingPack(
1265                packs._pack_transport, name, rev_index, inv_index, txt_index,
1266                sig_index), pack_1)
1267        # and the same instance should be returned on successive calls.
1268        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
1269
1270    def test_reload_pack_names_new_entry(self):
1271        tree, r, packs, revs = self.make_packs_and_alt_repo()
1272        names = packs.names()
1273        # Add a new pack file into the repository
1274        rev4 = tree.commit('four')
1275        new_names = tree.branch.repository._pack_collection.names()
1276        new_name = set(new_names).difference(names)
1277        self.assertEqual(1, len(new_name))
1278        new_name = new_name.pop()
1279        # The old collection hasn't noticed yet
1280        self.assertEqual(names, packs.names())
1281        self.assertTrue(packs.reload_pack_names())
1282        self.assertEqual(new_names, packs.names())
1283        # And the repository can access the new revision
1284        self.assertEqual({rev4: (revs[-1],)}, r.get_parent_map([rev4]))
1285        self.assertFalse(packs.reload_pack_names())
1286
1287    def test_reload_pack_names_added_and_removed(self):
1288        tree, r, packs, revs = self.make_packs_and_alt_repo()
1289        names = packs.names()
1290        # Now repack the whole thing
1291        tree.branch.repository.pack()
1292        new_names = tree.branch.repository._pack_collection.names()
1293        # The other collection hasn't noticed yet
1294        self.assertEqual(names, packs.names())
1295        self.assertTrue(packs.reload_pack_names())
1296        self.assertEqual(new_names, packs.names())
1297        self.assertEqual({revs[-1]: (revs[-2],)}, r.get_parent_map([revs[-1]]))
1298        self.assertFalse(packs.reload_pack_names())
1299
1300    def test_reload_pack_names_preserves_pending(self):
1301        # TODO: Update this to also test for pending-deleted names
1302        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1303        # We will add one pack (via start_write_group + insert_record_stream),
1304        # and remove another pack (via _remove_pack_from_memory)
1305        orig_names = packs.names()
1306        orig_at_load = packs._packs_at_load
1307        to_remove_name = next(iter(orig_names))
1308        r.start_write_group()
1309        self.addCleanup(r.abort_write_group)
1310        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
1311            (b'text', b'rev'), (), None, b'content\n')])
1312        new_pack = packs._new_pack
1313        self.assertTrue(new_pack.data_inserted())
1314        new_pack.finish()
1315        packs.allocate(new_pack)
1316        packs._new_pack = None
1317        removed_pack = packs.get_pack_by_name(to_remove_name)
1318        packs._remove_pack_from_memory(removed_pack)
1319        names = packs.names()
1320        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1321        new_names = {x[0] for x in new_nodes}
1322        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
1323        self.assertEqual(set(names) - set(orig_names), new_names)
1324        self.assertEqual({new_pack.name}, new_names)
1325        self.assertEqual([to_remove_name],
1326                         sorted([x[0] for x in deleted_nodes]))
1327        packs.reload_pack_names()
1328        reloaded_names = packs.names()
1329        self.assertEqual(orig_at_load, packs._packs_at_load)
1330        self.assertEqual(names, reloaded_names)
1331        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1332        new_names = {x[0] for x in new_nodes}
1333        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
1334        self.assertEqual(set(names) - set(orig_names), new_names)
1335        self.assertEqual({new_pack.name}, new_names)
1336        self.assertEqual([to_remove_name],
1337                         sorted([x[0] for x in deleted_nodes]))
1338
1339    def test_autopack_obsoletes_new_pack(self):
1340        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1341        packs._max_pack_count = lambda x: 1
1342        packs.pack_distribution = lambda x: [10]
1343        r.start_write_group()
1344        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
1345            (b'bogus-rev',), (), None, b'bogus-content\n')])
1346        # This should trigger an autopack, which will combine everything into a
1347        # single pack file.
1348        r.commit_write_group()
1349        names = packs.names()
1350        self.assertEqual(1, len(names))
1351        self.assertEqual([names[0] + '.pack'],
1352                         packs._pack_transport.list_dir('.'))
1353
1354    def test_autopack_reloads_and_stops(self):
1355        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1356        # After we have determined what needs to be autopacked, trigger a
1357        # full-pack via the other repo which will cause us to re-evaluate and
1358        # decide we don't need to do anything
1359        orig_execute = packs._execute_pack_operations
1360
1361        def _munged_execute_pack_ops(*args, **kwargs):
1362            tree.branch.repository.pack()
1363            return orig_execute(*args, **kwargs)
1364        packs._execute_pack_operations = _munged_execute_pack_ops
1365        packs._max_pack_count = lambda x: 1
1366        packs.pack_distribution = lambda x: [10]
1367        self.assertFalse(packs.autopack())
1368        self.assertEqual(1, len(packs.names()))
1369        self.assertEqual(tree.branch.repository._pack_collection.names(),
1370                         packs.names())
1371
1372    def test__save_pack_names(self):
1373        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1374        names = packs.names()
1375        pack = packs.get_pack_by_name(names[0])
1376        packs._remove_pack_from_memory(pack)
1377        packs._save_pack_names(obsolete_packs=[pack])
1378        cur_packs = packs._pack_transport.list_dir('.')
1379        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
1380        # obsolete_packs will also have stuff like .rix and .iix present.
1381        obsolete_packs = packs.transport.list_dir('obsolete_packs')
1382        obsolete_names = {osutils.splitext(n)[0] for n in obsolete_packs}
1383        self.assertEqual([pack.name], sorted(obsolete_names))
1384
1385    def test__save_pack_names_already_obsoleted(self):
1386        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1387        names = packs.names()
1388        pack = packs.get_pack_by_name(names[0])
1389        packs._remove_pack_from_memory(pack)
1390        # We are going to simulate a concurrent autopack by manually obsoleting
1391        # the pack directly.
1392        packs._obsolete_packs([pack])
1393        packs._save_pack_names(clear_obsolete_packs=True,
1394                               obsolete_packs=[pack])
1395        cur_packs = packs._pack_transport.list_dir('.')
1396        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
1397        # Note that while we set clear_obsolete_packs=True, it should not
1398        # delete a pack file that we have also scheduled for obsoletion.
1399        obsolete_packs = packs.transport.list_dir('obsolete_packs')
1400        obsolete_names = {osutils.splitext(n)[0] for n in obsolete_packs}
1401        self.assertEqual([pack.name], sorted(obsolete_names))
1402
1403    def test_pack_no_obsolete_packs_directory(self):
1404        """Bug #314314, don't fail if obsolete_packs directory does
1405        not exist."""
1406        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1407        r.control_transport.rmdir('obsolete_packs')
1408        packs._clear_obsolete_packs()
1409
1410
1411class TestPack(TestCaseWithTransport):
1412    """Tests for the Pack object."""
1413
1414    def assertCurrentlyEqual(self, left, right):
1415        self.assertTrue(left == right)
1416        self.assertTrue(right == left)
1417        self.assertFalse(left != right)
1418        self.assertFalse(right != left)
1419
1420    def assertCurrentlyNotEqual(self, left, right):
1421        self.assertFalse(left == right)
1422        self.assertFalse(right == left)
1423        self.assertTrue(left != right)
1424        self.assertTrue(right != left)
1425
1426    def test___eq____ne__(self):
1427        left = pack_repo.ExistingPack('', '', '', '', '', '')
1428        right = pack_repo.ExistingPack('', '', '', '', '', '')
1429        self.assertCurrentlyEqual(left, right)
1430        # change all attributes and ensure equality changes as we do.
1431        left.revision_index = 'a'
1432        self.assertCurrentlyNotEqual(left, right)
1433        right.revision_index = 'a'
1434        self.assertCurrentlyEqual(left, right)
1435        left.inventory_index = 'a'
1436        self.assertCurrentlyNotEqual(left, right)
1437        right.inventory_index = 'a'
1438        self.assertCurrentlyEqual(left, right)
1439        left.text_index = 'a'
1440        self.assertCurrentlyNotEqual(left, right)
1441        right.text_index = 'a'
1442        self.assertCurrentlyEqual(left, right)
1443        left.signature_index = 'a'
1444        self.assertCurrentlyNotEqual(left, right)
1445        right.signature_index = 'a'
1446        self.assertCurrentlyEqual(left, right)
1447        left.name = 'a'
1448        self.assertCurrentlyNotEqual(left, right)
1449        right.name = 'a'
1450        self.assertCurrentlyEqual(left, right)
1451        left.transport = 'a'
1452        self.assertCurrentlyNotEqual(left, right)
1453        right.transport = 'a'
1454        self.assertCurrentlyEqual(left, right)
1455
1456    def test_file_name(self):
1457        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
1458        self.assertEqual('a_name.pack', pack.file_name())
1459
1460
1461class TestNewPack(TestCaseWithTransport):
1462    """Tests for pack_repo.NewPack."""
1463
1464    def test_new_instance_attributes(self):
1465        upload_transport = self.get_transport('upload')
1466        pack_transport = self.get_transport('pack')
1467        index_transport = self.get_transport('index')
1468        upload_transport.mkdir('.')
1469        collection = pack_repo.RepositoryPackCollection(
1470            repo=None,
1471            transport=self.get_transport('.'),
1472            index_transport=index_transport,
1473            upload_transport=upload_transport,
1474            pack_transport=pack_transport,
1475            index_builder_class=BTreeBuilder,
1476            index_class=BTreeGraphIndex,
1477            use_chk_index=False)
1478        pack = pack_repo.NewPack(collection)
1479        self.addCleanup(pack.abort)  # Make sure the write stream gets closed
1480        self.assertIsInstance(pack.revision_index, BTreeBuilder)
1481        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
1482        self.assertIsInstance(pack._hash, type(osutils.md5()))
1483        self.assertTrue(pack.upload_transport is upload_transport)
1484        self.assertTrue(pack.index_transport is index_transport)
1485        self.assertTrue(pack.pack_transport is pack_transport)
1486        self.assertEqual(None, pack.index_sizes)
1487        self.assertEqual(20, len(pack.random_name))
1488        self.assertIsInstance(pack.random_name, str)
1489        self.assertIsInstance(pack.start_time, float)
1490
1491
1492class TestPacker(TestCaseWithTransport):
1493    """Tests for the packs repository Packer class."""
1494
1495    def test_pack_optimizes_pack_order(self):
1496        builder = self.make_branch_builder('.', format="1.9")
1497        builder.start_series()
1498        builder.build_snapshot(None, [
1499            ('add', ('', b'root-id', 'directory', None)),
1500            ('add', ('f', b'f-id', 'file', b'content\n'))],
1501            revision_id=b'A')
1502        builder.build_snapshot([b'A'],
1503                               [('modify', ('f', b'new-content\n'))],
1504                               revision_id=b'B')
1505        builder.build_snapshot([b'B'],
1506                               [('modify', ('f', b'third-content\n'))],
1507                               revision_id=b'C')
1508        builder.build_snapshot([b'C'],
1509                               [('modify', ('f', b'fourth-content\n'))],
1510                               revision_id=b'D')
1511        b = builder.get_branch()
1512        b.lock_read()
1513        builder.finish_series()
1514        self.addCleanup(b.unlock)
1515        # At this point, we should have 4 pack files available
1516        # Because of how they were built, they correspond to
1517        # ['D', 'C', 'B', 'A']
1518        packs = b.repository._pack_collection.packs
1519        packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
1520                                          packs, 'testing',
1521                                          revision_ids=[b'B', b'C'])
1522        # Now, when we are copying the B & C revisions, their pack files should
1523        # be moved to the front of the stack
1524        # The new ordering moves B & C to the front of the .packs attribute,
1525        # and leaves the others in the original order.
1526        new_packs = [packs[1], packs[2], packs[0], packs[3]]
1527        packer.pack()
1528        self.assertEqual(new_packs, packer.packs)
1529
1530
1531class TestOptimisingPacker(TestCaseWithTransport):
1532    """Tests for the OptimisingPacker class."""
1533
1534    def get_pack_collection(self):
1535        repo = self.make_repository('.')
1536        return repo._pack_collection
1537
1538    def test_open_pack_will_optimise(self):
1539        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
1540                                                    [], '.test')
1541        new_pack = packer.open_pack()
1542        self.addCleanup(new_pack.abort)  # ensure cleanup
1543        self.assertIsInstance(new_pack, pack_repo.NewPack)
1544        self.assertTrue(new_pack.revision_index._optimize_for_size)
1545        self.assertTrue(new_pack.inventory_index._optimize_for_size)
1546        self.assertTrue(new_pack.text_index._optimize_for_size)
1547        self.assertTrue(new_pack.signature_index._optimize_for_size)
1548
1549
1550class TestGCCHKPacker(TestCaseWithTransport):
1551
1552    def make_abc_branch(self):
1553        builder = self.make_branch_builder('source')
1554        builder.start_series()
1555        builder.build_snapshot(None, [
1556            ('add', ('', b'root-id', 'directory', None)),
1557            ('add', ('file', b'file-id', 'file', b'content\n')),
1558            ], revision_id=b'A')
1559        builder.build_snapshot([b'A'], [
1560            ('add', ('dir', b'dir-id', 'directory', None))],
1561            revision_id=b'B')
1562        builder.build_snapshot([b'B'], [
1563            ('modify', ('file', b'new content\n'))],
1564            revision_id=b'C')
1565        builder.finish_series()
1566        return builder.get_branch()
1567
1568    def make_branch_with_disjoint_inventory_and_revision(self):
1569        """a repo with separate packs for a revisions Revision and Inventory.
1570
1571        There will be one pack file that holds the Revision content, and one
1572        for the Inventory content.
1573
1574        :return: (repository,
1575                  pack_name_with_rev_A_Revision,
1576                  pack_name_with_rev_A_Inventory,
1577                  pack_name_with_rev_C_content)
1578        """
1579        b_source = self.make_abc_branch()
1580        b_base = b_source.controldir.sprout(
1581            'base', revision_id=b'A').open_branch()
1582        b_stacked = b_base.controldir.sprout(
1583            'stacked', stacked=True).open_branch()
1584        b_stacked.lock_write()
1585        self.addCleanup(b_stacked.unlock)
1586        b_stacked.fetch(b_source, b'B')
1587        # Now re-open the stacked repo directly (no fallbacks) so that we can
1588        # fill in the A rev.
1589        repo_not_stacked = b_stacked.controldir.open_repository()
1590        repo_not_stacked.lock_write()
1591        self.addCleanup(repo_not_stacked.unlock)
1592        # Now we should have a pack file with A's inventory, but not its
1593        # Revision
1594        self.assertEqual([(b'A',), (b'B',)],
1595                         sorted(repo_not_stacked.inventories.keys()))
1596        self.assertEqual([(b'B',)],
1597                         sorted(repo_not_stacked.revisions.keys()))
1598        stacked_pack_names = repo_not_stacked._pack_collection.names()
1599        # We have a couple names here, figure out which has A's inventory
1600        for name in stacked_pack_names:
1601            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
1602            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
1603            if (b'A',) in keys:
1604                inv_a_pack_name = name
1605                break
1606        else:
1607            self.fail('Could not find pack containing A\'s inventory')
1608        repo_not_stacked.fetch(b_source.repository, b'A')
1609        self.assertEqual([(b'A',), (b'B',)],
1610                         sorted(repo_not_stacked.revisions.keys()))
1611        new_pack_names = set(repo_not_stacked._pack_collection.names())
1612        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
1613        self.assertEqual(1, len(rev_a_pack_names))
1614        rev_a_pack_name = list(rev_a_pack_names)[0]
1615        # Now fetch 'C', so we have a couple pack files to join
1616        repo_not_stacked.fetch(b_source.repository, b'C')
1617        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
1618        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
1619        self.assertEqual(1, len(rev_c_pack_names))
1620        rev_c_pack_name = list(rev_c_pack_names)[0]
1621        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
1622                rev_c_pack_name)
1623
1624    def test_pack_with_distant_inventories(self):
1625        # See https://bugs.launchpad.net/bzr/+bug/437003
1626        # When repacking, it is possible to have an inventory in a different
1627        # pack file than the associated revision. An autopack can then come
1628        # along, and miss that inventory, and complain.
1629        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
1630         ) = self.make_branch_with_disjoint_inventory_and_revision()
1631        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
1632        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
1633        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
1634                                                [a_pack, c_pack], '.test-pack')
1635        # This would raise ValueError in bug #437003, but should not raise an
1636        # error once fixed.
1637        packer.pack()
1638
1639    def test_pack_with_missing_inventory(self):
1640        # Similar to test_pack_with_missing_inventory, but this time, we force
1641        # the A inventory to actually be gone from the repository.
1642        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
1643         ) = self.make_branch_with_disjoint_inventory_and_revision()
1644        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
1645        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
1646        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
1647                                                repo._pack_collection.all_packs(), '.test-pack')
1648        e = self.assertRaises(ValueError, packer.pack)
1649        packer.new_pack.abort()
1650        self.assertContainsRe(str(e),
1651                              r"We are missing inventories for revisions: .*'A'")
1652
1653
1654class TestCrossFormatPacks(TestCaseWithTransport):
1655
1656    def log_pack(self, hint=None):
1657        self.calls.append(('pack', hint))
1658        self.orig_pack(hint=hint)
1659        if self.expect_hint:
1660            self.assertTrue(hint)
1661
1662    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
1663        self.expect_hint = expect_pack_called
1664        self.calls = []
1665        source_tree = self.make_branch_and_tree('src', format=src_fmt)
1666        source_tree.lock_write()
1667        self.addCleanup(source_tree.unlock)
1668        tip = source_tree.commit('foo')
1669        target = self.make_repository('target', format=target_fmt)
1670        target.lock_write()
1671        self.addCleanup(target.unlock)
1672        source = source_tree.branch.repository._get_source(target._format)
1673        self.orig_pack = target.pack
1674        self.overrideAttr(target, "pack", self.log_pack)
1675        search = target.search_missing_revision_ids(
1676            source_tree.branch.repository, revision_ids=[tip])
1677        stream = source.get_stream(search)
1678        from_format = source_tree.branch.repository._format
1679        sink = target._get_sink()
1680        sink.insert_stream(stream, from_format, [])
1681        if expect_pack_called:
1682            self.assertLength(1, self.calls)
1683        else:
1684            self.assertLength(0, self.calls)
1685
1686    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
1687        self.expect_hint = expect_pack_called
1688        self.calls = []
1689        source_tree = self.make_branch_and_tree('src', format=src_fmt)
1690        source_tree.lock_write()
1691        self.addCleanup(source_tree.unlock)
1692        source_tree.commit('foo')
1693        target = self.make_repository('target', format=target_fmt)
1694        target.lock_write()
1695        self.addCleanup(target.unlock)
1696        source = source_tree.branch.repository
1697        self.orig_pack = target.pack
1698        self.overrideAttr(target, "pack", self.log_pack)
1699        target.fetch(source)
1700        if expect_pack_called:
1701            self.assertLength(1, self.calls)
1702        else:
1703            self.assertLength(0, self.calls)
1704
1705    def test_sink_format_hint_no(self):
1706        # When the target format says packing makes no difference, pack is not
1707        # called.
1708        self.run_stream('1.9', 'rich-root-pack', False)
1709
1710    def test_sink_format_hint_yes(self):
1711        # When the target format says packing makes a difference, pack is
1712        # called.
1713        self.run_stream('1.9', '2a', True)
1714
1715    def test_sink_format_same_no(self):
1716        # When the formats are the same, pack is not called.
1717        self.run_stream('2a', '2a', False)
1718
1719    def test_IDS_format_hint_no(self):
1720        # When the target format says packing makes no difference, pack is not
1721        # called.
1722        self.run_fetch('1.9', 'rich-root-pack', False)
1723
1724    def test_IDS_format_hint_yes(self):
1725        # When the target format says packing makes a difference, pack is
1726        # called.
1727        self.run_fetch('1.9', '2a', True)
1728
1729    def test_IDS_format_same_no(self):
1730        # When the formats are the same, pack is not called.
1731        self.run_fetch('2a', '2a', False)
1732
1733
1734class Test_LazyListJoin(tests.TestCase):
1735
1736    def test__repr__(self):
1737        lazy = repository._LazyListJoin(['a'], ['b'])
1738        self.assertEqual("breezy.repository._LazyListJoin((['a'], ['b']))",
1739                         repr(lazy))
1740
1741
1742class TestFeatures(tests.TestCaseWithTransport):
1743
1744    def test_open_with_present_feature(self):
1745        self.addCleanup(
1746            bzrrepository.RepositoryFormatMetaDir.unregister_feature,
1747            b"makes-cheese-sandwich")
1748        bzrrepository.RepositoryFormatMetaDir.register_feature(
1749            b"makes-cheese-sandwich")
1750        repo = self.make_repository('.')
1751        repo.lock_write()
1752        repo._format.features[b"makes-cheese-sandwich"] = b"required"
1753        repo._format.check_support_status(False)
1754        repo.unlock()
1755
1756    def test_open_with_missing_required_feature(self):
1757        repo = self.make_repository('.')
1758        repo.lock_write()
1759        repo._format.features[b"makes-cheese-sandwich"] = b"required"
1760        self.assertRaises(bzrdir.MissingFeature,
1761                          repo._format.check_support_status, False)
1762