~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Ross Lagerwall
  • Date: 2012-08-07 06:32:51 UTC
  • mto: (6437.63.5 2.5)
  • mto: This revision was merged to the branch mainline in revision 6558.
  • Revision ID: rosslagerwall@gmail.com-20120807063251-x9p03ghg2ws8oqjc
Add bzrlib/locale to .bzrignore

bzrlib/locale is generated with ./setup.py build_mo which is in turn called
by ./setup.py build

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006, 2007 Canonical Ltd
 
1
# Copyright (C) 2006-2011 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
12
12
#
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
 
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16
16
 
17
17
"""Tests for the Repository facility that are not interface tests.
18
18
 
19
 
For interface tests see tests/repository_implementations/*.py.
 
19
For interface tests see tests/per_repository/*.py.
20
20
 
21
21
For concrete class tests see this file, and for storage formats tests
22
22
also see this file.
23
23
"""
24
24
 
25
25
from stat import S_ISDIR
26
 
from StringIO import StringIO
27
26
 
28
 
from bzrlib import symbol_versioning
29
27
import bzrlib
30
 
import bzrlib.bzrdir as bzrdir
31
 
import bzrlib.errors as errors
32
 
from bzrlib.errors import (NotBranchError,
33
 
                           NoSuchFile,
34
 
                           UnknownFormatError,
35
 
                           UnsupportedFormatError,
36
 
                           )
 
28
from bzrlib.errors import (
 
29
    UnknownFormatError,
 
30
    UnsupportedFormatError,
 
31
    )
 
32
from bzrlib import (
 
33
    btree_index,
 
34
    symbol_versioning,
 
35
    tests,
 
36
    transport,
 
37
    vf_search,
 
38
    )
 
39
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
 
40
from bzrlib.index import GraphIndex
37
41
from bzrlib.repository import RepositoryFormat
38
 
from bzrlib.tests import TestCase, TestCaseWithTransport
39
 
from bzrlib.transport import get_transport
40
 
from bzrlib.transport.memory import MemoryServer
 
42
from bzrlib.tests import (
 
43
    TestCase,
 
44
    TestCaseWithTransport,
 
45
    )
41
46
from bzrlib import (
 
47
    bzrdir,
 
48
    errors,
 
49
    inventory,
 
50
    osutils,
42
51
    repository,
 
52
    revision as _mod_revision,
43
53
    upgrade,
 
54
    versionedfile,
 
55
    vf_repository,
44
56
    workingtree,
45
57
    )
46
 
from bzrlib.repofmt import knitrepo, weaverepo
 
58
from bzrlib.repofmt import (
 
59
    groupcompress_repo,
 
60
    knitrepo,
 
61
    knitpack_repo,
 
62
    pack_repo,
 
63
    )
47
64
 
48
65
 
49
66
class TestDefaultFormat(TestCase):
51
68
    def test_get_set_default_format(self):
52
69
        old_default = bzrdir.format_registry.get('default')
53
70
        private_default = old_default().repository_format.__class__
54
 
        old_format = repository.RepositoryFormat.get_default_format()
 
71
        old_format = repository.format_registry.get_default()
55
72
        self.assertTrue(isinstance(old_format, private_default))
56
73
        def make_sample_bzrdir():
57
74
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
69
86
            self.assertEqual(result, 'A bzr repository dir')
70
87
        finally:
71
88
            bzrdir.format_registry.remove('default')
 
89
            bzrdir.format_registry.remove('sample')
72
90
            bzrdir.format_registry.register('default', old_default, '')
73
 
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
 
91
        self.assertIsInstance(repository.format_registry.get_default(),
74
92
                              old_format.__class__)
75
93
 
76
94
 
77
 
class SampleRepositoryFormat(repository.RepositoryFormat):
 
95
class SampleRepositoryFormat(repository.RepositoryFormatMetaDir):
78
96
    """A sample format
79
97
 
80
 
    this format is initializable, unsupported to aid in testing the 
 
98
    this format is initializable, unsupported to aid in testing the
81
99
    open and open(unsupported=True) routines.
82
100
    """
83
101
 
84
 
    def get_format_string(self):
 
102
    @classmethod
 
103
    def get_format_string(cls):
85
104
        """See RepositoryFormat.get_format_string()."""
86
105
        return "Sample .bzr repository format."
87
106
 
98
117
        return "opened repository."
99
118
 
100
119
 
 
120
class SampleExtraRepositoryFormat(repository.RepositoryFormat):
 
121
    """A sample format that can not be used in a metadir
 
122
 
 
123
    """
 
124
 
 
125
    def get_format_string(self):
 
126
        raise NotImplementedError
 
127
 
 
128
 
101
129
class TestRepositoryFormat(TestCaseWithTransport):
102
130
    """Tests for the Repository format detection used by the bzr meta dir facility.BzrBranchFormat facility."""
103
131
 
104
132
    def test_find_format(self):
105
133
        # is the right format object found for a repository?
106
134
        # create a branch with a few known format objects.
107
 
        # this is not quite the same as 
 
135
        # this is not quite the same as
108
136
        self.build_tree(["foo/", "bar/"])
109
137
        def check_format(format, url):
110
138
            dir = format._matchingbzrdir.initialize(url)
111
139
            format.initialize(dir)
112
 
            t = get_transport(url)
113
 
            found_format = repository.RepositoryFormat.find_format(dir)
114
 
            self.failUnless(isinstance(found_format, format.__class__))
115
 
        check_format(weaverepo.RepositoryFormat7(), "bar")
116
 
        
 
140
            t = transport.get_transport_from_path(url)
 
141
            found_format = repository.RepositoryFormatMetaDir.find_format(dir)
 
142
            self.assertIsInstance(found_format, format.__class__)
 
143
        check_format(repository.format_registry.get_default(), "bar")
 
144
 
117
145
    def test_find_format_no_repository(self):
118
146
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
119
147
        self.assertRaises(errors.NoRepositoryPresent,
120
 
                          repository.RepositoryFormat.find_format,
 
148
                          repository.RepositoryFormatMetaDir.find_format,
121
149
                          dir)
122
150
 
 
151
    def test_from_string(self):
 
152
        self.assertIsInstance(
 
153
            SampleRepositoryFormat.from_string(
 
154
                "Sample .bzr repository format."),
 
155
            SampleRepositoryFormat)
 
156
        self.assertRaises(AssertionError,
 
157
            SampleRepositoryFormat.from_string,
 
158
                "Different .bzr repository format.")
 
159
 
123
160
    def test_find_format_unknown_format(self):
124
161
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
125
162
        SampleRepositoryFormat().initialize(dir)
126
163
        self.assertRaises(UnknownFormatError,
127
 
                          repository.RepositoryFormat.find_format,
 
164
                          repository.RepositoryFormatMetaDir.find_format,
128
165
                          dir)
129
166
 
 
167
    def test_find_format_with_features(self):
 
168
        tree = self.make_branch_and_tree('.', format='2a')
 
169
        tree.branch.repository.update_feature_flags({"name": "necessity"})
 
170
        found_format = repository.RepositoryFormatMetaDir.find_format(tree.bzrdir)
 
171
        self.assertIsInstance(found_format, repository.RepositoryFormatMetaDir)
 
172
        self.assertEquals(found_format.features.get("name"), "necessity")
 
173
        self.assertRaises(errors.MissingFeature, found_format.check_support_status,
 
174
            True)
 
175
        self.addCleanup(repository.RepositoryFormatMetaDir.unregister_feature,
 
176
            "name")
 
177
        repository.RepositoryFormatMetaDir.register_feature("name")
 
178
        found_format.check_support_status(True)
 
179
 
130
180
    def test_register_unregister_format(self):
 
181
        # Test deprecated format registration functions
131
182
        format = SampleRepositoryFormat()
132
183
        # make a control dir
133
184
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
134
185
        # make a repo
135
186
        format.initialize(dir)
136
187
        # register a format for it.
137
 
        repository.RepositoryFormat.register_format(format)
 
188
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
 
189
            repository.RepositoryFormat.register_format, format)
138
190
        # which repository.Open will refuse (not supported)
139
 
        self.assertRaises(UnsupportedFormatError, repository.Repository.open, self.get_url())
 
191
        self.assertRaises(UnsupportedFormatError, repository.Repository.open,
 
192
            self.get_url())
140
193
        # but open(unsupported) will work
141
194
        self.assertEqual(format.open(dir), "opened repository.")
142
195
        # unregister the format
143
 
        repository.RepositoryFormat.unregister_format(format)
144
 
 
145
 
 
146
 
class TestFormat6(TestCaseWithTransport):
147
 
 
148
 
    def test_no_ancestry_weave(self):
149
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
150
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
151
 
        # We no longer need to create the ancestry.weave file
152
 
        # since it is *never* used.
153
 
        self.assertRaises(NoSuchFile,
154
 
                          control.transport.get,
155
 
                          'ancestry.weave')
156
 
 
157
 
 
158
 
class TestFormat7(TestCaseWithTransport):
159
 
    
160
 
    def test_disk_layout(self):
161
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
162
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
163
 
        # in case of side effects of locking.
164
 
        repo.lock_write()
165
 
        repo.unlock()
166
 
        # we want:
167
 
        # format 'Bazaar-NG Repository format 7'
168
 
        # lock ''
169
 
        # inventory.weave == empty_weave
170
 
        # empty revision-store directory
171
 
        # empty weaves directory
172
 
        t = control.get_repository_transport(None)
173
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
174
 
                             t.get('format').read())
175
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
176
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
177
 
        self.assertEqualDiff('# bzr weave file v5\n'
178
 
                             'w\n'
179
 
                             'W\n',
180
 
                             t.get('inventory.weave').read())
181
 
 
182
 
    def test_shared_disk_layout(self):
183
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
184
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
185
 
        # we want:
186
 
        # format 'Bazaar-NG Repository format 7'
187
 
        # inventory.weave == empty_weave
188
 
        # empty revision-store directory
189
 
        # empty weaves directory
190
 
        # a 'shared-storage' marker file.
191
 
        # lock is not present when unlocked
192
 
        t = control.get_repository_transport(None)
193
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
194
 
                             t.get('format').read())
195
 
        self.assertEqualDiff('', t.get('shared-storage').read())
196
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
197
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
198
 
        self.assertEqualDiff('# bzr weave file v5\n'
199
 
                             'w\n'
200
 
                             'W\n',
201
 
                             t.get('inventory.weave').read())
202
 
        self.assertFalse(t.has('branch-lock'))
203
 
 
204
 
    def test_creates_lockdir(self):
205
 
        """Make sure it appears to be controlled by a LockDir existence"""
206
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
207
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
208
 
        t = control.get_repository_transport(None)
209
 
        # TODO: Should check there is a 'lock' toplevel directory, 
210
 
        # regardless of contents
211
 
        self.assertFalse(t.has('lock/held/info'))
212
 
        repo.lock_write()
213
 
        try:
214
 
            self.assertTrue(t.has('lock/held/info'))
215
 
        finally:
216
 
            # unlock so we don't get a warning about failing to do so
217
 
            repo.unlock()
218
 
 
219
 
    def test_uses_lockdir(self):
220
 
        """repo format 7 actually locks on lockdir"""
221
 
        base_url = self.get_url()
222
 
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
223
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
224
 
        t = control.get_repository_transport(None)
225
 
        repo.lock_write()
226
 
        repo.unlock()
227
 
        del repo
228
 
        # make sure the same lock is created by opening it
229
 
        repo = repository.Repository.open(base_url)
230
 
        repo.lock_write()
231
 
        self.assertTrue(t.has('lock/held/info'))
232
 
        repo.unlock()
233
 
        self.assertFalse(t.has('lock/held/info'))
234
 
 
235
 
    def test_shared_no_tree_disk_layout(self):
236
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
237
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
238
 
        repo.set_make_working_trees(False)
239
 
        # we want:
240
 
        # format 'Bazaar-NG Repository format 7'
241
 
        # lock ''
242
 
        # inventory.weave == empty_weave
243
 
        # empty revision-store directory
244
 
        # empty weaves directory
245
 
        # a 'shared-storage' marker file.
246
 
        t = control.get_repository_transport(None)
247
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
248
 
                             t.get('format').read())
249
 
        ## self.assertEqualDiff('', t.get('lock').read())
250
 
        self.assertEqualDiff('', t.get('shared-storage').read())
251
 
        self.assertEqualDiff('', t.get('no-working-trees').read())
252
 
        repo.set_make_working_trees(True)
253
 
        self.assertFalse(t.has('no-working-trees'))
254
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
255
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
256
 
        self.assertEqualDiff('# bzr weave file v5\n'
257
 
                             'w\n'
258
 
                             'W\n',
259
 
                             t.get('inventory.weave').read())
 
196
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
 
197
            repository.RepositoryFormat.unregister_format, format)
 
198
 
 
199
 
 
200
class TestRepositoryFormatRegistry(TestCase):
 
201
 
 
202
    def setUp(self):
 
203
        super(TestRepositoryFormatRegistry, self).setUp()
 
204
        self.registry = repository.RepositoryFormatRegistry()
 
205
 
 
206
    def test_register_unregister_format(self):
 
207
        format = SampleRepositoryFormat()
 
208
        self.registry.register(format)
 
209
        self.assertEquals(format, self.registry.get("Sample .bzr repository format."))
 
210
        self.registry.remove(format)
 
211
        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")
 
212
 
 
213
    def test_get_all(self):
 
214
        format = SampleRepositoryFormat()
 
215
        self.assertEquals([], self.registry._get_all())
 
216
        self.registry.register(format)
 
217
        self.assertEquals([format], self.registry._get_all())
 
218
 
 
219
    def test_register_extra(self):
 
220
        format = SampleExtraRepositoryFormat()
 
221
        self.assertEquals([], self.registry._get_all())
 
222
        self.registry.register_extra(format)
 
223
        self.assertEquals([format], self.registry._get_all())
 
224
 
 
225
    def test_register_extra_lazy(self):
 
226
        self.assertEquals([], self.registry._get_all())
 
227
        self.registry.register_extra_lazy("bzrlib.tests.test_repository",
 
228
            "SampleExtraRepositoryFormat")
 
229
        formats = self.registry._get_all()
 
230
        self.assertEquals(1, len(formats))
 
231
        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
260
232
 
261
233
 
262
234
class TestFormatKnit1(TestCaseWithTransport):
263
 
    
 
235
 
 
236
    def test_attribute__fetch_order(self):
 
237
        """Knits need topological data insertion."""
 
238
        repo = self.make_repository('.',
 
239
                format=bzrdir.format_registry.get('knit')())
 
240
        self.assertEqual('topological', repo._format._fetch_order)
 
241
 
 
242
    def test_attribute__fetch_uses_deltas(self):
 
243
        """Knits reuse deltas."""
 
244
        repo = self.make_repository('.',
 
245
                format=bzrdir.format_registry.get('knit')())
 
246
        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
247
 
264
248
    def test_disk_layout(self):
265
249
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
266
250
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
280
264
        # self.assertEqualDiff('', t.get('lock').read())
281
265
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
282
266
        self.check_knits(t)
 
267
        # Check per-file knits.
 
268
        branch = control.create_branch()
 
269
        tree = control.create_workingtree()
 
270
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
 
271
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
 
272
        tree.commit('1st post', rev_id='foo')
 
273
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
 
274
            '\nfoo fulltext 0 81  :')
283
275
 
284
 
    def assertHasKnit(self, t, knit_name):
 
276
    def assertHasKnit(self, t, knit_name, extra_content=''):
285
277
        """Assert that knit_name exists on t."""
286
 
        self.assertEqualDiff('# bzr knit index 8\n',
 
278
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
287
279
                             t.get(knit_name + '.kndx').read())
288
 
        # no default content
289
 
        self.assertTrue(t.has(knit_name + '.knit'))
290
280
 
291
281
    def check_knits(self, t):
292
282
        """check knit content for a repository."""
336
326
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
337
327
        self.check_knits(t)
338
328
 
339
 
 
340
 
class InterString(repository.InterRepository):
341
 
    """An inter-repository optimised code path for strings.
342
 
 
343
 
    This is for use during testing where we use strings as repositories
 
329
    def test_deserialise_sets_root_revision(self):
 
330
        """We must have a inventory.root.revision
 
331
 
 
332
        Old versions of the XML5 serializer did not set the revision_id for
 
333
        the whole inventory. So we grab the one from the expected text. Which
 
334
        is valid when the api is not being abused.
 
335
        """
 
336
        repo = self.make_repository('.',
 
337
                format=bzrdir.format_registry.get('knit')())
 
338
        inv_xml = '<inventory format="5">\n</inventory>\n'
 
339
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
 
340
        self.assertEqual('test-rev-id', inv.root.revision)
 
341
 
 
342
    def test_deserialise_uses_global_revision_id(self):
 
343
        """If it is set, then we re-use the global revision id"""
 
344
        repo = self.make_repository('.',
 
345
                format=bzrdir.format_registry.get('knit')())
 
346
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
 
347
                   '</inventory>\n')
 
348
        # Arguably, the deserialise_inventory should detect a mismatch, and
 
349
        # raise an error, rather than silently using one revision_id over the
 
350
        # other.
 
351
        self.assertRaises(AssertionError, repo._deserialise_inventory,
 
352
            'test-rev-id', inv_xml)
 
353
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
 
354
        self.assertEqual('other-rev-id', inv.root.revision)
 
355
 
 
356
    def test_supports_external_lookups(self):
 
357
        repo = self.make_repository('.',
 
358
                format=bzrdir.format_registry.get('knit')())
 
359
        self.assertFalse(repo._format.supports_external_lookups)
 
360
 
 
361
 
 
362
class DummyRepository(object):
 
363
    """A dummy repository for testing."""
 
364
 
 
365
    _format = None
 
366
    _serializer = None
 
367
 
 
368
    def supports_rich_root(self):
 
369
        if self._format is not None:
 
370
            return self._format.rich_root_data
 
371
        return False
 
372
 
 
373
    def get_graph(self):
 
374
        raise NotImplementedError
 
375
 
 
376
    def get_parent_map(self, revision_ids):
 
377
        raise NotImplementedError
 
378
 
 
379
 
 
380
class InterDummy(repository.InterRepository):
 
381
    """An inter-repository optimised code path for DummyRepository.
 
382
 
 
383
    This is for use during testing where we use DummyRepository as repositories
344
384
    so that none of the default regsitered inter-repository classes will
345
 
    match.
 
385
    MATCH.
346
386
    """
347
387
 
348
388
    @staticmethod
349
389
    def is_compatible(repo_source, repo_target):
350
 
        """InterString is compatible with strings-as-repos."""
351
 
        return isinstance(repo_source, str) and isinstance(repo_target, str)
 
390
        """InterDummy is compatible with DummyRepository."""
 
391
        return (isinstance(repo_source, DummyRepository) and
 
392
            isinstance(repo_target, DummyRepository))
352
393
 
353
394
 
354
395
class TestInterRepository(TestCaseWithTransport):
360
401
        # This also tests that the default registered optimised interrepository
361
402
        # classes do not barf inappropriately when a surprising repository type
362
403
        # is handed to them.
363
 
        dummy_a = "Repository 1."
364
 
        dummy_b = "Repository 2."
 
404
        dummy_a = DummyRepository()
 
405
        dummy_a._format = RepositoryFormat()
 
406
        dummy_a._format.supports_full_versioned_files = True
 
407
        dummy_b = DummyRepository()
 
408
        dummy_b._format = RepositoryFormat()
 
409
        dummy_b._format.supports_full_versioned_files = True
365
410
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
366
411
 
367
412
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
368
 
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default."""
 
413
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
 
414
 
 
415
        The effective default is now InterSameDataRepository because there is
 
416
        no actual sane default in the presence of incompatible data models.
 
417
        """
369
418
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
370
 
        self.assertEqual(repository.InterRepository,
 
419
        self.assertEqual(vf_repository.InterSameDataRepository,
371
420
                         inter_repo.__class__)
372
421
        self.assertEqual(repo_a, inter_repo.source)
373
422
        self.assertEqual(repo_b, inter_repo.target)
378
427
        # and that it is correctly selected when given a repository
379
428
        # pair that it returns true on for the is_compatible static method
380
429
        # check
381
 
        dummy_a = "Repository 1."
382
 
        dummy_b = "Repository 2."
383
 
        repository.InterRepository.register_optimiser(InterString)
 
430
        dummy_a = DummyRepository()
 
431
        dummy_a._format = RepositoryFormat()
 
432
        dummy_b = DummyRepository()
 
433
        dummy_b._format = RepositoryFormat()
 
434
        repo = self.make_repository('.')
 
435
        # hack dummies to look like repo somewhat.
 
436
        dummy_a._serializer = repo._serializer
 
437
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
 
438
        dummy_a._format.rich_root_data = repo._format.rich_root_data
 
439
        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
 
440
        dummy_b._serializer = repo._serializer
 
441
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
 
442
        dummy_b._format.rich_root_data = repo._format.rich_root_data
 
443
        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
 
444
        repository.InterRepository.register_optimiser(InterDummy)
384
445
        try:
385
 
            # we should get the default for something InterString returns False
 
446
            # we should get the default for something InterDummy returns False
386
447
            # to
387
 
            self.assertFalse(InterString.is_compatible(dummy_a, None))
388
 
            self.assertGetsDefaultInterRepository(dummy_a, None)
389
 
            # and we should get an InterString for a pair it 'likes'
390
 
            self.assertTrue(InterString.is_compatible(dummy_a, dummy_b))
 
448
            self.assertFalse(InterDummy.is_compatible(dummy_a, repo))
 
449
            self.assertGetsDefaultInterRepository(dummy_a, repo)
 
450
            # and we should get an InterDummy for a pair it 'likes'
 
451
            self.assertTrue(InterDummy.is_compatible(dummy_a, dummy_b))
391
452
            inter_repo = repository.InterRepository.get(dummy_a, dummy_b)
392
 
            self.assertEqual(InterString, inter_repo.__class__)
 
453
            self.assertEqual(InterDummy, inter_repo.__class__)
393
454
            self.assertEqual(dummy_a, inter_repo.source)
394
455
            self.assertEqual(dummy_b, inter_repo.target)
395
456
        finally:
396
 
            repository.InterRepository.unregister_optimiser(InterString)
 
457
            repository.InterRepository.unregister_optimiser(InterDummy)
397
458
        # now we should get the default InterRepository object again.
398
459
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
399
460
 
400
461
 
401
 
class TestInterWeaveRepo(TestCaseWithTransport):
402
 
 
403
 
    def test_is_compatible_and_registered(self):
404
 
        # InterWeaveRepo is compatible when either side
405
 
        # is a format 5/6/7 branch
406
 
        from bzrlib.repofmt import knitrepo, weaverepo
407
 
        formats = [weaverepo.RepositoryFormat5(),
408
 
                   weaverepo.RepositoryFormat6(),
409
 
                   weaverepo.RepositoryFormat7()]
410
 
        incompatible_formats = [weaverepo.RepositoryFormat4(),
411
 
                                knitrepo.RepositoryFormatKnit1(),
412
 
                                ]
413
 
        repo_a = self.make_repository('a')
414
 
        repo_b = self.make_repository('b')
415
 
        is_compatible = repository.InterWeaveRepo.is_compatible
416
 
        for source in incompatible_formats:
417
 
            # force incompatible left then right
418
 
            repo_a._format = source
419
 
            repo_b._format = formats[0]
420
 
            self.assertFalse(is_compatible(repo_a, repo_b))
421
 
            self.assertFalse(is_compatible(repo_b, repo_a))
422
 
        for source in formats:
423
 
            repo_a._format = source
424
 
            for target in formats:
425
 
                repo_b._format = target
426
 
                self.assertTrue(is_compatible(repo_a, repo_b))
427
 
        self.assertEqual(repository.InterWeaveRepo,
428
 
                         repository.InterRepository.get(repo_a,
429
 
                                                        repo_b).__class__)
 
462
class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):
 
463
 
 
464
    @classmethod
 
465
    def get_format_string(cls):
 
466
        return "Test Format 1"
 
467
 
 
468
 
 
469
class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):
 
470
 
 
471
    @classmethod
 
472
    def get_format_string(cls):
 
473
        return "Test Format 2"
430
474
 
431
475
 
432
476
class TestRepositoryConverter(TestCaseWithTransport):
433
477
 
434
478
    def test_convert_empty(self):
435
 
        t = get_transport(self.get_url('.'))
 
479
        source_format = TestRepositoryFormat1()
 
480
        target_format = TestRepositoryFormat2()
 
481
        repository.format_registry.register(source_format)
 
482
        self.addCleanup(repository.format_registry.remove,
 
483
            source_format)
 
484
        repository.format_registry.register(target_format)
 
485
        self.addCleanup(repository.format_registry.remove,
 
486
            target_format)
 
487
        t = self.get_transport()
436
488
        t.mkdir('repository')
437
489
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
438
 
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
439
 
        target_format = knitrepo.RepositoryFormatKnit1()
 
490
        repo = TestRepositoryFormat1().initialize(repo_dir)
440
491
        converter = repository.CopyConverter(target_format)
441
492
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
442
493
        try:
447
498
        self.assertTrue(isinstance(target_format, repo._format.__class__))
448
499
 
449
500
 
450
 
class TestMisc(TestCase):
451
 
    
452
 
    def test_unescape_xml(self):
453
 
        """We get some kind of error when malformed entities are passed"""
454
 
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;') 
455
 
 
456
 
 
457
 
class TestRepositoryFormatKnit2(TestCaseWithTransport):
 
501
class TestRepositoryFormatKnit3(TestCaseWithTransport):
 
502
 
 
503
    def test_attribute__fetch_order(self):
 
504
        """Knits need topological data insertion."""
 
505
        format = bzrdir.BzrDirMetaFormat1()
 
506
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
507
        repo = self.make_repository('.', format=format)
 
508
        self.assertEqual('topological', repo._format._fetch_order)
 
509
 
 
510
    def test_attribute__fetch_uses_deltas(self):
 
511
        """Knits reuse deltas."""
 
512
        format = bzrdir.BzrDirMetaFormat1()
 
513
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
514
        repo = self.make_repository('.', format=format)
 
515
        self.assertEqual(True, repo._format._fetch_uses_deltas)
458
516
 
459
517
    def test_convert(self):
460
518
        """Ensure the upgrade adds weaves for roots"""
463
521
        tree = self.make_branch_and_tree('.', format)
464
522
        tree.commit("Dull commit", rev_id="dull")
465
523
        revision_tree = tree.branch.repository.revision_tree('dull')
466
 
        self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
467
 
            revision_tree.inventory.root.file_id)
 
524
        revision_tree.lock_read()
 
525
        try:
 
526
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
 
527
                revision_tree.inventory.root.file_id)
 
528
        finally:
 
529
            revision_tree.unlock()
468
530
        format = bzrdir.BzrDirMetaFormat1()
469
 
        format.repository_format = knitrepo.RepositoryFormatKnit2()
 
531
        format.repository_format = knitrepo.RepositoryFormatKnit3()
470
532
        upgrade.Convert('.', format)
471
533
        tree = workingtree.WorkingTree.open('.')
472
534
        revision_tree = tree.branch.repository.revision_tree('dull')
473
 
        revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
 
535
        revision_tree.lock_read()
 
536
        try:
 
537
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
 
538
        finally:
 
539
            revision_tree.unlock()
474
540
        tree.commit("Another dull commit", rev_id='dull2')
475
541
        revision_tree = tree.branch.repository.revision_tree('dull2')
 
542
        revision_tree.lock_read()
 
543
        self.addCleanup(revision_tree.unlock)
476
544
        self.assertEqual('dull', revision_tree.inventory.root.revision)
 
545
 
 
546
    def test_supports_external_lookups(self):
 
547
        format = bzrdir.BzrDirMetaFormat1()
 
548
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
549
        repo = self.make_repository('.', format=format)
 
550
        self.assertFalse(repo._format.supports_external_lookups)
 
551
 
 
552
 
 
553
class Test2a(tests.TestCaseWithMemoryTransport):
    """Tests for the '2a' (groupcompress + CHK inventory) repository format."""

    def test_chk_bytes_uses_custom_btree_parser(self):
        # The chk_bytes index must use the specialised groupcompress-chk
        # leaf factory, not the generic btree leaf parser.
        mt = self.make_branch_and_memory_tree('test', format='2a')
        mt.lock_write()
        self.addCleanup(mt.unlock)
        mt.add([''], ['root-id'])
        mt.commit('first')
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
        # It should also work if we re-open the repo
        repo = mt.branch.repository.bzrdir.open_repository()
        repo.lock_read()
        self.addCleanup(repo.unlock)
        index = repo.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)

    def test_fetch_combines_groups(self):
        # NOTE: this method used to be defined three times with identical
        # bodies; Python silently rebinds the name so only the last one ran.
        # The duplicates have been removed.
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])

    def test_format_pack_compresses_True(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.lock_write()
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        tree.unlock()
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
        inv.parent_id_basename_to_file_id._ensure_root()
        inv.id_to_entry._ensure_root()
        # Both CHK maps should use the standard 64k page size.
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
        self.assertEqual(65536,
            inv.parent_id_basename_to_file_id._root_node.maximum_size)

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()
        # 2 to combine
        tree.commit('2')
        tree.commit('3')
        all = tree.branch.repository._pack_collection.names()
        combine = list(set(all) - set(to_keep))
        self.assertLength(3, all)
        self.assertLength(2, combine)
        tree.branch.repository.pack(hint=combine)
        final = tree.branch.repository._pack_collection.names()
        # Only the hinted packs are combined; the first pack is untouched.
        self.assertLength(2, final)
        self.assertFalse(combine[0] in final)
        self.assertFalse(combine[1] in final)
        self.assertSubset(to_keep, final)

    def test_stream_source_to_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='2a')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
                            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = vf_search.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)

    def test_inconsistency_fatal(self):
        # Only the revisions index treats inconsistent duplicate adds as
        # fatal; the other indices tolerate them.
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
        self.assertFalse(repo.texts._index._inconsistency_fatal)
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
777
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
    """The specialised KnitPackStreamSource is only selected when source and
    target repository formats match exactly; everything else falls back to
    the generic StreamSource.
    """

    def test_source_to_exact_pack_092(self):
        src = self.make_repository('source', format='pack-0.92')
        dest = self.make_repository('target', format='pack-0.92')
        self.assertIsInstance(src._get_source(dest._format),
                              knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        src = self.make_repository('source', format='rich-root-pack')
        dest = self.make_repository('target', format='rich-root-pack')
        self.assertIsInstance(src._get_source(dest._format),
                              knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        src = self.make_repository('source', format='1.9')
        dest = self.make_repository('target', format='1.9')
        self.assertIsInstance(src._get_source(dest._format),
                              knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        src = self.make_repository('source', format='1.9-rich-root')
        dest = self.make_repository('target', format='1.9-rich-root')
        self.assertIsInstance(src._get_source(dest._format),
                              knitpack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        src = self.make_repository('source', format='1.9')
        dest = self.make_repository('target', format='1.9')
        # Re-open the target over the smart server: the remote wrapper around
        # an exactly-matching format should still pick the pack source.
        dest = repository.Repository.open(trans.base)
        self.assertIsInstance(src._get_source(dest._format),
                              knitpack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        src = self.make_repository('source', format='pack-0.92')
        dest = self.make_repository('target', format='1.9')
        self.assertIs(type(src._get_source(dest._format)),
                      vf_repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        src = self.make_repository('source', format='1.9')
        dest = self.make_repository('target', format='1.9-rich-root')
        self.assertIs(type(src._get_source(dest._format)),
                      vf_repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        src = self.make_repository('source', format='1.9')
        dest = self.make_repository('target', format='1.6')
        # A remote target of a *different* format falls back to StreamSource.
        dest = repository.Repository.open(trans.base)
        self.assertIs(type(src._get_source(dest._format)),
                      vf_repository.StreamSource)

    def test_stream_source_to_knit(self):
        src = self.make_repository('source', format='pack-0.92')
        dest = self.make_repository('target', format='dirstate')
        self.assertIs(type(src._get_source(dest._format)),
                      vf_repository.StreamSource)
840
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        """Check the parent ids found for rev_set, ignoring ordering."""
        found = self.repo._find_parent_ids_of_revisions(rev_set)
        self.assertEqual(sorted(expected_result), sorted(found))

    def test_simple(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.assertParentIds(['revid1'], ['revid2'])

    def test_not_first_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        # revid2 is inside the set, so only revid1 is an external parent.
        self.assertParentIds(['revid1'], ['revid3', 'revid2'])

    def test_not_null(self):
        # 'initial' has no parents at all.
        self.assertParentIds([], ['initial'])

    def test_not_null_set(self):
        self.builder.build_snapshot('revid1', None, [])
        # NULL_REVISION never contributes parents.
        self.assertParentIds([], [_mod_revision.NULL_REVISION])

    def test_ghost(self):
        self.builder.build_snapshot('revid1', None, [])
        # A ghost in the set itself is ignored; revid1's parent is found.
        self.assertParentIds(['initial'], ['ghost', 'revid1'])

    def test_ghost_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        # A ghost *parent* is reported alongside the real parent.
        self.assertParentIds(['ghost', 'initial'], ['revid2', 'revid1'])

    def test_righthand_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        # revid2b is a right-hand parent outside the set, so it is returned.
        self.assertParentIds(['revid1', 'revid2b'], ['revid3', 'revid2a'])
898
class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        """Return a repository with deliberately inconsistent history.

        The repository contains revisions with unreferenced file-text
        ancestors and references to ghost revisions, for exercising how
        fetch reacts to broken source data.
        """
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon.  Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups = []
        try:
            repo.lock_write()
            cleanups.append(repo.unlock)
            repo.start_write_group()
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id='rev1a')
            inv.root.revision = 'rev1a'
            self.add_file(repo, inv, 'file1', 'rev1a', [])
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
            repo.add_inventory('rev1a', inv, [])
            revision = _mod_revision.Revision('rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision('rev1a', revision, inv)

            # make rev1b, which has no Revision, but has an Inventory, and
            # file1
            inv = inventory.Inventory(revision_id='rev1b')
            inv.root.revision = 'rev1b'
            self.add_file(repo, inv, 'file1', 'rev1b', [])
            repo.add_inventory('rev1b', inv, [])

            # make rev2, with file1 and file2
            # file2 is sane
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
            self.add_file(repo, inv, 'file2', 'rev2', [])
            self.add_revision(repo, 'rev2', inv, ['rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
            return repo
        finally:
            # Unwind in LIFO order: commit the write group before unlocking.
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        """Add a revision with the given inventory and parents to repo."""
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id, revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        """Add a one-line file entry to inv and its text to repo."""
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        try:
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
            # error.
            return
        # If fetch succeeded, the copied text must at least be intact.
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        text = empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True).next()
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
993
class TestRepositoryPackCollection(TestCaseWithTransport):
 
994
 
 
995
    def get_format(self):
 
996
        return bzrdir.format_registry.make_bzrdir('pack-0.92')
 
997
 
 
998
    def get_packs(self):
 
999
        format = self.get_format()
 
1000
        repo = self.make_repository('.', format=format)
 
1001
        return repo._pack_collection
 
1002
 
 
1003
    def make_packs_and_alt_repo(self, write_lock=False):
 
1004
        """Create a pack repo with 3 packs, and access it via a second repo."""
 
1005
        tree = self.make_branch_and_tree('.', format=self.get_format())
 
1006
        tree.lock_write()
 
1007
        self.addCleanup(tree.unlock)
 
1008
        rev1 = tree.commit('one')
 
1009
        rev2 = tree.commit('two')
 
1010
        rev3 = tree.commit('three')
 
1011
        r = repository.Repository.open('.')
 
1012
        if write_lock:
 
1013
            r.lock_write()
 
1014
        else:
 
1015
            r.lock_read()
 
1016
        self.addCleanup(r.unlock)
 
1017
        packs = r._pack_collection
 
1018
        packs.ensure_loaded()
 
1019
        return tree, r, packs, [rev1, rev2, rev3]
 
1020
 
 
1021
    def test__clear_obsolete_packs(self):
 
1022
        packs = self.get_packs()
 
1023
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
1024
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1025
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1026
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1027
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1028
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1029
        res = packs._clear_obsolete_packs()
 
1030
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1031
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
 
1032
 
 
1033
    def test__clear_obsolete_packs_preserve(self):
 
1034
        packs = self.get_packs()
 
1035
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
1036
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1037
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1038
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1039
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1040
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1041
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
 
1042
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1043
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
 
1044
                         sorted(obsolete_pack_trans.list_dir('.')))
 
1045
 
 
1046
    def test__max_pack_count(self):
 
1047
        """The maximum pack count is a function of the number of revisions."""
 
1048
        # no revisions - one pack, so that we can have a revision free repo
 
1049
        # without it blowing up
 
1050
        packs = self.get_packs()
 
1051
        self.assertEqual(1, packs._max_pack_count(0))
 
1052
        # after that the sum of the digits, - check the first 1-9
 
1053
        self.assertEqual(1, packs._max_pack_count(1))
 
1054
        self.assertEqual(2, packs._max_pack_count(2))
 
1055
        self.assertEqual(3, packs._max_pack_count(3))
 
1056
        self.assertEqual(4, packs._max_pack_count(4))
 
1057
        self.assertEqual(5, packs._max_pack_count(5))
 
1058
        self.assertEqual(6, packs._max_pack_count(6))
 
1059
        self.assertEqual(7, packs._max_pack_count(7))
 
1060
        self.assertEqual(8, packs._max_pack_count(8))
 
1061
        self.assertEqual(9, packs._max_pack_count(9))
 
1062
        # check the boundary cases with two digits for the next decade
 
1063
        self.assertEqual(1, packs._max_pack_count(10))
 
1064
        self.assertEqual(2, packs._max_pack_count(11))
 
1065
        self.assertEqual(10, packs._max_pack_count(19))
 
1066
        self.assertEqual(2, packs._max_pack_count(20))
 
1067
        self.assertEqual(3, packs._max_pack_count(21))
 
1068
        # check some arbitrary big numbers
 
1069
        self.assertEqual(25, packs._max_pack_count(112894))
 
1070
 
 
1071
    def test_repr(self):
 
1072
        packs = self.get_packs()
 
1073
        self.assertContainsRe(repr(packs),
 
1074
            'RepositoryPackCollection(.*Repository(.*))')
 
1075
 
 
1076
    def test__obsolete_packs(self):
 
1077
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1078
        names = packs.names()
 
1079
        pack = packs.get_pack_by_name(names[0])
 
1080
        # Schedule this one for removal
 
1081
        packs._remove_pack_from_memory(pack)
 
1082
        # Simulate a concurrent update by renaming the .pack file and one of
 
1083
        # the indices
 
1084
        packs.transport.rename('packs/%s.pack' % (names[0],),
 
1085
                               'obsolete_packs/%s.pack' % (names[0],))
 
1086
        packs.transport.rename('indices/%s.iix' % (names[0],),
 
1087
                               'obsolete_packs/%s.iix' % (names[0],))
 
1088
        # Now trigger the obsoletion, and ensure that all the remaining files
 
1089
        # are still renamed
 
1090
        packs._obsolete_packs([pack])
 
1091
        self.assertEqual([n + '.pack' for n in names[1:]],
 
1092
                         sorted(packs._pack_transport.list_dir('.')))
 
1093
        # names[0] should not be present in the index anymore
 
1094
        self.assertEqual(names[1:],
 
1095
            sorted(set([osutils.splitext(n)[0] for n in
 
1096
                        packs._index_transport.list_dir('.')])))
 
1097
 
 
1098
    def test__obsolete_packs_missing_directory(self):
 
1099
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1100
        r.control_transport.rmdir('obsolete_packs')
 
1101
        names = packs.names()
 
1102
        pack = packs.get_pack_by_name(names[0])
 
1103
        # Schedule this one for removal
 
1104
        packs._remove_pack_from_memory(pack)
 
1105
        # Now trigger the obsoletion, and ensure that all the remaining files
 
1106
        # are still renamed
 
1107
        packs._obsolete_packs([pack])
 
1108
        self.assertEqual([n + '.pack' for n in names[1:]],
 
1109
                         sorted(packs._pack_transport.list_dir('.')))
 
1110
        # names[0] should not be present in the index anymore
 
1111
        self.assertEqual(names[1:],
 
1112
            sorted(set([osutils.splitext(n)[0] for n in
 
1113
                        packs._index_transport.list_dir('.')])))
 
1114
 
 
1115
    def test_pack_distribution_zero(self):
 
1116
        packs = self.get_packs()
 
1117
        self.assertEqual([0], packs.pack_distribution(0))
 
1118
 
 
1119
    def test_ensure_loaded_unlocked(self):
 
1120
        packs = self.get_packs()
 
1121
        self.assertRaises(errors.ObjectNotLocked,
 
1122
                          packs.ensure_loaded)
 
1123
 
 
1124
    def test_pack_distribution_one_to_nine(self):
 
1125
        packs = self.get_packs()
 
1126
        self.assertEqual([1],
 
1127
            packs.pack_distribution(1))
 
1128
        self.assertEqual([1, 1],
 
1129
            packs.pack_distribution(2))
 
1130
        self.assertEqual([1, 1, 1],
 
1131
            packs.pack_distribution(3))
 
1132
        self.assertEqual([1, 1, 1, 1],
 
1133
            packs.pack_distribution(4))
 
1134
        self.assertEqual([1, 1, 1, 1, 1],
 
1135
            packs.pack_distribution(5))
 
1136
        self.assertEqual([1, 1, 1, 1, 1, 1],
 
1137
            packs.pack_distribution(6))
 
1138
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
 
1139
            packs.pack_distribution(7))
 
1140
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
 
1141
            packs.pack_distribution(8))
 
1142
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
 
1143
            packs.pack_distribution(9))
 
1144
 
 
1145
    def test_pack_distribution_stable_at_boundaries(self):
 
1146
        """When there are multi-rev packs the counts are stable."""
 
1147
        packs = self.get_packs()
 
1148
        # in 10s:
 
1149
        self.assertEqual([10], packs.pack_distribution(10))
 
1150
        self.assertEqual([10, 1], packs.pack_distribution(11))
 
1151
        self.assertEqual([10, 10], packs.pack_distribution(20))
 
1152
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
 
1153
        # 100s
 
1154
        self.assertEqual([100], packs.pack_distribution(100))
 
1155
        self.assertEqual([100, 1], packs.pack_distribution(101))
 
1156
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
 
1157
        self.assertEqual([100, 100], packs.pack_distribution(200))
 
1158
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
 
1159
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
 
1160
 
 
1161
    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
 
1162
        packs = self.get_packs()
 
1163
        existing_packs = [(2000, "big"), (9, "medium")]
 
1164
        # rev count - 2009 -> 2x1000 + 9x1
 
1165
        pack_operations = packs.plan_autopack_combinations(
 
1166
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
 
1167
        self.assertEqual([], pack_operations)
 
1168
 
 
1169
    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
 
1170
        packs = self.get_packs()
 
1171
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
 
1172
        # rev count - 2010 -> 2x1000 + 1x10
 
1173
        pack_operations = packs.plan_autopack_combinations(
 
1174
            existing_packs, [1000, 1000, 10])
 
1175
        self.assertEqual([], pack_operations)
 
1176
 
 
1177
    def test_plan_pack_operations_2010_combines_smallest_two(self):
 
1178
        packs = self.get_packs()
 
1179
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
 
1180
            (1, "single1")]
 
1181
        # rev count - 2010 -> 2x1000 + 1x10 (3)
 
1182
        pack_operations = packs.plan_autopack_combinations(
 
1183
            existing_packs, [1000, 1000, 10])
 
1184
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)
 
1185
 
 
1186
    def test_plan_pack_operations_creates_a_single_op(self):
 
1187
        packs = self.get_packs()
 
1188
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
 
1189
                          (10, 'e'), (6, 'f'), (4, 'g')]
 
1190
        # rev count 150 -> 1x100 and 5x10
 
1191
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
 
1192
        # be combined into a single 120 size pack, and the 6 & 4 would
 
1193
        # becombined into a size 10 pack. However, if we have to rewrite them,
 
1194
        # we save a pack file with no increased I/O by putting them into the
 
1195
        # same file.
 
1196
        distribution = packs.pack_distribution(150)
 
1197
        pack_operations = packs.plan_autopack_combinations(existing_packs,
 
1198
                                                           distribution)
 
1199
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)
 
1200
 
 
1201
    def test_all_packs_none(self):
 
1202
        format = self.get_format()
 
1203
        tree = self.make_branch_and_tree('.', format=format)
 
1204
        tree.lock_read()
 
1205
        self.addCleanup(tree.unlock)
 
1206
        packs = tree.branch.repository._pack_collection
 
1207
        packs.ensure_loaded()
 
1208
        self.assertEqual([], packs.all_packs())
 
1209
 
 
1210
    def test_all_packs_one(self):
 
1211
        format = self.get_format()
 
1212
        tree = self.make_branch_and_tree('.', format=format)
 
1213
        tree.commit('start')
 
1214
        tree.lock_read()
 
1215
        self.addCleanup(tree.unlock)
 
1216
        packs = tree.branch.repository._pack_collection
 
1217
        packs.ensure_loaded()
 
1218
        self.assertEqual([
 
1219
            packs.get_pack_by_name(packs.names()[0])],
 
1220
            packs.all_packs())
 
1221
 
 
1222
    def test_all_packs_two(self):
 
1223
        format = self.get_format()
 
1224
        tree = self.make_branch_and_tree('.', format=format)
 
1225
        tree.commit('start')
 
1226
        tree.commit('continue')
 
1227
        tree.lock_read()
 
1228
        self.addCleanup(tree.unlock)
 
1229
        packs = tree.branch.repository._pack_collection
 
1230
        packs.ensure_loaded()
 
1231
        self.assertEqual([
 
1232
            packs.get_pack_by_name(packs.names()[0]),
 
1233
            packs.get_pack_by_name(packs.names()[1]),
 
1234
            ], packs.all_packs())
 
1235
 
 
1236
    def test_get_pack_by_name(self):
 
1237
        format = self.get_format()
 
1238
        tree = self.make_branch_and_tree('.', format=format)
 
1239
        tree.commit('start')
 
1240
        tree.lock_read()
 
1241
        self.addCleanup(tree.unlock)
 
1242
        packs = tree.branch.repository._pack_collection
 
1243
        packs.reset()
 
1244
        packs.ensure_loaded()
 
1245
        name = packs.names()[0]
 
1246
        pack_1 = packs.get_pack_by_name(name)
 
1247
        # the pack should be correctly initialised
 
1248
        sizes = packs._names[name]
 
1249
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
 
1250
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
 
1251
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
 
1252
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
 
1253
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
 
1254
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
 
1255
        # and the same instance should be returned on successive calls.
 
1256
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
 
1257
 
 
1258
    def test_reload_pack_names_new_entry(self):
 
1259
        tree, r, packs, revs = self.make_packs_and_alt_repo()
 
1260
        names = packs.names()
 
1261
        # Add a new pack file into the repository
 
1262
        rev4 = tree.commit('four')
 
1263
        new_names = tree.branch.repository._pack_collection.names()
 
1264
        new_name = set(new_names).difference(names)
 
1265
        self.assertEqual(1, len(new_name))
 
1266
        new_name = new_name.pop()
 
1267
        # The old collection hasn't noticed yet
 
1268
        self.assertEqual(names, packs.names())
 
1269
        self.assertTrue(packs.reload_pack_names())
 
1270
        self.assertEqual(new_names, packs.names())
 
1271
        # And the repository can access the new revision
 
1272
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
 
1273
        self.assertFalse(packs.reload_pack_names())
 
1274
 
 
1275
    def test_reload_pack_names_added_and_removed(self):
 
1276
        tree, r, packs, revs = self.make_packs_and_alt_repo()
 
1277
        names = packs.names()
 
1278
        # Now repack the whole thing
 
1279
        tree.branch.repository.pack()
 
1280
        new_names = tree.branch.repository._pack_collection.names()
 
1281
        # The other collection hasn't noticed yet
 
1282
        self.assertEqual(names, packs.names())
 
1283
        self.assertTrue(packs.reload_pack_names())
 
1284
        self.assertEqual(new_names, packs.names())
 
1285
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
 
1286
        self.assertFalse(packs.reload_pack_names())
 
1287
 
 
1288
    def test_reload_pack_names_preserves_pending(self):
 
1289
        # TODO: Update this to also test for pending-deleted names
 
1290
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1291
        # We will add one pack (via start_write_group + insert_record_stream),
 
1292
        # and remove another pack (via _remove_pack_from_memory)
 
1293
        orig_names = packs.names()
 
1294
        orig_at_load = packs._packs_at_load
 
1295
        to_remove_name = iter(orig_names).next()
 
1296
        r.start_write_group()
 
1297
        self.addCleanup(r.abort_write_group)
 
1298
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
 
1299
            ('text', 'rev'), (), None, 'content\n')])
 
1300
        new_pack = packs._new_pack
 
1301
        self.assertTrue(new_pack.data_inserted())
 
1302
        new_pack.finish()
 
1303
        packs.allocate(new_pack)
 
1304
        packs._new_pack = None
 
1305
        removed_pack = packs.get_pack_by_name(to_remove_name)
 
1306
        packs._remove_pack_from_memory(removed_pack)
 
1307
        names = packs.names()
 
1308
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
 
1309
        new_names = set([x[0][0] for x in new_nodes])
 
1310
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
 
1311
        self.assertEqual(set(names) - set(orig_names), new_names)
 
1312
        self.assertEqual(set([new_pack.name]), new_names)
 
1313
        self.assertEqual([to_remove_name],
 
1314
                         sorted([x[0][0] for x in deleted_nodes]))
 
1315
        packs.reload_pack_names()
 
1316
        reloaded_names = packs.names()
 
1317
        self.assertEqual(orig_at_load, packs._packs_at_load)
 
1318
        self.assertEqual(names, reloaded_names)
 
1319
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
 
1320
        new_names = set([x[0][0] for x in new_nodes])
 
1321
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
 
1322
        self.assertEqual(set(names) - set(orig_names), new_names)
 
1323
        self.assertEqual(set([new_pack.name]), new_names)
 
1324
        self.assertEqual([to_remove_name],
 
1325
                         sorted([x[0][0] for x in deleted_nodes]))
 
1326
 
 
1327
    def test_autopack_obsoletes_new_pack(self):
 
1328
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1329
        packs._max_pack_count = lambda x: 1
 
1330
        packs.pack_distribution = lambda x: [10]
 
1331
        r.start_write_group()
 
1332
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
 
1333
            ('bogus-rev',), (), None, 'bogus-content\n')])
 
1334
        # This should trigger an autopack, which will combine everything into a
 
1335
        # single pack file.
 
1336
        new_names = r.commit_write_group()
 
1337
        names = packs.names()
 
1338
        self.assertEqual(1, len(names))
 
1339
        self.assertEqual([names[0] + '.pack'],
 
1340
                         packs._pack_transport.list_dir('.'))
 
1341
 
 
1342
    def test_autopack_reloads_and_stops(self):
 
1343
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1344
        # After we have determined what needs to be autopacked, trigger a
 
1345
        # full-pack via the other repo which will cause us to re-evaluate and
 
1346
        # decide we don't need to do anything
 
1347
        orig_execute = packs._execute_pack_operations
 
1348
        def _munged_execute_pack_ops(*args, **kwargs):
 
1349
            tree.branch.repository.pack()
 
1350
            return orig_execute(*args, **kwargs)
 
1351
        packs._execute_pack_operations = _munged_execute_pack_ops
 
1352
        packs._max_pack_count = lambda x: 1
 
1353
        packs.pack_distribution = lambda x: [10]
 
1354
        self.assertFalse(packs.autopack())
 
1355
        self.assertEqual(1, len(packs.names()))
 
1356
        self.assertEqual(tree.branch.repository._pack_collection.names(),
 
1357
                         packs.names())
 
1358
 
 
1359
    def test__save_pack_names(self):
 
1360
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1361
        names = packs.names()
 
1362
        pack = packs.get_pack_by_name(names[0])
 
1363
        packs._remove_pack_from_memory(pack)
 
1364
        packs._save_pack_names(obsolete_packs=[pack])
 
1365
        cur_packs = packs._pack_transport.list_dir('.')
 
1366
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
 
1367
        # obsolete_packs will also have stuff like .rix and .iix present.
 
1368
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
 
1369
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
 
1370
        self.assertEqual([pack.name], sorted(obsolete_names))
 
1371
 
 
1372
    def test__save_pack_names_already_obsoleted(self):
 
1373
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1374
        names = packs.names()
 
1375
        pack = packs.get_pack_by_name(names[0])
 
1376
        packs._remove_pack_from_memory(pack)
 
1377
        # We are going to simulate a concurrent autopack by manually obsoleting
 
1378
        # the pack directly.
 
1379
        packs._obsolete_packs([pack])
 
1380
        packs._save_pack_names(clear_obsolete_packs=True,
 
1381
                               obsolete_packs=[pack])
 
1382
        cur_packs = packs._pack_transport.list_dir('.')
 
1383
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
 
1384
        # Note that while we set clear_obsolete_packs=True, it should not
 
1385
        # delete a pack file that we have also scheduled for obsoletion.
 
1386
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
 
1387
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
 
1388
        self.assertEqual([pack.name], sorted(obsolete_names))
 
1389
 
 
1390
    def test_pack_no_obsolete_packs_directory(self):
 
1391
        """Bug #314314, don't fail if obsolete_packs directory does
 
1392
        not exist."""
 
1393
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1394
        r.control_transport.rmdir('obsolete_packs')
 
1395
        packs._clear_obsolete_packs()
 
1396
 
 
1397
 
 
1398
class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        """Assert that both == and != agree left and right are equal."""
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        """Assert that both == and != agree left and right differ."""
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # change all attributes and ensure equality changes as we do.
        for attr in ('revision_index', 'inventory_index', 'text_index',
                     'signature_index', 'name', 'transport'):
            setattr(left, attr, 'a')
            self.assertCurrentlyNotEqual(left, right)
            setattr(right, attr, 'a')
            self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())
class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        """A freshly constructed NewPack exposes the expected attributes."""
        upload_transport = self.get_transport('upload')
        pack_transport = self.get_transport('pack')
        index_transport = self.get_transport('index')
        upload_transport.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_transport,
            upload_transport=upload_transport,
            pack_transport=pack_transport,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
        # In-progress indices are builders of the collection's builder class.
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        # The transports come straight from the collection.
        self.assertTrue(pack.upload_transport is upload_transport)
        self.assertTrue(pack.index_transport is index_transport)
        self.assertTrue(pack.pack_transport is pack_transport)
        # Index sizes are unknown until the pack is finished.
        self.assertEqual(None, pack.index_sizes)
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)
class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        """Packing a revision subset moves those packs to the front."""
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # When copying the B & C revisions, their pack files should be moved
        # to the front of the stack: B & C first, the rest in original order.
        expected_order = [packs[1], packs[2], packs[0], packs[3]]
        packer.pack()
        self.assertEqual(expected_order, packer.packs)
class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        """Return the pack collection of a freshly made repository."""
        repo = self.make_repository('.')
        return repo._pack_collection

    def test_open_pack_will_optimise(self):
        """Packs opened by an optimising packer tune indices for size."""
        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
                                            [], '.test')
        new_pack = packer.open_pack()
        self.addCleanup(new_pack.abort) # ensure cleanup
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        # Every index builder should be flagged to optimise for size.
        for index in (new_pack.revision_index, new_pack.inventory_index,
                      new_pack.text_index, new_pack.signature_index):
            self.assertTrue(index._optimize_for_size)
class TestGCCHKPacker(TestCaseWithTransport):
    """Tests for the groupcompress (2a) GCCHKPacker."""

    def make_abc_branch(self):
        """Return a branch with three linear revisions A -> B -> C."""
        builder = self.make_branch_builder('source')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('file', 'file-id', 'file', 'content\n')),
            ])
        builder.build_snapshot('B', ['A'], [
            ('add', ('dir', 'dir-id', 'directory', None))])
        builder.build_snapshot('C', ['B'], [
            ('modify', ('file-id', 'new content\n'))])
        builder.finish_series()
        return builder.get_branch()

    def make_branch_with_disjoint_inventory_and_revision(self):
        """a repo with separate packs for a revisions Revision and Inventory.

        There will be one pack file that holds the Revision content, and one
        for the Inventory content.

        :return: (repository,
                  pack_name_with_rev_A_Revision,
                  pack_name_with_rev_A_Inventory,
                  pack_name_with_rev_C_content)
        """
        b_source = self.make_abc_branch()
        b_base = b_source.bzrdir.sprout('base', revision_id='A').open_branch()
        b_stacked = b_base.bzrdir.sprout('stacked', stacked=True).open_branch()
        b_stacked.lock_write()
        self.addCleanup(b_stacked.unlock)
        b_stacked.fetch(b_source, 'B')
        # Now re-open the stacked repo directly (no fallbacks) so that we can
        # fill in the A rev.
        repo_not_stacked = b_stacked.bzrdir.open_repository()
        repo_not_stacked.lock_write()
        self.addCleanup(repo_not_stacked.unlock)
        # Now we should have a pack file with A's inventory, but not its
        # Revision
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.inventories.keys()))
        self.assertEqual([('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        stacked_pack_names = repo_not_stacked._pack_collection.names()
        # We have a couple names here, figure out which has A's inventory
        for name in stacked_pack_names:
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
            if ('A',) in keys:
                inv_a_pack_name = name
                break
        else:
            self.fail('Could not find pack containing A\'s inventory')
        repo_not_stacked.fetch(b_source.repository, 'A')
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        # The fetch of A must have created exactly one new pack: the one
        # holding A's Revision.
        new_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
        self.assertEqual(1, len(rev_a_pack_names))
        rev_a_pack_name = list(rev_a_pack_names)[0]
        # Now fetch 'C', so we have a couple pack files to join
        repo_not_stacked.fetch(b_source.repository, 'C')
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
        self.assertEqual(1, len(rev_c_pack_names))
        rev_c_pack_name = list(rev_c_pack_names)[0]
        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
                rev_c_pack_name)

    def test_pack_with_distant_inventories(self):
        # See https://bugs.launchpad.net/bzr/+bug/437003
        # When repacking, it is possible to have an inventory in a different
        # pack file than the associated revision. An autopack can then come
        # along, and miss that inventory, and complain.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                    [a_pack, c_pack], '.test-pack')
        # This would raise ValueError in bug #437003, but should not raise an
        # error once fixed.
        packer.pack()

    def test_pack_with_missing_inventory(self):
        # Similar to test_pack_with_distant_inventories, but this time, we
        # force the A inventory to actually be gone from the repository.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
            repo._pack_collection.all_packs(), '.test-pack')
        e = self.assertRaises(ValueError, packer.pack)
        packer.new_pack.abort()
        self.assertContainsRe(str(e),
            r"We are missing inventories for revisions: .*'A'")
class TestCrossFormatPacks(TestCaseWithTransport):
    """Check when cross-format transfers trigger a pack of the target."""

    def log_pack(self, hint=None):
        """Record a pack() call, then delegate to the real implementation."""
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def _check_pack_calls(self, expect_pack_called):
        """Assert that pack() was intercepted exactly once, or not at all."""
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        """Insert a stream from src_fmt into target_fmt, counting pack()s."""
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository._get_source(target._format)
        # Intercept target.pack so each call is recorded.
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, revision_ids=[tip])
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        self._check_pack_calls(expect_pack_called)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        """Fetch from src_fmt into target_fmt, counting pack() calls."""
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository
        # Intercept target.pack so each call is recorded.
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        target.fetch(source)
        self._check_pack_calls(expect_pack_called)

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)
class Test_LazyListJoin(tests.TestCase):
    """Tests for bzrlib.repository._LazyListJoin."""

    def test__repr__(self):
        """repr() shows the qualified class name and the joined lists."""
        lazy = repository._LazyListJoin(['a'], ['b'])
        self.assertEqual("bzrlib.repository._LazyListJoin((['a'], ['b']))",
                         repr(lazy))
class TestFeatures(tests.TestCaseWithTransport):
 
1722
 
 
1723
    def test_open_with_present_feature(self):
 
1724
        self.addCleanup(
 
1725
            repository.RepositoryFormatMetaDir.unregister_feature,
 
1726
            "makes-cheese-sandwich")
 
1727
        repository.RepositoryFormatMetaDir.register_feature(
 
1728
            "makes-cheese-sandwich")
 
1729
        repo = self.make_repository('.')
 
1730
        repo.lock_write()
 
1731
        repo._format.features["makes-cheese-sandwich"] = "required"
 
1732
        repo._format.check_support_status(False)
 
1733
        repo.unlock()
 
1734
 
 
1735
    def test_open_with_missing_required_feature(self):
 
1736
        repo = self.make_repository('.')
 
1737
        repo.lock_write()
 
1738
        repo._format.features["makes-cheese-sandwich"] = "required"
 
1739
        self.assertRaises(errors.MissingFeature,
 
1740
            repo._format.check_support_status, False)