~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Aaron Bentley
  • Date: 2007-06-11 14:59:52 UTC
  • mto: (2520.5.2 bzr.mpbundle)
  • mto: This revision was merged to the branch mainline in revision 2631.
  • Revision ID: abentley@panoramicfeedback.com-20070611145952-cwt4480gphdhen6l
Get installation started

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006-2011 Canonical Ltd
 
1
# Copyright (C) 2006, 2007 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
12
12
#
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
 
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
15
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
16
16
 
17
17
"""Tests for the Repository facility that are not interface tests.
18
18
 
19
 
For interface tests see tests/per_repository/*.py.
 
19
For interface tests see tests/repository_implementations/*.py.
20
20
 
21
21
For concrete class tests see this file, and for storage formats tests
22
22
also see this file.
23
23
"""
24
24
 
25
25
from stat import S_ISDIR
 
26
from StringIO import StringIO
26
27
 
 
28
from bzrlib import symbol_versioning
27
29
import bzrlib
28
 
from bzrlib.errors import (
29
 
    UnknownFormatError,
30
 
    UnsupportedFormatError,
31
 
    )
32
 
from bzrlib import (
33
 
    btree_index,
34
 
    symbol_versioning,
35
 
    tests,
36
 
    transport,
37
 
    vf_search,
38
 
    )
39
 
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
40
 
from bzrlib.index import GraphIndex
 
30
import bzrlib.bzrdir as bzrdir
 
31
import bzrlib.errors as errors
 
32
from bzrlib.errors import (NotBranchError,
 
33
                           NoSuchFile,
 
34
                           UnknownFormatError,
 
35
                           UnsupportedFormatError,
 
36
                           )
41
37
from bzrlib.repository import RepositoryFormat
42
 
from bzrlib.tests import (
43
 
    TestCase,
44
 
    TestCaseWithTransport,
45
 
    )
 
38
from bzrlib.tests import TestCase, TestCaseWithTransport
 
39
from bzrlib.transport import get_transport
 
40
from bzrlib.transport.memory import MemoryServer
46
41
from bzrlib import (
47
 
    bzrdir,
48
 
    errors,
49
 
    inventory,
50
 
    osutils,
51
42
    repository,
52
 
    revision as _mod_revision,
53
43
    upgrade,
54
 
    versionedfile,
55
 
    vf_repository,
56
44
    workingtree,
57
45
    )
58
 
from bzrlib.repofmt import (
59
 
    groupcompress_repo,
60
 
    knitrepo,
61
 
    knitpack_repo,
62
 
    pack_repo,
63
 
    )
 
46
from bzrlib.repofmt import knitrepo, weaverepo
64
47
 
65
48
 
66
49
class TestDefaultFormat(TestCase):
68
51
    def test_get_set_default_format(self):
69
52
        old_default = bzrdir.format_registry.get('default')
70
53
        private_default = old_default().repository_format.__class__
71
 
        old_format = repository.format_registry.get_default()
 
54
        old_format = repository.RepositoryFormat.get_default_format()
72
55
        self.assertTrue(isinstance(old_format, private_default))
73
56
        def make_sample_bzrdir():
74
57
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
86
69
            self.assertEqual(result, 'A bzr repository dir')
87
70
        finally:
88
71
            bzrdir.format_registry.remove('default')
89
 
            bzrdir.format_registry.remove('sample')
90
72
            bzrdir.format_registry.register('default', old_default, '')
91
 
        self.assertIsInstance(repository.format_registry.get_default(),
 
73
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
92
74
                              old_format.__class__)
93
75
 
94
76
 
95
 
class SampleRepositoryFormat(repository.RepositoryFormatMetaDir):
 
77
class SampleRepositoryFormat(repository.RepositoryFormat):
96
78
    """A sample format
97
79
 
98
 
    this format is initializable, unsupported to aid in testing the
 
80
    this format is initializable, unsupported to aid in testing the 
99
81
    open and open(unsupported=True) routines.
100
82
    """
101
83
 
102
 
    @classmethod
103
 
    def get_format_string(cls):
 
84
    def get_format_string(self):
104
85
        """See RepositoryFormat.get_format_string()."""
105
86
        return "Sample .bzr repository format."
106
87
 
117
98
        return "opened repository."
118
99
 
119
100
 
120
 
class SampleExtraRepositoryFormat(repository.RepositoryFormat):
121
 
    """A sample format that can not be used in a metadir
122
 
 
123
 
    """
124
 
 
125
 
    def get_format_string(self):
126
 
        raise NotImplementedError
127
 
 
128
 
 
129
101
class TestRepositoryFormat(TestCaseWithTransport):
130
102
    """Tests for the Repository format detection used by the bzr meta dir facility.BzrBranchFormat facility."""
131
103
 
132
104
    def test_find_format(self):
133
105
        # is the right format object found for a repository?
134
106
        # create a branch with a few known format objects.
135
 
        # this is not quite the same as
 
107
        # this is not quite the same as 
136
108
        self.build_tree(["foo/", "bar/"])
137
109
        def check_format(format, url):
138
110
            dir = format._matchingbzrdir.initialize(url)
139
111
            format.initialize(dir)
140
 
            t = transport.get_transport_from_path(url)
141
 
            found_format = repository.RepositoryFormatMetaDir.find_format(dir)
142
 
            self.assertIsInstance(found_format, format.__class__)
143
 
        check_format(repository.format_registry.get_default(), "bar")
144
 
 
 
112
            t = get_transport(url)
 
113
            found_format = repository.RepositoryFormat.find_format(dir)
 
114
            self.failUnless(isinstance(found_format, format.__class__))
 
115
        check_format(weaverepo.RepositoryFormat7(), "bar")
 
116
        
145
117
    def test_find_format_no_repository(self):
146
118
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
147
119
        self.assertRaises(errors.NoRepositoryPresent,
148
 
                          repository.RepositoryFormatMetaDir.find_format,
 
120
                          repository.RepositoryFormat.find_format,
149
121
                          dir)
150
122
 
151
 
    def test_from_string(self):
152
 
        self.assertIsInstance(
153
 
            SampleRepositoryFormat.from_string(
154
 
                "Sample .bzr repository format."),
155
 
            SampleRepositoryFormat)
156
 
        self.assertRaises(ValueError,
157
 
            SampleRepositoryFormat.from_string,
158
 
                "Different .bzr repository format.")
159
 
 
160
123
    def test_find_format_unknown_format(self):
161
124
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
162
125
        SampleRepositoryFormat().initialize(dir)
163
126
        self.assertRaises(UnknownFormatError,
164
 
                          repository.RepositoryFormatMetaDir.find_format,
 
127
                          repository.RepositoryFormat.find_format,
165
128
                          dir)
166
129
 
167
130
    def test_register_unregister_format(self):
168
 
        # Test deprecated format registration functions
169
131
        format = SampleRepositoryFormat()
170
132
        # make a control dir
171
133
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
172
134
        # make a repo
173
135
        format.initialize(dir)
174
136
        # register a format for it.
175
 
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
176
 
            repository.RepositoryFormat.register_format, format)
 
137
        repository.RepositoryFormat.register_format(format)
177
138
        # which repository.Open will refuse (not supported)
178
 
        self.assertRaises(UnsupportedFormatError, repository.Repository.open,
179
 
            self.get_url())
 
139
        self.assertRaises(UnsupportedFormatError, repository.Repository.open, self.get_url())
180
140
        # but open(unsupported) will work
181
141
        self.assertEqual(format.open(dir), "opened repository.")
182
142
        # unregister the format
183
 
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
184
 
            repository.RepositoryFormat.unregister_format, format)
185
 
 
186
 
 
187
 
class TestRepositoryFormatRegistry(TestCase):
188
 
 
189
 
    def setUp(self):
190
 
        super(TestRepositoryFormatRegistry, self).setUp()
191
 
        self.registry = repository.RepositoryFormatRegistry()
192
 
 
193
 
    def test_register_unregister_format(self):
194
 
        format = SampleRepositoryFormat()
195
 
        self.registry.register(format)
196
 
        self.assertEquals(format, self.registry.get("Sample .bzr repository format."))
197
 
        self.registry.remove(format)
198
 
        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")
199
 
 
200
 
    def test_get_all(self):
201
 
        format = SampleRepositoryFormat()
202
 
        self.assertEquals([], self.registry._get_all())
203
 
        self.registry.register(format)
204
 
        self.assertEquals([format], self.registry._get_all())
205
 
 
206
 
    def test_register_extra(self):
207
 
        format = SampleExtraRepositoryFormat()
208
 
        self.assertEquals([], self.registry._get_all())
209
 
        self.registry.register_extra(format)
210
 
        self.assertEquals([format], self.registry._get_all())
211
 
 
212
 
    def test_register_extra_lazy(self):
213
 
        self.assertEquals([], self.registry._get_all())
214
 
        self.registry.register_extra_lazy("bzrlib.tests.test_repository",
215
 
            "SampleExtraRepositoryFormat")
216
 
        formats = self.registry._get_all()
217
 
        self.assertEquals(1, len(formats))
218
 
        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
 
143
        repository.RepositoryFormat.unregister_format(format)
 
144
 
 
145
 
 
146
class TestFormat6(TestCaseWithTransport):
 
147
 
 
148
    def test_no_ancestry_weave(self):
 
149
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
150
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
151
        # We no longer need to create the ancestry.weave file
 
152
        # since it is *never* used.
 
153
        self.assertRaises(NoSuchFile,
 
154
                          control.transport.get,
 
155
                          'ancestry.weave')
 
156
 
 
157
 
 
158
class TestFormat7(TestCaseWithTransport):
 
159
    
 
160
    def test_disk_layout(self):
 
161
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
162
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
163
        # in case of side effects of locking.
 
164
        repo.lock_write()
 
165
        repo.unlock()
 
166
        # we want:
 
167
        # format 'Bazaar-NG Repository format 7'
 
168
        # lock ''
 
169
        # inventory.weave == empty_weave
 
170
        # empty revision-store directory
 
171
        # empty weaves directory
 
172
        t = control.get_repository_transport(None)
 
173
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
174
                             t.get('format').read())
 
175
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
176
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
177
        self.assertEqualDiff('# bzr weave file v5\n'
 
178
                             'w\n'
 
179
                             'W\n',
 
180
                             t.get('inventory.weave').read())
 
181
 
 
182
    def test_shared_disk_layout(self):
 
183
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
184
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
185
        # we want:
 
186
        # format 'Bazaar-NG Repository format 7'
 
187
        # inventory.weave == empty_weave
 
188
        # empty revision-store directory
 
189
        # empty weaves directory
 
190
        # a 'shared-storage' marker file.
 
191
        # lock is not present when unlocked
 
192
        t = control.get_repository_transport(None)
 
193
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
194
                             t.get('format').read())
 
195
        self.assertEqualDiff('', t.get('shared-storage').read())
 
196
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
197
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
198
        self.assertEqualDiff('# bzr weave file v5\n'
 
199
                             'w\n'
 
200
                             'W\n',
 
201
                             t.get('inventory.weave').read())
 
202
        self.assertFalse(t.has('branch-lock'))
 
203
 
 
204
    def test_creates_lockdir(self):
 
205
        """Make sure it appears to be controlled by a LockDir existence"""
 
206
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
207
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
208
        t = control.get_repository_transport(None)
 
209
        # TODO: Should check there is a 'lock' toplevel directory, 
 
210
        # regardless of contents
 
211
        self.assertFalse(t.has('lock/held/info'))
 
212
        repo.lock_write()
 
213
        try:
 
214
            self.assertTrue(t.has('lock/held/info'))
 
215
        finally:
 
216
            # unlock so we don't get a warning about failing to do so
 
217
            repo.unlock()
 
218
 
 
219
    def test_uses_lockdir(self):
 
220
        """repo format 7 actually locks on lockdir"""
 
221
        base_url = self.get_url()
 
222
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
 
223
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
224
        t = control.get_repository_transport(None)
 
225
        repo.lock_write()
 
226
        repo.unlock()
 
227
        del repo
 
228
        # make sure the same lock is created by opening it
 
229
        repo = repository.Repository.open(base_url)
 
230
        repo.lock_write()
 
231
        self.assertTrue(t.has('lock/held/info'))
 
232
        repo.unlock()
 
233
        self.assertFalse(t.has('lock/held/info'))
 
234
 
 
235
    def test_shared_no_tree_disk_layout(self):
 
236
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
237
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
238
        repo.set_make_working_trees(False)
 
239
        # we want:
 
240
        # format 'Bazaar-NG Repository format 7'
 
241
        # lock ''
 
242
        # inventory.weave == empty_weave
 
243
        # empty revision-store directory
 
244
        # empty weaves directory
 
245
        # a 'shared-storage' marker file.
 
246
        t = control.get_repository_transport(None)
 
247
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
248
                             t.get('format').read())
 
249
        ## self.assertEqualDiff('', t.get('lock').read())
 
250
        self.assertEqualDiff('', t.get('shared-storage').read())
 
251
        self.assertEqualDiff('', t.get('no-working-trees').read())
 
252
        repo.set_make_working_trees(True)
 
253
        self.assertFalse(t.has('no-working-trees'))
 
254
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
255
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
256
        self.assertEqualDiff('# bzr weave file v5\n'
 
257
                             'w\n'
 
258
                             'W\n',
 
259
                             t.get('inventory.weave').read())
219
260
 
220
261
 
221
262
class TestFormatKnit1(TestCaseWithTransport):
222
 
 
223
 
    def test_attribute__fetch_order(self):
224
 
        """Knits need topological data insertion."""
225
 
        repo = self.make_repository('.',
226
 
                format=bzrdir.format_registry.get('knit')())
227
 
        self.assertEqual('topological', repo._format._fetch_order)
228
 
 
229
 
    def test_attribute__fetch_uses_deltas(self):
230
 
        """Knits reuse deltas."""
231
 
        repo = self.make_repository('.',
232
 
                format=bzrdir.format_registry.get('knit')())
233
 
        self.assertEqual(True, repo._format._fetch_uses_deltas)
234
 
 
 
263
    
235
264
    def test_disk_layout(self):
236
265
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
237
266
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
251
280
        # self.assertEqualDiff('', t.get('lock').read())
252
281
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
253
282
        self.check_knits(t)
254
 
        # Check per-file knits.
255
 
        branch = control.create_branch()
256
 
        tree = control.create_workingtree()
257
 
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
258
 
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
259
 
        tree.commit('1st post', rev_id='foo')
260
 
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
261
 
            '\nfoo fulltext 0 81  :')
262
283
 
263
 
    def assertHasKnit(self, t, knit_name, extra_content=''):
 
284
    def assertHasKnit(self, t, knit_name):
264
285
        """Assert that knit_name exists on t."""
265
 
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
 
286
        self.assertEqualDiff('# bzr knit index 8\n',
266
287
                             t.get(knit_name + '.kndx').read())
 
288
        # no default content
 
289
        self.assertTrue(t.has(knit_name + '.knit'))
267
290
 
268
291
    def check_knits(self, t):
269
292
        """check knit content for a repository."""
313
336
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
314
337
        self.check_knits(t)
315
338
 
316
 
    def test_deserialise_sets_root_revision(self):
317
 
        """We must have a inventory.root.revision
318
 
 
319
 
        Old versions of the XML5 serializer did not set the revision_id for
320
 
        the whole inventory. So we grab the one from the expected text. Which
321
 
        is valid when the api is not being abused.
322
 
        """
323
 
        repo = self.make_repository('.',
324
 
                format=bzrdir.format_registry.get('knit')())
325
 
        inv_xml = '<inventory format="5">\n</inventory>\n'
326
 
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
327
 
        self.assertEqual('test-rev-id', inv.root.revision)
328
 
 
329
 
    def test_deserialise_uses_global_revision_id(self):
330
 
        """If it is set, then we re-use the global revision id"""
331
 
        repo = self.make_repository('.',
332
 
                format=bzrdir.format_registry.get('knit')())
333
 
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
334
 
                   '</inventory>\n')
335
 
        # Arguably, the deserialise_inventory should detect a mismatch, and
336
 
        # raise an error, rather than silently using one revision_id over the
337
 
        # other.
338
 
        self.assertRaises(AssertionError, repo._deserialise_inventory,
339
 
            'test-rev-id', inv_xml)
340
 
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
341
 
        self.assertEqual('other-rev-id', inv.root.revision)
342
 
 
343
 
    def test_supports_external_lookups(self):
344
 
        repo = self.make_repository('.',
345
 
                format=bzrdir.format_registry.get('knit')())
346
 
        self.assertFalse(repo._format.supports_external_lookups)
347
 
 
348
339
 
349
340
class DummyRepository(object):
350
341
    """A dummy repository for testing."""
351
342
 
352
 
    _format = None
353
343
    _serializer = None
354
344
 
355
345
    def supports_rich_root(self):
356
 
        if self._format is not None:
357
 
            return self._format.rich_root_data
358
346
        return False
359
347
 
360
 
    def get_graph(self):
361
 
        raise NotImplementedError
362
 
 
363
 
    def get_parent_map(self, revision_ids):
364
 
        raise NotImplementedError
365
 
 
366
348
 
367
349
class InterDummy(repository.InterRepository):
368
350
    """An inter-repository optimised code path for DummyRepository.
369
351
 
370
352
    This is for use during testing where we use DummyRepository as repositories
371
353
    so that none of the default regsitered inter-repository classes will
372
 
    MATCH.
 
354
    match.
373
355
    """
374
356
 
375
357
    @staticmethod
376
358
    def is_compatible(repo_source, repo_target):
377
359
        """InterDummy is compatible with DummyRepository."""
378
 
        return (isinstance(repo_source, DummyRepository) and
 
360
        return (isinstance(repo_source, DummyRepository) and 
379
361
            isinstance(repo_target, DummyRepository))
380
362
 
381
363
 
389
371
        # classes do not barf inappropriately when a surprising repository type
390
372
        # is handed to them.
391
373
        dummy_a = DummyRepository()
392
 
        dummy_a._format = RepositoryFormat()
393
 
        dummy_a._format.supports_full_versioned_files = True
394
374
        dummy_b = DummyRepository()
395
 
        dummy_b._format = RepositoryFormat()
396
 
        dummy_b._format.supports_full_versioned_files = True
397
375
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
398
376
 
399
377
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
400
378
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
401
 
 
 
379
        
402
380
        The effective default is now InterSameDataRepository because there is
403
381
        no actual sane default in the presence of incompatible data models.
404
382
        """
405
383
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
406
 
        self.assertEqual(vf_repository.InterSameDataRepository,
 
384
        self.assertEqual(repository.InterSameDataRepository,
407
385
                         inter_repo.__class__)
408
386
        self.assertEqual(repo_a, inter_repo.source)
409
387
        self.assertEqual(repo_b, inter_repo.target)
415
393
        # pair that it returns true on for the is_compatible static method
416
394
        # check
417
395
        dummy_a = DummyRepository()
418
 
        dummy_a._format = RepositoryFormat()
419
396
        dummy_b = DummyRepository()
420
 
        dummy_b._format = RepositoryFormat()
421
397
        repo = self.make_repository('.')
422
398
        # hack dummies to look like repo somewhat.
423
399
        dummy_a._serializer = repo._serializer
424
 
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
425
 
        dummy_a._format.rich_root_data = repo._format.rich_root_data
426
 
        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
427
400
        dummy_b._serializer = repo._serializer
428
 
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
429
 
        dummy_b._format.rich_root_data = repo._format.rich_root_data
430
 
        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
431
401
        repository.InterRepository.register_optimiser(InterDummy)
432
402
        try:
433
403
            # we should get the default for something InterDummy returns False
446
416
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
447
417
 
448
418
 
449
 
class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):
450
 
 
451
 
    @classmethod
452
 
    def get_format_string(cls):
453
 
        return "Test Format 1"
454
 
 
455
 
 
456
 
class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):
457
 
 
458
 
    @classmethod
459
 
    def get_format_string(cls):
460
 
        return "Test Format 2"
 
419
class TestInterWeaveRepo(TestCaseWithTransport):
 
420
 
 
421
    def test_is_compatible_and_registered(self):
 
422
        # InterWeaveRepo is compatible when either side
 
423
        # is a format 5/6/7 branch
 
424
        from bzrlib.repofmt import knitrepo, weaverepo
 
425
        formats = [weaverepo.RepositoryFormat5(),
 
426
                   weaverepo.RepositoryFormat6(),
 
427
                   weaverepo.RepositoryFormat7()]
 
428
        incompatible_formats = [weaverepo.RepositoryFormat4(),
 
429
                                knitrepo.RepositoryFormatKnit1(),
 
430
                                ]
 
431
        repo_a = self.make_repository('a')
 
432
        repo_b = self.make_repository('b')
 
433
        is_compatible = repository.InterWeaveRepo.is_compatible
 
434
        for source in incompatible_formats:
 
435
            # force incompatible left then right
 
436
            repo_a._format = source
 
437
            repo_b._format = formats[0]
 
438
            self.assertFalse(is_compatible(repo_a, repo_b))
 
439
            self.assertFalse(is_compatible(repo_b, repo_a))
 
440
        for source in formats:
 
441
            repo_a._format = source
 
442
            for target in formats:
 
443
                repo_b._format = target
 
444
                self.assertTrue(is_compatible(repo_a, repo_b))
 
445
        self.assertEqual(repository.InterWeaveRepo,
 
446
                         repository.InterRepository.get(repo_a,
 
447
                                                        repo_b).__class__)
461
448
 
462
449
 
463
450
class TestRepositoryConverter(TestCaseWithTransport):
464
451
 
465
452
    def test_convert_empty(self):
466
 
        source_format = TestRepositoryFormat1()
467
 
        target_format = TestRepositoryFormat2()
468
 
        repository.format_registry.register(source_format)
469
 
        self.addCleanup(repository.format_registry.remove,
470
 
            source_format)
471
 
        repository.format_registry.register(target_format)
472
 
        self.addCleanup(repository.format_registry.remove,
473
 
            target_format)
474
 
        t = self.get_transport()
 
453
        t = get_transport(self.get_url('.'))
475
454
        t.mkdir('repository')
476
455
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
477
 
        repo = TestRepositoryFormat1().initialize(repo_dir)
 
456
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
 
457
        target_format = knitrepo.RepositoryFormatKnit1()
478
458
        converter = repository.CopyConverter(target_format)
479
459
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
480
460
        try:
485
465
        self.assertTrue(isinstance(target_format, repo._format.__class__))
486
466
 
487
467
 
 
468
class TestMisc(TestCase):
 
469
    
 
470
    def test_unescape_xml(self):
 
471
        """We get some kind of error when malformed entities are passed"""
 
472
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;') 
 
473
 
 
474
 
488
475
class TestRepositoryFormatKnit3(TestCaseWithTransport):
489
476
 
490
 
    def test_attribute__fetch_order(self):
491
 
        """Knits need topological data insertion."""
492
 
        format = bzrdir.BzrDirMetaFormat1()
493
 
        format.repository_format = knitrepo.RepositoryFormatKnit3()
494
 
        repo = self.make_repository('.', format=format)
495
 
        self.assertEqual('topological', repo._format._fetch_order)
496
 
 
497
 
    def test_attribute__fetch_uses_deltas(self):
498
 
        """Knits reuse deltas."""
499
 
        format = bzrdir.BzrDirMetaFormat1()
500
 
        format.repository_format = knitrepo.RepositoryFormatKnit3()
501
 
        repo = self.make_repository('.', format=format)
502
 
        self.assertEqual(True, repo._format._fetch_uses_deltas)
503
 
 
504
477
    def test_convert(self):
505
478
        """Ensure the upgrade adds weaves for roots"""
506
479
        format = bzrdir.BzrDirMetaFormat1()
508
481
        tree = self.make_branch_and_tree('.', format)
509
482
        tree.commit("Dull commit", rev_id="dull")
510
483
        revision_tree = tree.branch.repository.revision_tree('dull')
511
 
        revision_tree.lock_read()
512
 
        try:
513
 
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
514
 
                revision_tree.inventory.root.file_id)
515
 
        finally:
516
 
            revision_tree.unlock()
 
484
        self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
 
485
            revision_tree.inventory.root.file_id)
517
486
        format = bzrdir.BzrDirMetaFormat1()
518
487
        format.repository_format = knitrepo.RepositoryFormatKnit3()
519
488
        upgrade.Convert('.', format)
520
489
        tree = workingtree.WorkingTree.open('.')
521
490
        revision_tree = tree.branch.repository.revision_tree('dull')
522
 
        revision_tree.lock_read()
523
 
        try:
524
 
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
525
 
        finally:
526
 
            revision_tree.unlock()
 
491
        revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
527
492
        tree.commit("Another dull commit", rev_id='dull2')
528
493
        revision_tree = tree.branch.repository.revision_tree('dull2')
529
 
        revision_tree.lock_read()
530
 
        self.addCleanup(revision_tree.unlock)
531
494
        self.assertEqual('dull', revision_tree.inventory.root.revision)
532
495
 
533
 
    def test_supports_external_lookups(self):
534
 
        format = bzrdir.BzrDirMetaFormat1()
535
 
        format.repository_format = knitrepo.RepositoryFormatKnit3()
536
 
        repo = self.make_repository('.', format=format)
537
 
        self.assertFalse(repo._format.supports_external_lookups)
538
 
 
539
 
 
540
 
class Test2a(tests.TestCaseWithMemoryTransport):
    """Tests specific to the '2a' (groupcompress + CHK inventory) format."""

    def test_chk_bytes_uses_custom_btree_parser(self):
        # The chk_bytes index should use the groupcompress-CHK leaf factory
        # both on the freshly created repository and after re-opening it.
        mt = self.make_branch_and_memory_tree('test', format='2a')
        mt.lock_write()
        self.addCleanup(mt.unlock)
        mt.add([''], ['root-id'])
        mt.commit('first')
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
        # It should also work if we re-open the repo
        repo = mt.branch.repository.bzrdir.open_repository()
        repo.lock_read()
        self.addCleanup(repo.unlock)
        index = repo.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)

    def test_fetch_combines_groups(self):
        # NOTE: this test was accidentally defined three times in a row in
        # the original file; the identical duplicates (which shadowed each
        # other at class-definition time) have been collapsed into this one.
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])

    def test_format_pack_compresses_True(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        # A committed 2a inventory exposes both CHK maps, each with the
        # expected 64KiB maximum page size.
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.lock_write()
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        tree.unlock()
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
        inv.parent_id_basename_to_file_id._ensure_root()
        inv.id_to_entry._ensure_root()
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
        self.assertEqual(65536,
            inv.parent_id_basename_to_file_id._root_node.maximum_size)

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()
        # 2 to combine
        tree.commit('2')
        tree.commit('3')
        # 'all_names' rather than 'all' so as not to shadow the builtin.
        all_names = tree.branch.repository._pack_collection.names()
        combine = list(set(all_names) - set(to_keep))
        self.assertLength(3, all_names)
        self.assertLength(2, combine)
        tree.branch.repository.pack(hint=combine)
        final = tree.branch.repository._pack_collection.names()
        self.assertLength(2, final)
        self.assertFalse(combine[0] in final)
        self.assertFalse(combine[1] in final)
        self.assertSubset(to_keep, final)

    def test_stream_source_to_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='2a')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
                            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = vf_search.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                # Drain the substream so the stream can advance.
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)

    def test_inconsistency_fatal(self):
        # Only the revisions index treats inconsistent duplicate entries as
        # fatal; the other versioned-file indices tolerate them.
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
        self.assertFalse(repo.texts._index._inconsistency_fatal)
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
    """Check which stream source is chosen for pack-to-pack fetches.

    An exact format match should use the optimised KnitPackStreamSource;
    any mismatch (or a knit target) falls back to the generic StreamSource.
    """

    def test_source_to_exact_pack_092(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='pack-0.92')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        source = self.make_repository('source', format='rich-root-pack')
        target = self.make_repository('target', format='rich-root-pack')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        source = self.make_repository('source', format='1.9-rich-root')
        target = self.make_repository('target', format='1.9-rich-root')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        # Re-open the target over the smart transport; the exact-format
        # optimisation should still be detected through the remote layer.
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='1.9')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9-rich-root')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), vf_repository.StreamSource)

    def test_stream_source_to_knit(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='dirstate')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), vf_repository.StreamSource)
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        """Assert the parent ids found for rev_set, ignoring order."""
        self.assertEqual(sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))

    def test_simple(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        rev_set = ['revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_first_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        rev_set = ['revid3', 'revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_null(self):
        # The root revision has no parents to report.
        rev_set = ['initial']
        self.assertParentIds([], rev_set)

    def test_not_null_set(self):
        self.builder.build_snapshot('revid1', None, [])
        rev_set = [_mod_revision.NULL_REVISION]
        self.assertParentIds([], rev_set)

    def test_ghost(self):
        # A ghost in the rev_set itself is ignored.
        self.builder.build_snapshot('revid1', None, [])
        rev_set = ['ghost', 'revid1']
        self.assertParentIds(['initial'], rev_set)

    def test_ghost_parent(self):
        # A ghost parent of a revision in the set is still reported.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        rev_set = ['revid2', 'revid1']
        self.assertParentIds(['ghost', 'initial'], rev_set)

    def test_righthand_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        rev_set = ['revid3', 'revid2a']
        self.assertParentIds(['revid1', 'revid2b'], rev_set)
class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        """Return a repository with deliberately inconsistent ancestry data.

        The repository contains a text with an unreferenced ancestor and a
        revision whose file text claims a ghost as its ancestor.
        """
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon.  Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups = []
        try:
            repo.lock_write()
            cleanups.append(repo.unlock)
            repo.start_write_group()
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id='rev1a')
            inv.root.revision = 'rev1a'
            self.add_file(repo, inv, 'file1', 'rev1a', [])
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
            repo.add_inventory('rev1a', inv, [])
            revision = _mod_revision.Revision('rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision('rev1a', revision, inv)

            # make rev1b, which has no Revision, but has an Inventory, and
            # file1
            inv = inventory.Inventory(revision_id='rev1b')
            inv.root.revision = 'rev1b'
            self.add_file(repo, inv, 'file1', 'rev1b', [])
            repo.add_inventory('rev1b', inv, [])

            # make rev2, with file1 and file2
            # file2 is sane
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
            self.add_file(repo, inv, 'file2', 'rev2', [])
            self.add_revision(repo, 'rev2', inv, ['rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
            return repo
        finally:
            # Run cleanups in reverse acquisition order, even on failure.
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        """Add inv plus a matching root text and Revision object to repo."""
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id, revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        """Add a one-line file text to repo and its entry to inv."""
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        try:
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
            # error.
            return
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        # NOTE: .next() is the Python 2 iterator protocol, matching the rest
        # of this (py2-era) codebase.
        text = empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True).next()
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
class TestRepositoryPackCollection(TestCaseWithTransport):
    """Tests for RepositoryPackCollection housekeeping behaviour."""

    def get_format(self):
        """Return the bzrdir format used by these tests (pack-0.92)."""
        return bzrdir.format_registry.make_bzrdir('pack-0.92')

    def get_packs(self):
        """Return the pack collection of a fresh pack-0.92 repository."""
        format = self.get_format()
        repo = self.make_repository('.', format=format)
        return repo._pack_collection

    def make_packs_and_alt_repo(self, write_lock=False):
        """Create a pack repo with 3 packs, and access it via a second repo."""
        tree = self.make_branch_and_tree('.', format=self.get_format())
        tree.lock_write()
        self.addCleanup(tree.unlock)
        rev1 = tree.commit('one')
        rev2 = tree.commit('two')
        rev3 = tree.commit('three')
        r = repository.Repository.open('.')
        if write_lock:
            r.lock_write()
        else:
            r.lock_read()
        self.addCleanup(r.unlock)
        packs = r._pack_collection
        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]

    def test__clear_obsolete_packs(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs()
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))

    def test__clear_obsolete_packs_preserve(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        # All three of a-pack's files survive; another-pack is deleted.
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
                         sorted(obsolete_pack_trans.list_dir('.')))

    def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
        # without it blowing up
        packs = self.get_packs()
        self.assertEqual(1, packs._max_pack_count(0))
        # after that the sum of the digits, - check the first 1-9
        self.assertEqual(1, packs._max_pack_count(1))
        self.assertEqual(2, packs._max_pack_count(2))
        self.assertEqual(3, packs._max_pack_count(3))
        self.assertEqual(4, packs._max_pack_count(4))
        self.assertEqual(5, packs._max_pack_count(5))
        self.assertEqual(6, packs._max_pack_count(6))
        self.assertEqual(7, packs._max_pack_count(7))
        self.assertEqual(8, packs._max_pack_count(8))
        self.assertEqual(9, packs._max_pack_count(9))
        # check the boundary cases with two digits for the next decade
        self.assertEqual(1, packs._max_pack_count(10))
        self.assertEqual(2, packs._max_pack_count(11))
        self.assertEqual(10, packs._max_pack_count(19))
        self.assertEqual(2, packs._max_pack_count(20))
        self.assertEqual(3, packs._max_pack_count(21))
        # check some arbitrary big numbers
        self.assertEqual(25, packs._max_pack_count(112894))

    def test_repr(self):
        packs = self.get_packs()
        # NOTE(review): the parentheses in this pattern are unescaped, so
        # they act as regex groups rather than literal '(' ')' — the check
        # is therefore laxer than it appears.  Left as-is to preserve
        # behaviour; escaping them would tighten the assertion.
        self.assertContainsRe(repr(packs),
            'RepositoryPackCollection(.*Repository(.*))')

    def test__obsolete_packs(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Simulate a concurrent update by renaming the .pack file and one of
        # the indices
        packs.transport.rename('packs/%s.pack' % (names[0],),
                               'obsolete_packs/%s.pack' % (names[0],))
        packs.transport.rename('indices/%s.iix' % (names[0],),
                               'obsolete_packs/%s.iix' % (names[0],))
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are still renamed
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))

    def test__obsolete_packs_missing_directory(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # Obsoleting must cope with the obsolete_packs directory being gone.
        r.control_transport.rmdir('obsolete_packs')
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are still renamed
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))

    def test_pack_distribution_zero(self):
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))

    def test_ensure_loaded_unlocked(self):
        packs = self.get_packs()
        self.assertRaises(errors.ObjectNotLocked,
                          packs.ensure_loaded)

    def test_pack_distribution_one_to_nine(self):
        packs = self.get_packs()
        self.assertEqual([1],
            packs.pack_distribution(1))
        self.assertEqual([1, 1],
            packs.pack_distribution(2))
        self.assertEqual([1, 1, 1],
            packs.pack_distribution(3))
        self.assertEqual([1, 1, 1, 1],
            packs.pack_distribution(4))
        self.assertEqual([1, 1, 1, 1, 1],
            packs.pack_distribution(5))
        self.assertEqual([1, 1, 1, 1, 1, 1],
            packs.pack_distribution(6))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(7))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(8))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(9))

    def test_pack_distribution_stable_at_boundaries(self):
        """When there are multi-rev packs the counts are stable."""
        packs = self.get_packs()
        # in 10s:
        self.assertEqual([10], packs.pack_distribution(10))
        self.assertEqual([10, 1], packs.pack_distribution(11))
        self.assertEqual([10, 10], packs.pack_distribution(20))
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
        # 100s
        self.assertEqual([100], packs.pack_distribution(100))
        self.assertEqual([100, 1], packs.pack_distribution(101))
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
        self.assertEqual([100, 100], packs.pack_distribution(200))
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))

    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium")]
        # rev count - 2009 -> 2x1000 + 9x1
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
        # rev count - 2010 -> 2x1000 + 1x10
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_combines_smallest_two(self):
        packs = self.get_packs()
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
            (1, "single1")]
        # rev count - 2010 -> 2x1000 + 1x10 (3)
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)

    def test_plan_pack_operations_creates_a_single_op(self):
        packs = self.get_packs()
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
                          (10, 'e'), (6, 'f'), (4, 'g')]
        # rev count 150 -> 1x100 and 5x10
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
        # be combined into a single 120 size pack, and the 6 & 4 would
        # becombined into a size 10 pack. However, if we have to rewrite them,
        # we save a pack file with no increased I/O by putting them into the
        # same file.
        distribution = packs.pack_distribution(150)
        pack_operations = packs.plan_autopack_combinations(existing_packs,
                                                           distribution)
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)

    def test_all_packs_none(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([], packs.all_packs())

    def test_all_packs_one(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0])],
            packs.all_packs())

    def test_all_packs_two(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.commit('continue')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0]),
            packs.get_pack_by_name(packs.names()[1]),
            ], packs.all_packs())
1222
 
 
1223
 
    def test_get_pack_by_name(self):
        """get_pack_by_name returns a fully initialised ExistingPack and
        caches it, handing back the same instance on repeated calls."""
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.reset()
        packs.ensure_loaded()
        name = packs.names()[0]
        pack_1 = packs.get_pack_by_name(name)
        # the pack should be correctly initialised
        # (_names maps pack name -> the four index sizes; build the same
        # indices by hand and compare against the returned pack)
        sizes = packs._names[name]
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
        # and the same instance should be returned on successive calls.
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
    def test_reload_pack_names_new_entry(self):
        """reload_pack_names picks up a pack added by another collection
        and returns True; a second reload with no change returns False."""
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Add a new pack file into the repository
        rev4 = tree.commit('four')
        new_names = tree.branch.repository._pack_collection.names()
        new_name = set(new_names).difference(names)
        self.assertEqual(1, len(new_name))
        new_name = new_name.pop()
        # The old collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        # And the repository can access the new revision
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
        # Nothing has changed since the reload, so this reports no change.
        self.assertFalse(packs.reload_pack_names())
    def test_reload_pack_names_added_and_removed(self):
        """reload_pack_names copes with packs simultaneously added and
        removed by a full repack done through another collection."""
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Now repack the whole thing
        tree.branch.repository.pack()
        new_names = tree.branch.repository._pack_collection.names()
        # The other collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        # Data is still reachable through the repacked collection.
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())
    def test_reload_pack_names_preserves_pending(self):
        """reload_pack_names keeps locally pending additions and removals.

        A pack added via a write group, and a pack removed via
        _remove_pack_from_memory, must still be reported by
        _diff_pack_names() after the on-disk names are reloaded.
        """
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        new_pack.finish()
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        # _diff_pack_names returns index nodes; x[0][0] is the pack name.
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        # The pending add and remove must survive the reload unchanged.
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
    def test_autopack_obsoletes_new_pack(self):
1315
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1316
 
        packs._max_pack_count = lambda x: 1
1317
 
        packs.pack_distribution = lambda x: [10]
1318
 
        r.start_write_group()
1319
 
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
1320
 
            ('bogus-rev',), (), None, 'bogus-content\n')])
1321
 
        # This should trigger an autopack, which will combine everything into a
1322
 
        # single pack file.
1323
 
        new_names = r.commit_write_group()
1324
 
        names = packs.names()
1325
 
        self.assertEqual(1, len(names))
1326
 
        self.assertEqual([names[0] + '.pack'],
1327
 
                         packs._pack_transport.list_dir('.'))
1328
 
 
1329
 
    def test_autopack_reloads_and_stops(self):
        """If a concurrent full pack lands between planning and executing an
        autopack, the autopack re-reads the pack names and does nothing."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything
        orig_execute = packs._execute_pack_operations
        def _munged_execute_pack_ops(*args, **kwargs):
            # Race simulation: a full pack happens just before we execute.
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())
    def test__save_pack_names(self):
        """_save_pack_names removes obsoleted packs from the pack directory
        and moves them (plus their index files) into obsolete_packs/."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        packs._save_pack_names(obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # obsolete_packs will also have stuff like .rix and .iix present.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))
    def test__save_pack_names_already_obsoleted(self):
        """_save_pack_names tolerates a pack that a concurrent autopack has
        already moved into obsolete_packs/."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        # We are going to simulate a concurrent autopack by manually obsoleting
        # the pack directly.
        packs._obsolete_packs([pack])
        packs._save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # Note that while we set clear_obsolete_packs=True, it should not
        # delete a pack file that we have also scheduled for obsoletion.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))
    def test_pack_no_obsolete_packs_directory(self):
1378
 
        """Bug #314314, don't fail if obsolete_packs directory does
1379
 
        not exist."""
1380
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1381
 
        r.control_transport.rmdir('obsolete_packs')
1382
 
        packs._clear_obsolete_packs()
1383
 
 
1384
 
 
1385
 
class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        """Assert == and != both agree, in both directions, that the two
        packs are equal."""
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        """Assert == and != both agree, in both directions, that the two
        packs differ."""
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        """Equality depends on every attribute of an ExistingPack."""
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # Change each attribute on one side (breaking equality), then mirror
        # it on the other side (restoring equality).
        for attr in ('revision_index', 'inventory_index', 'text_index',
                     'signature_index', 'name', 'transport'):
            setattr(left, attr, 'a')
            self.assertCurrentlyNotEqual(left, right)
            setattr(right, attr, 'a')
            self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        """file_name() is the pack's name plus a '.pack' suffix."""
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())
class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        """A freshly constructed NewPack exposes builder indices, the
        collection's transports, a 20-char random name and a start time."""
        upload_transport = self.get_transport('upload')
        pack_transport = self.get_transport('pack')
        index_transport = self.get_transport('index')
        upload_transport.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_transport,
            upload_transport=upload_transport,
            pack_transport=pack_transport,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
        # Indices are builders (writable) until the pack is finished.
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        # The pack uses the transports handed to the collection.
        self.assertTrue(pack.upload_transport is upload_transport)
        self.assertTrue(pack.index_transport is index_transport)
        self.assertTrue(pack.pack_transport is pack_transport)
        self.assertEqual(None, pack.index_sizes)
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)
class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        """Packing a subset of revisions reorders the packer's pack list so
        the packs containing those revisions come first."""
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files should
        # be moved to the front of the stack
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        new_pack = packer.pack()
        self.assertEqual(new_packs, packer.packs)
class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        """Return the pack collection of a freshly created repository."""
        return self.make_repository('.')._pack_collection

    def test_open_pack_will_optimise(self):
        """open_pack() yields a NewPack whose four indices are all flagged
        to optimise for size."""
        packer = knitpack_repo.OptimisingKnitPacker(
            self.get_pack_collection(), [], '.test')
        opened = packer.open_pack()
        self.addCleanup(opened.abort) # ensure cleanup
        self.assertIsInstance(opened, pack_repo.NewPack)
        for index in (opened.revision_index, opened.inventory_index,
                      opened.text_index, opened.signature_index):
            self.assertTrue(index._optimize_for_size)
class TestGCCHKPacker(TestCaseWithTransport):
    """Tests for the 2a-format GCCHKPacker, focused on packing when an
    inventory lives in a different pack than its revision."""

    def make_abc_branch(self):
        """Build a branch with three revisions A -> B -> C."""
        builder = self.make_branch_builder('source')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('file', 'file-id', 'file', 'content\n')),
            ])
        builder.build_snapshot('B', ['A'], [
            ('add', ('dir', 'dir-id', 'directory', None))])
        builder.build_snapshot('C', ['B'], [
            ('modify', ('file-id', 'new content\n'))])
        builder.finish_series()
        return builder.get_branch()

    def make_branch_with_disjoint_inventory_and_revision(self):
        """a repo with separate packs for a revisions Revision and Inventory.

        There will be one pack file that holds the Revision content, and one
        for the Inventory content.

        :return: (repository,
                  pack_name_with_rev_A_Revision,
                  pack_name_with_rev_A_Inventory,
                  pack_name_with_rev_C_content)
        """
        b_source = self.make_abc_branch()
        # A stacked branch fetches B's content but relies on the base for A's
        # Revision, while still storing A's Inventory locally.
        b_base = b_source.bzrdir.sprout('base', revision_id='A').open_branch()
        b_stacked = b_base.bzrdir.sprout('stacked', stacked=True).open_branch()
        b_stacked.lock_write()
        self.addCleanup(b_stacked.unlock)
        b_stacked.fetch(b_source, 'B')
        # Now re-open the stacked repo directly (no fallbacks) so that we can
        # fill in the A rev.
        repo_not_stacked = b_stacked.bzrdir.open_repository()
        repo_not_stacked.lock_write()
        self.addCleanup(repo_not_stacked.unlock)
        # Now we should have a pack file with A's inventory, but not its
        # Revision
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.inventories.keys()))
        self.assertEqual([('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        stacked_pack_names = repo_not_stacked._pack_collection.names()
        # We have a couple names here, figure out which has A's inventory
        for name in stacked_pack_names:
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
            if ('A',) in keys:
                inv_a_pack_name = name
                break
        else:
            self.fail('Could not find pack containing A\'s inventory')
        # Fetching A adds exactly one new pack, which holds A's Revision.
        repo_not_stacked.fetch(b_source.repository, 'A')
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        new_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
        self.assertEqual(1, len(rev_a_pack_names))
        rev_a_pack_name = list(rev_a_pack_names)[0]
        # Now fetch 'C', so we have a couple pack files to join
        repo_not_stacked.fetch(b_source.repository, 'C')
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
        self.assertEqual(1, len(rev_c_pack_names))
        rev_c_pack_name = list(rev_c_pack_names)[0]
        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
                rev_c_pack_name)

    def test_pack_with_distant_inventories(self):
        # See https://bugs.launchpad.net/bzr/+bug/437003
        # When repacking, it is possible to have an inventory in a different
        # pack file than the associated revision. An autopack can then come
        # along, and miss that inventory, and complain.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                    [a_pack, c_pack], '.test-pack')
        # This would raise ValueError in bug #437003, but should not raise an
        # error once fixed.
        packer.pack()

    def test_pack_with_missing_inventory(self):
        # Similar to test_pack_with_distant_inventories, but this time, we
        # force the A inventory to actually be gone from the repository.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
            repo._pack_collection.all_packs(), '.test-pack')
        # Packing must notice the missing inventory and fail loudly.
        e = self.assertRaises(ValueError, packer.pack)
        packer.new_pack.abort()
        self.assertContainsRe(str(e),
            r"We are missing inventories for revisions: .*'A'")
class TestCrossFormatPacks(TestCaseWithTransport):
    """Tests that fetching between repository formats packs the target
    exactly when the target format requires it."""

    def log_pack(self, hint=None):
        # Wrapper installed over target.pack: record the call, delegate to
        # the real pack, and (when a hint is expected) assert one was given.
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        """Fetch via the stream source/sink API and check whether the
        target's pack() was invoked (expect_pack_called)."""
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository._get_source(target._format)
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, revision_ids=[tip])
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        """Fetch via Repository.fetch (InterDifferingSerializer path) and
        check whether the target's pack() was invoked."""
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        target.fetch(source)
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)
class Test_LazyListJoin(tests.TestCase):
    """Tests for repository._LazyListJoin."""

    def test__repr__(self):
        # The repr names the class with its full module path and shows the
        # tuple of joined lists.
        joined = repository._LazyListJoin(['a'], ['b'])
        expected = "bzrlib.repository._LazyListJoin((['a'], ['b']))"
        self.assertEqual(expected, repr(joined))