~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Andrew Bennetts
  • Date: 2009-10-13 05:20:50 UTC
  • mfrom: (4634.52.16 2.0)
  • mto: This revision was merged to the branch mainline in revision 4738.
  • Revision ID: andrew.bennetts@canonical.com-20091013052050-u1w6tv0z7kqhn8d0
  • Commit message: Merge 2.0 into lp:bzr, resolving conflicts in NEWS and releasing.txt.

1
 
# Copyright (C) 2006-2011 Canonical Ltd
 
1
# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
23
23
"""
24
24
 
25
25
from stat import S_ISDIR
 
26
from StringIO import StringIO
26
27
 
27
28
import bzrlib
28
 
from bzrlib.errors import (
29
 
    UnknownFormatError,
30
 
    UnsupportedFormatError,
31
 
    )
 
29
from bzrlib.errors import (NotBranchError,
 
30
                           NoSuchFile,
 
31
                           UnknownFormatError,
 
32
                           UnsupportedFormatError,
 
33
                           )
32
34
from bzrlib import (
33
 
    btree_index,
34
 
    symbol_versioning,
 
35
    graph,
35
36
    tests,
36
 
    transport,
37
 
    vf_search,
38
37
    )
 
38
from bzrlib.branchbuilder import BranchBuilder
39
39
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
40
 
from bzrlib.index import GraphIndex
 
40
from bzrlib.index import GraphIndex, InMemoryGraphIndex
41
41
from bzrlib.repository import RepositoryFormat
 
42
from bzrlib.smart import server
42
43
from bzrlib.tests import (
43
44
    TestCase,
44
45
    TestCaseWithTransport,
45
 
    )
 
46
    TestSkipped,
 
47
    test_knit,
 
48
    )
 
49
from bzrlib.transport import (
 
50
    fakenfs,
 
51
    get_transport,
 
52
    )
 
53
from bzrlib.transport.memory import MemoryServer
46
54
from bzrlib import (
 
55
    bencode,
47
56
    bzrdir,
48
 
    controldir,
49
57
    errors,
50
58
    inventory,
51
59
    osutils,
 
60
    progress,
52
61
    repository,
53
62
    revision as _mod_revision,
 
63
    symbol_versioning,
54
64
    upgrade,
55
 
    versionedfile,
56
 
    vf_repository,
57
65
    workingtree,
58
66
    )
59
67
from bzrlib.repofmt import (
60
68
    groupcompress_repo,
61
69
    knitrepo,
62
 
    knitpack_repo,
63
70
    pack_repo,
 
71
    weaverepo,
64
72
    )
65
73
 
66
74
 
67
75
class TestDefaultFormat(TestCase):
68
76
 
69
77
    def test_get_set_default_format(self):
70
 
        old_default = controldir.format_registry.get('default')
 
78
        old_default = bzrdir.format_registry.get('default')
71
79
        private_default = old_default().repository_format.__class__
72
 
        old_format = repository.format_registry.get_default()
 
80
        old_format = repository.RepositoryFormat.get_default_format()
73
81
        self.assertTrue(isinstance(old_format, private_default))
74
82
        def make_sample_bzrdir():
75
83
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
76
84
            my_bzrdir.repository_format = SampleRepositoryFormat()
77
85
            return my_bzrdir
78
 
        controldir.format_registry.remove('default')
79
 
        controldir.format_registry.register('sample', make_sample_bzrdir, '')
80
 
        controldir.format_registry.set_default('sample')
 
86
        bzrdir.format_registry.remove('default')
 
87
        bzrdir.format_registry.register('sample', make_sample_bzrdir, '')
 
88
        bzrdir.format_registry.set_default('sample')
81
89
        # creating a repository should now create an instrumented dir.
82
90
        try:
83
91
            # the default branch format is used by the meta dir format
86
94
            result = dir.create_repository()
87
95
            self.assertEqual(result, 'A bzr repository dir')
88
96
        finally:
89
 
            controldir.format_registry.remove('default')
90
 
            controldir.format_registry.remove('sample')
91
 
            controldir.format_registry.register('default', old_default, '')
92
 
        self.assertIsInstance(repository.format_registry.get_default(),
 
97
            bzrdir.format_registry.remove('default')
 
98
            bzrdir.format_registry.remove('sample')
 
99
            bzrdir.format_registry.register('default', old_default, '')
 
100
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
93
101
                              old_format.__class__)
94
102
 
95
103
 
96
 
class SampleRepositoryFormat(repository.RepositoryFormatMetaDir):
 
104
class SampleRepositoryFormat(repository.RepositoryFormat):
97
105
    """A sample format
98
106
 
99
107
    this format is initializable, unsupported to aid in testing the
100
108
    open and open(unsupported=True) routines.
101
109
    """
102
110
 
103
 
    @classmethod
104
 
    def get_format_string(cls):
 
111
    def get_format_string(self):
105
112
        """See RepositoryFormat.get_format_string()."""
106
113
        return "Sample .bzr repository format."
107
114
 
118
125
        return "opened repository."
119
126
 
120
127
 
121
 
class SampleExtraRepositoryFormat(repository.RepositoryFormat):
122
 
    """A sample format that can not be used in a metadir
123
 
 
124
 
    """
125
 
 
126
 
    def get_format_string(self):
127
 
        raise NotImplementedError
128
 
 
129
 
 
130
128
class TestRepositoryFormat(TestCaseWithTransport):
131
129
    """Tests for the Repository format detection used by the bzr meta dir facility.BzrBranchFormat facility."""
132
130
 
138
136
        def check_format(format, url):
139
137
            dir = format._matchingbzrdir.initialize(url)
140
138
            format.initialize(dir)
141
 
            t = transport.get_transport_from_path(url)
142
 
            found_format = repository.RepositoryFormatMetaDir.find_format(dir)
143
 
            self.assertIsInstance(found_format, format.__class__)
144
 
        check_format(repository.format_registry.get_default(), "bar")
 
139
            t = get_transport(url)
 
140
            found_format = repository.RepositoryFormat.find_format(dir)
 
141
            self.failUnless(isinstance(found_format, format.__class__))
 
142
        check_format(weaverepo.RepositoryFormat7(), "bar")
145
143
 
146
144
    def test_find_format_no_repository(self):
147
145
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
148
146
        self.assertRaises(errors.NoRepositoryPresent,
149
 
                          repository.RepositoryFormatMetaDir.find_format,
 
147
                          repository.RepositoryFormat.find_format,
150
148
                          dir)
151
149
 
152
 
    def test_from_string(self):
153
 
        self.assertIsInstance(
154
 
            SampleRepositoryFormat.from_string(
155
 
                "Sample .bzr repository format."),
156
 
            SampleRepositoryFormat)
157
 
        self.assertRaises(AssertionError,
158
 
            SampleRepositoryFormat.from_string,
159
 
                "Different .bzr repository format.")
160
 
 
161
150
    def test_find_format_unknown_format(self):
162
151
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
163
152
        SampleRepositoryFormat().initialize(dir)
164
153
        self.assertRaises(UnknownFormatError,
165
 
                          repository.RepositoryFormatMetaDir.find_format,
 
154
                          repository.RepositoryFormat.find_format,
166
155
                          dir)
167
156
 
168
 
    def test_find_format_with_features(self):
169
 
        tree = self.make_branch_and_tree('.', format='2a')
170
 
        tree.branch.repository.update_feature_flags({"name": "necessity"})
171
 
        found_format = repository.RepositoryFormatMetaDir.find_format(tree.bzrdir)
172
 
        self.assertIsInstance(found_format, repository.RepositoryFormatMetaDir)
173
 
        self.assertEquals(found_format.features.get("name"), "necessity")
174
 
        self.assertRaises(errors.MissingFeature, found_format.check_support_status,
175
 
            True)
176
 
        self.addCleanup(repository.RepositoryFormatMetaDir.unregister_feature,
177
 
            "name")
178
 
        repository.RepositoryFormatMetaDir.register_feature("name")
179
 
        found_format.check_support_status(True)
180
 
 
181
157
    def test_register_unregister_format(self):
182
 
        # Test deprecated format registration functions
183
158
        format = SampleRepositoryFormat()
184
159
        # make a control dir
185
160
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
186
161
        # make a repo
187
162
        format.initialize(dir)
188
163
        # register a format for it.
189
 
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
190
 
            repository.RepositoryFormat.register_format, format)
 
164
        repository.RepositoryFormat.register_format(format)
191
165
        # which repository.Open will refuse (not supported)
192
 
        self.assertRaises(UnsupportedFormatError, repository.Repository.open,
193
 
            self.get_url())
 
166
        self.assertRaises(UnsupportedFormatError, repository.Repository.open, self.get_url())
194
167
        # but open(unsupported) will work
195
168
        self.assertEqual(format.open(dir), "opened repository.")
196
169
        # unregister the format
197
 
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
198
 
            repository.RepositoryFormat.unregister_format, format)
199
 
 
200
 
 
201
 
class TestRepositoryFormatRegistry(TestCase):
202
 
 
203
 
    def setUp(self):
204
 
        super(TestRepositoryFormatRegistry, self).setUp()
205
 
        self.registry = repository.RepositoryFormatRegistry()
206
 
 
207
 
    def test_register_unregister_format(self):
208
 
        format = SampleRepositoryFormat()
209
 
        self.registry.register(format)
210
 
        self.assertEquals(format, self.registry.get("Sample .bzr repository format."))
211
 
        self.registry.remove(format)
212
 
        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")
213
 
 
214
 
    def test_get_all(self):
215
 
        format = SampleRepositoryFormat()
216
 
        self.assertEquals([], self.registry._get_all())
217
 
        self.registry.register(format)
218
 
        self.assertEquals([format], self.registry._get_all())
219
 
 
220
 
    def test_register_extra(self):
221
 
        format = SampleExtraRepositoryFormat()
222
 
        self.assertEquals([], self.registry._get_all())
223
 
        self.registry.register_extra(format)
224
 
        self.assertEquals([format], self.registry._get_all())
225
 
 
226
 
    def test_register_extra_lazy(self):
227
 
        self.assertEquals([], self.registry._get_all())
228
 
        self.registry.register_extra_lazy("bzrlib.tests.test_repository",
229
 
            "SampleExtraRepositoryFormat")
230
 
        formats = self.registry._get_all()
231
 
        self.assertEquals(1, len(formats))
232
 
        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
 
170
        repository.RepositoryFormat.unregister_format(format)
 
171
 
 
172
 
 
173
class TestFormat6(TestCaseWithTransport):
 
174
 
 
175
    def test_attribute__fetch_order(self):
 
176
        """Weaves need topological data insertion."""
 
177
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
178
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
179
        self.assertEqual('topological', repo._format._fetch_order)
 
180
 
 
181
    def test_attribute__fetch_uses_deltas(self):
 
182
        """Weaves do not reuse deltas."""
 
183
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
184
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
185
        self.assertEqual(False, repo._format._fetch_uses_deltas)
 
186
 
 
187
    def test_attribute__fetch_reconcile(self):
 
188
        """Weave repositories need a reconcile after fetch."""
 
189
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
190
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
191
        self.assertEqual(True, repo._format._fetch_reconcile)
 
192
 
 
193
    def test_no_ancestry_weave(self):
 
194
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
195
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
196
        # We no longer need to create the ancestry.weave file
 
197
        # since it is *never* used.
 
198
        self.assertRaises(NoSuchFile,
 
199
                          control.transport.get,
 
200
                          'ancestry.weave')
 
201
 
 
202
    def test_supports_external_lookups(self):
 
203
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
204
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
205
        self.assertFalse(repo._format.supports_external_lookups)
 
206
 
 
207
 
 
208
class TestFormat7(TestCaseWithTransport):
 
209
 
 
210
    def test_attribute__fetch_order(self):
 
211
        """Weaves need topological data insertion."""
 
212
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
213
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
214
        self.assertEqual('topological', repo._format._fetch_order)
 
215
 
 
216
    def test_attribute__fetch_uses_deltas(self):
 
217
        """Weaves do not reuse deltas."""
 
218
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
219
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
220
        self.assertEqual(False, repo._format._fetch_uses_deltas)
 
221
 
 
222
    def test_attribute__fetch_reconcile(self):
 
223
        """Weave repositories need a reconcile after fetch."""
 
224
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
225
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
226
        self.assertEqual(True, repo._format._fetch_reconcile)
 
227
 
 
228
    def test_disk_layout(self):
 
229
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
230
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
231
        # in case of side effects of locking.
 
232
        repo.lock_write()
 
233
        repo.unlock()
 
234
        # we want:
 
235
        # format 'Bazaar-NG Repository format 7'
 
236
        # lock ''
 
237
        # inventory.weave == empty_weave
 
238
        # empty revision-store directory
 
239
        # empty weaves directory
 
240
        t = control.get_repository_transport(None)
 
241
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
242
                             t.get('format').read())
 
243
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
244
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
245
        self.assertEqualDiff('# bzr weave file v5\n'
 
246
                             'w\n'
 
247
                             'W\n',
 
248
                             t.get('inventory.weave').read())
 
249
        # Creating a file with id Foo:Bar results in a non-escaped file name on
 
250
        # disk.
 
251
        control.create_branch()
 
252
        tree = control.create_workingtree()
 
253
        tree.add(['foo'], ['Foo:Bar'], ['file'])
 
254
        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
 
255
        tree.commit('first post', rev_id='first')
 
256
        self.assertEqualDiff(
 
257
            '# bzr weave file v5\n'
 
258
            'i\n'
 
259
            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
 
260
            'n first\n'
 
261
            '\n'
 
262
            'w\n'
 
263
            '{ 0\n'
 
264
            '. content\n'
 
265
            '}\n'
 
266
            'W\n',
 
267
            t.get('weaves/74/Foo%3ABar.weave').read())
 
268
 
 
269
    def test_shared_disk_layout(self):
 
270
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
271
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
272
        # we want:
 
273
        # format 'Bazaar-NG Repository format 7'
 
274
        # inventory.weave == empty_weave
 
275
        # empty revision-store directory
 
276
        # empty weaves directory
 
277
        # a 'shared-storage' marker file.
 
278
        # lock is not present when unlocked
 
279
        t = control.get_repository_transport(None)
 
280
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
281
                             t.get('format').read())
 
282
        self.assertEqualDiff('', t.get('shared-storage').read())
 
283
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
284
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
285
        self.assertEqualDiff('# bzr weave file v5\n'
 
286
                             'w\n'
 
287
                             'W\n',
 
288
                             t.get('inventory.weave').read())
 
289
        self.assertFalse(t.has('branch-lock'))
 
290
 
 
291
    def test_creates_lockdir(self):
 
292
        """Make sure it appears to be controlled by a LockDir existence"""
 
293
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
294
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
295
        t = control.get_repository_transport(None)
 
296
        # TODO: Should check there is a 'lock' toplevel directory,
 
297
        # regardless of contents
 
298
        self.assertFalse(t.has('lock/held/info'))
 
299
        repo.lock_write()
 
300
        try:
 
301
            self.assertTrue(t.has('lock/held/info'))
 
302
        finally:
 
303
            # unlock so we don't get a warning about failing to do so
 
304
            repo.unlock()
 
305
 
 
306
    def test_uses_lockdir(self):
 
307
        """repo format 7 actually locks on lockdir"""
 
308
        base_url = self.get_url()
 
309
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
 
310
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
311
        t = control.get_repository_transport(None)
 
312
        repo.lock_write()
 
313
        repo.unlock()
 
314
        del repo
 
315
        # make sure the same lock is created by opening it
 
316
        repo = repository.Repository.open(base_url)
 
317
        repo.lock_write()
 
318
        self.assertTrue(t.has('lock/held/info'))
 
319
        repo.unlock()
 
320
        self.assertFalse(t.has('lock/held/info'))
 
321
 
 
322
    def test_shared_no_tree_disk_layout(self):
 
323
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
324
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
325
        repo.set_make_working_trees(False)
 
326
        # we want:
 
327
        # format 'Bazaar-NG Repository format 7'
 
328
        # lock ''
 
329
        # inventory.weave == empty_weave
 
330
        # empty revision-store directory
 
331
        # empty weaves directory
 
332
        # a 'shared-storage' marker file.
 
333
        t = control.get_repository_transport(None)
 
334
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
335
                             t.get('format').read())
 
336
        ## self.assertEqualDiff('', t.get('lock').read())
 
337
        self.assertEqualDiff('', t.get('shared-storage').read())
 
338
        self.assertEqualDiff('', t.get('no-working-trees').read())
 
339
        repo.set_make_working_trees(True)
 
340
        self.assertFalse(t.has('no-working-trees'))
 
341
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
342
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
343
        self.assertEqualDiff('# bzr weave file v5\n'
 
344
                             'w\n'
 
345
                             'W\n',
 
346
                             t.get('inventory.weave').read())
 
347
 
 
348
    def test_supports_external_lookups(self):
 
349
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
350
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
351
        self.assertFalse(repo._format.supports_external_lookups)
233
352
 
234
353
 
235
354
class TestFormatKnit1(TestCaseWithTransport):
237
356
    def test_attribute__fetch_order(self):
238
357
        """Knits need topological data insertion."""
239
358
        repo = self.make_repository('.',
240
 
                format=controldir.format_registry.get('knit')())
 
359
                format=bzrdir.format_registry.get('knit')())
241
360
        self.assertEqual('topological', repo._format._fetch_order)
242
361
 
243
362
    def test_attribute__fetch_uses_deltas(self):
244
363
        """Knits reuse deltas."""
245
364
        repo = self.make_repository('.',
246
 
                format=controldir.format_registry.get('knit')())
 
365
                format=bzrdir.format_registry.get('knit')())
247
366
        self.assertEqual(True, repo._format._fetch_uses_deltas)
248
367
 
249
368
    def test_disk_layout(self):
335
454
        is valid when the api is not being abused.
336
455
        """
337
456
        repo = self.make_repository('.',
338
 
                format=controldir.format_registry.get('knit')())
 
457
                format=bzrdir.format_registry.get('knit')())
339
458
        inv_xml = '<inventory format="5">\n</inventory>\n'
340
 
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
 
459
        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
341
460
        self.assertEqual('test-rev-id', inv.root.revision)
342
461
 
343
462
    def test_deserialise_uses_global_revision_id(self):
344
463
        """If it is set, then we re-use the global revision id"""
345
464
        repo = self.make_repository('.',
346
 
                format=controldir.format_registry.get('knit')())
 
465
                format=bzrdir.format_registry.get('knit')())
347
466
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
348
467
                   '</inventory>\n')
349
468
        # Arguably, the deserialise_inventory should detect a mismatch, and
350
469
        # raise an error, rather than silently using one revision_id over the
351
470
        # other.
352
 
        self.assertRaises(AssertionError, repo._deserialise_inventory,
 
471
        self.assertRaises(AssertionError, repo.deserialise_inventory,
353
472
            'test-rev-id', inv_xml)
354
 
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
 
473
        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
355
474
        self.assertEqual('other-rev-id', inv.root.revision)
356
475
 
357
476
    def test_supports_external_lookups(self):
358
477
        repo = self.make_repository('.',
359
 
                format=controldir.format_registry.get('knit')())
 
478
                format=bzrdir.format_registry.get('knit')())
360
479
        self.assertFalse(repo._format.supports_external_lookups)
361
480
 
362
481
 
403
522
        # classes do not barf inappropriately when a surprising repository type
404
523
        # is handed to them.
405
524
        dummy_a = DummyRepository()
406
 
        dummy_a._format = RepositoryFormat()
407
 
        dummy_a._format.supports_full_versioned_files = True
408
525
        dummy_b = DummyRepository()
409
 
        dummy_b._format = RepositoryFormat()
410
 
        dummy_b._format.supports_full_versioned_files = True
411
526
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
412
527
 
413
528
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
417
532
        no actual sane default in the presence of incompatible data models.
418
533
        """
419
534
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
420
 
        self.assertEqual(vf_repository.InterSameDataRepository,
 
535
        self.assertEqual(repository.InterSameDataRepository,
421
536
                         inter_repo.__class__)
422
537
        self.assertEqual(repo_a, inter_repo.source)
423
538
        self.assertEqual(repo_b, inter_repo.target)
437
552
        dummy_a._serializer = repo._serializer
438
553
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
439
554
        dummy_a._format.rich_root_data = repo._format.rich_root_data
440
 
        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
441
555
        dummy_b._serializer = repo._serializer
442
556
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
443
557
        dummy_b._format.rich_root_data = repo._format.rich_root_data
444
 
        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
445
558
        repository.InterRepository.register_optimiser(InterDummy)
446
559
        try:
447
560
            # we should get the default for something InterDummy returns False
460
573
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
461
574
 
462
575
 
463
 
class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):
464
 
 
465
 
    @classmethod
466
 
    def get_format_string(cls):
467
 
        return "Test Format 1"
468
 
 
469
 
 
470
 
class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):
471
 
 
472
 
    @classmethod
473
 
    def get_format_string(cls):
474
 
        return "Test Format 2"
 
576
class TestInterWeaveRepo(TestCaseWithTransport):
 
577
 
 
578
    def test_is_compatible_and_registered(self):
 
579
        # InterWeaveRepo is compatible when either side
 
580
        # is a format 5/6/7 branch
 
581
        from bzrlib.repofmt import knitrepo, weaverepo
 
582
        formats = [weaverepo.RepositoryFormat5(),
 
583
                   weaverepo.RepositoryFormat6(),
 
584
                   weaverepo.RepositoryFormat7()]
 
585
        incompatible_formats = [weaverepo.RepositoryFormat4(),
 
586
                                knitrepo.RepositoryFormatKnit1(),
 
587
                                ]
 
588
        repo_a = self.make_repository('a')
 
589
        repo_b = self.make_repository('b')
 
590
        is_compatible = repository.InterWeaveRepo.is_compatible
 
591
        for source in incompatible_formats:
 
592
            # force incompatible left then right
 
593
            repo_a._format = source
 
594
            repo_b._format = formats[0]
 
595
            self.assertFalse(is_compatible(repo_a, repo_b))
 
596
            self.assertFalse(is_compatible(repo_b, repo_a))
 
597
        for source in formats:
 
598
            repo_a._format = source
 
599
            for target in formats:
 
600
                repo_b._format = target
 
601
                self.assertTrue(is_compatible(repo_a, repo_b))
 
602
        self.assertEqual(repository.InterWeaveRepo,
 
603
                         repository.InterRepository.get(repo_a,
 
604
                                                        repo_b).__class__)
475
605
 
476
606
 
477
607
class TestRepositoryConverter(TestCaseWithTransport):
478
608
 
479
609
    def test_convert_empty(self):
480
 
        source_format = TestRepositoryFormat1()
481
 
        target_format = TestRepositoryFormat2()
482
 
        repository.format_registry.register(source_format)
483
 
        self.addCleanup(repository.format_registry.remove,
484
 
            source_format)
485
 
        repository.format_registry.register(target_format)
486
 
        self.addCleanup(repository.format_registry.remove,
487
 
            target_format)
488
 
        t = self.get_transport()
 
610
        t = get_transport(self.get_url('.'))
489
611
        t.mkdir('repository')
490
612
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
491
 
        repo = TestRepositoryFormat1().initialize(repo_dir)
 
613
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
 
614
        target_format = knitrepo.RepositoryFormatKnit1()
492
615
        converter = repository.CopyConverter(target_format)
493
616
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
494
617
        try:
499
622
        self.assertTrue(isinstance(target_format, repo._format.__class__))
500
623
 
501
624
 
 
625
class TestMisc(TestCase):
 
626
 
 
627
    def test_unescape_xml(self):
 
628
        """We get some kind of error when malformed entities are passed"""
 
629
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
 
630
 
 
631
 
502
632
class TestRepositoryFormatKnit3(TestCaseWithTransport):
503
633
 
504
634
    def test_attribute__fetch_order(self):
525
655
        revision_tree.lock_read()
526
656
        try:
527
657
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
528
 
                revision_tree.get_root_id())
 
658
                revision_tree.inventory.root.file_id)
529
659
        finally:
530
660
            revision_tree.unlock()
531
661
        format = bzrdir.BzrDirMetaFormat1()
535
665
        revision_tree = tree.branch.repository.revision_tree('dull')
536
666
        revision_tree.lock_read()
537
667
        try:
538
 
            revision_tree.get_file_lines(revision_tree.get_root_id())
 
668
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
539
669
        finally:
540
670
            revision_tree.unlock()
541
671
        tree.commit("Another dull commit", rev_id='dull2')
542
672
        revision_tree = tree.branch.repository.revision_tree('dull2')
543
673
        revision_tree.lock_read()
544
674
        self.addCleanup(revision_tree.unlock)
545
 
        self.assertEqual('dull',
546
 
                revision_tree.get_file_revision(revision_tree.get_root_id()))
 
675
        self.assertEqual('dull', revision_tree.inventory.root.revision)
547
676
 
548
677
    def test_supports_external_lookups(self):
549
678
        format = bzrdir.BzrDirMetaFormat1()
554
683
 
555
684
class Test2a(tests.TestCaseWithMemoryTransport):
556
685
 
557
 
    def test_chk_bytes_uses_custom_btree_parser(self):
558
 
        mt = self.make_branch_and_memory_tree('test', format='2a')
559
 
        mt.lock_write()
560
 
        self.addCleanup(mt.unlock)
561
 
        mt.add([''], ['root-id'])
562
 
        mt.commit('first')
563
 
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
564
 
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
565
 
        # It should also work if we re-open the repo
566
 
        repo = mt.branch.repository.bzrdir.open_repository()
567
 
        repo.lock_read()
568
 
        self.addCleanup(repo.unlock)
569
 
        index = repo.chk_bytes._index._graph_index._indices[0]
570
 
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
571
 
 
572
686
    def test_fetch_combines_groups(self):
573
687
        builder = self.make_branch_builder('source', format='2a')
574
688
        builder.start_series()
700
814
        target = self.make_repository('target', format='rich-root-pack')
701
815
        stream = source._get_source(target._format)
702
816
        # We don't want the child GroupCHKStreamSource
703
 
        self.assertIs(type(stream), vf_repository.StreamSource)
 
817
        self.assertIs(type(stream), repository.StreamSource)
704
818
 
705
819
    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
706
820
        source_builder = self.make_branch_builder('source',
733
847
 
734
848
        # On a regular pass, getting the inventories and chk pages for rev-2
735
849
        # would only get the newly created chk pages
736
 
        search = vf_search.SearchResult(set(['rev-2']), set(['rev-1']), 1,
 
850
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
737
851
                                    set(['rev-2']))
738
852
        simple_chk_records = []
739
853
        for vf_name, substream in source.get_stream(search):
782
896
        source = self.make_repository('source', format='pack-0.92')
783
897
        target = self.make_repository('target', format='pack-0.92')
784
898
        stream_source = source._get_source(target._format)
785
 
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
899
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
786
900
 
787
901
    def test_source_to_exact_pack_rich_root_pack(self):
788
902
        source = self.make_repository('source', format='rich-root-pack')
789
903
        target = self.make_repository('target', format='rich-root-pack')
790
904
        stream_source = source._get_source(target._format)
791
 
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
905
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
792
906
 
793
907
    def test_source_to_exact_pack_19(self):
794
908
        source = self.make_repository('source', format='1.9')
795
909
        target = self.make_repository('target', format='1.9')
796
910
        stream_source = source._get_source(target._format)
797
 
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
911
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
798
912
 
799
913
    def test_source_to_exact_pack_19_rich_root(self):
800
914
        source = self.make_repository('source', format='1.9-rich-root')
801
915
        target = self.make_repository('target', format='1.9-rich-root')
802
916
        stream_source = source._get_source(target._format)
803
 
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
917
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
804
918
 
805
919
    def test_source_to_remote_exact_pack_19(self):
806
920
        trans = self.make_smart_server('target')
809
923
        target = self.make_repository('target', format='1.9')
810
924
        target = repository.Repository.open(trans.base)
811
925
        stream_source = source._get_source(target._format)
812
 
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
926
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
813
927
 
814
928
    def test_stream_source_to_non_exact(self):
815
929
        source = self.make_repository('source', format='pack-0.92')
816
930
        target = self.make_repository('target', format='1.9')
817
931
        stream = source._get_source(target._format)
818
 
        self.assertIs(type(stream), vf_repository.StreamSource)
 
932
        self.assertIs(type(stream), repository.StreamSource)
819
933
 
820
934
    def test_stream_source_to_non_exact_rich_root(self):
821
935
        source = self.make_repository('source', format='1.9')
822
936
        target = self.make_repository('target', format='1.9-rich-root')
823
937
        stream = source._get_source(target._format)
824
 
        self.assertIs(type(stream), vf_repository.StreamSource)
 
938
        self.assertIs(type(stream), repository.StreamSource)
825
939
 
826
940
    def test_source_to_remote_non_exact_pack_19(self):
827
941
        trans = self.make_smart_server('target')
830
944
        target = self.make_repository('target', format='1.6')
831
945
        target = repository.Repository.open(trans.base)
832
946
        stream_source = source._get_source(target._format)
833
 
        self.assertIs(type(stream_source), vf_repository.StreamSource)
 
947
        self.assertIs(type(stream_source), repository.StreamSource)
834
948
 
835
949
    def test_stream_source_to_knit(self):
836
950
        source = self.make_repository('source', format='pack-0.92')
837
951
        target = self.make_repository('target', format='dirstate')
838
952
        stream = source._get_source(target._format)
839
 
        self.assertIs(type(stream), vf_repository.StreamSource)
 
953
        self.assertIs(type(stream), repository.StreamSource)
840
954
 
841
955
 
842
956
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
844
958
 
845
959
    def setUp(self):
846
960
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
847
 
        self.builder = self.make_branch_builder('source')
 
961
        self.builder = self.make_branch_builder('source',
 
962
            format='development6-rich-root')
848
963
        self.builder.start_series()
849
964
        self.builder.build_snapshot('initial', None,
850
965
            [('add', ('', 'tree-root', 'directory', None))])
920
1035
            revision = _mod_revision.Revision('rev1a',
921
1036
                committer='jrandom@example.com', timestamp=0,
922
1037
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
923
 
            repo.add_revision('rev1a', revision, inv)
 
1038
            repo.add_revision('rev1a',revision, inv)
924
1039
 
925
1040
            # make rev1b, which has no Revision, but has an Inventory, and
926
1041
            # file1
961
1076
        revision = _mod_revision.Revision(revision_id,
962
1077
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
963
1078
            timezone=0, message='foo', parent_ids=parent_ids)
964
 
        repo.add_revision(revision_id, revision, inv)
 
1079
        repo.add_revision(revision_id,revision, inv)
965
1080
 
966
1081
    def add_file(self, repo, inv, filename, revision, parents):
967
1082
        file_id = filename + '-id'
995
1110
class TestRepositoryPackCollection(TestCaseWithTransport):
996
1111
 
997
1112
    def get_format(self):
998
 
        return controldir.format_registry.make_bzrdir('pack-0.92')
 
1113
        return bzrdir.format_registry.make_bzrdir('pack-0.92')
999
1114
 
1000
1115
    def get_packs(self):
1001
1116
        format = self.get_format()
1020
1135
        packs.ensure_loaded()
1021
1136
        return tree, r, packs, [rev1, rev2, rev3]
1022
1137
 
1023
 
    def test__clear_obsolete_packs(self):
1024
 
        packs = self.get_packs()
1025
 
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
1026
 
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
1027
 
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
1028
 
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
1029
 
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
1030
 
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
1031
 
        res = packs._clear_obsolete_packs()
1032
 
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
1033
 
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
1034
 
 
1035
 
    def test__clear_obsolete_packs_preserve(self):
1036
 
        packs = self.get_packs()
1037
 
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
1038
 
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
1039
 
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
1040
 
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
1041
 
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
1042
 
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
1043
 
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
1044
 
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
1045
 
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
1046
 
                         sorted(obsolete_pack_trans.list_dir('.')))
1047
 
 
1048
1138
    def test__max_pack_count(self):
1049
1139
        """The maximum pack count is a function of the number of revisions."""
1050
1140
        # no revisions - one pack, so that we can have a revision free repo
1070
1160
        # check some arbitrary big numbers
1071
1161
        self.assertEqual(25, packs._max_pack_count(112894))
1072
1162
 
1073
 
    def test_repr(self):
1074
 
        packs = self.get_packs()
1075
 
        self.assertContainsRe(repr(packs),
1076
 
            'RepositoryPackCollection(.*Repository(.*))')
1077
 
 
1078
 
    def test__obsolete_packs(self):
1079
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1080
 
        names = packs.names()
1081
 
        pack = packs.get_pack_by_name(names[0])
1082
 
        # Schedule this one for removal
1083
 
        packs._remove_pack_from_memory(pack)
1084
 
        # Simulate a concurrent update by renaming the .pack file and one of
1085
 
        # the indices
1086
 
        packs.transport.rename('packs/%s.pack' % (names[0],),
1087
 
                               'obsolete_packs/%s.pack' % (names[0],))
1088
 
        packs.transport.rename('indices/%s.iix' % (names[0],),
1089
 
                               'obsolete_packs/%s.iix' % (names[0],))
1090
 
        # Now trigger the obsoletion, and ensure that all the remaining files
1091
 
        # are still renamed
1092
 
        packs._obsolete_packs([pack])
1093
 
        self.assertEqual([n + '.pack' for n in names[1:]],
1094
 
                         sorted(packs._pack_transport.list_dir('.')))
1095
 
        # names[0] should not be present in the index anymore
1096
 
        self.assertEqual(names[1:],
1097
 
            sorted(set([osutils.splitext(n)[0] for n in
1098
 
                        packs._index_transport.list_dir('.')])))
1099
 
 
1100
 
    def test__obsolete_packs_missing_directory(self):
1101
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1102
 
        r.control_transport.rmdir('obsolete_packs')
1103
 
        names = packs.names()
1104
 
        pack = packs.get_pack_by_name(names[0])
1105
 
        # Schedule this one for removal
1106
 
        packs._remove_pack_from_memory(pack)
1107
 
        # Now trigger the obsoletion, and ensure that all the remaining files
1108
 
        # are still renamed
1109
 
        packs._obsolete_packs([pack])
1110
 
        self.assertEqual([n + '.pack' for n in names[1:]],
1111
 
                         sorted(packs._pack_transport.list_dir('.')))
1112
 
        # names[0] should not be present in the index anymore
1113
 
        self.assertEqual(names[1:],
1114
 
            sorted(set([osutils.splitext(n)[0] for n in
1115
 
                        packs._index_transport.list_dir('.')])))
1116
 
 
1117
1163
    def test_pack_distribution_zero(self):
1118
1164
        packs = self.get_packs()
1119
1165
        self.assertEqual([0], packs.pack_distribution(0))
1287
1333
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
1288
1334
        self.assertFalse(packs.reload_pack_names())
1289
1335
 
1290
 
    def test_reload_pack_names_preserves_pending(self):
1291
 
        # TODO: Update this to also test for pending-deleted names
1292
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1293
 
        # We will add one pack (via start_write_group + insert_record_stream),
1294
 
        # and remove another pack (via _remove_pack_from_memory)
1295
 
        orig_names = packs.names()
1296
 
        orig_at_load = packs._packs_at_load
1297
 
        to_remove_name = iter(orig_names).next()
1298
 
        r.start_write_group()
1299
 
        self.addCleanup(r.abort_write_group)
1300
 
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
1301
 
            ('text', 'rev'), (), None, 'content\n')])
1302
 
        new_pack = packs._new_pack
1303
 
        self.assertTrue(new_pack.data_inserted())
1304
 
        new_pack.finish()
1305
 
        packs.allocate(new_pack)
1306
 
        packs._new_pack = None
1307
 
        removed_pack = packs.get_pack_by_name(to_remove_name)
1308
 
        packs._remove_pack_from_memory(removed_pack)
1309
 
        names = packs.names()
1310
 
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1311
 
        new_names = set([x[0][0] for x in new_nodes])
1312
 
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
1313
 
        self.assertEqual(set(names) - set(orig_names), new_names)
1314
 
        self.assertEqual(set([new_pack.name]), new_names)
1315
 
        self.assertEqual([to_remove_name],
1316
 
                         sorted([x[0][0] for x in deleted_nodes]))
1317
 
        packs.reload_pack_names()
1318
 
        reloaded_names = packs.names()
1319
 
        self.assertEqual(orig_at_load, packs._packs_at_load)
1320
 
        self.assertEqual(names, reloaded_names)
1321
 
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1322
 
        new_names = set([x[0][0] for x in new_nodes])
1323
 
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
1324
 
        self.assertEqual(set(names) - set(orig_names), new_names)
1325
 
        self.assertEqual(set([new_pack.name]), new_names)
1326
 
        self.assertEqual([to_remove_name],
1327
 
                         sorted([x[0][0] for x in deleted_nodes]))
1328
 
 
1329
 
    def test_autopack_obsoletes_new_pack(self):
1330
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1331
 
        packs._max_pack_count = lambda x: 1
1332
 
        packs.pack_distribution = lambda x: [10]
1333
 
        r.start_write_group()
1334
 
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
1335
 
            ('bogus-rev',), (), None, 'bogus-content\n')])
1336
 
        # This should trigger an autopack, which will combine everything into a
1337
 
        # single pack file.
1338
 
        new_names = r.commit_write_group()
1339
 
        names = packs.names()
1340
 
        self.assertEqual(1, len(names))
1341
 
        self.assertEqual([names[0] + '.pack'],
1342
 
                         packs._pack_transport.list_dir('.'))
1343
 
 
1344
1336
    def test_autopack_reloads_and_stops(self):
1345
1337
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1346
1338
        # After we have determined what needs to be autopacked, trigger a
1358
1350
        self.assertEqual(tree.branch.repository._pack_collection.names(),
1359
1351
                         packs.names())
1360
1352
 
1361
 
    def test__save_pack_names(self):
1362
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1363
 
        names = packs.names()
1364
 
        pack = packs.get_pack_by_name(names[0])
1365
 
        packs._remove_pack_from_memory(pack)
1366
 
        packs._save_pack_names(obsolete_packs=[pack])
1367
 
        cur_packs = packs._pack_transport.list_dir('.')
1368
 
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
1369
 
        # obsolete_packs will also have stuff like .rix and .iix present.
1370
 
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
1371
 
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
1372
 
        self.assertEqual([pack.name], sorted(obsolete_names))
1373
 
 
1374
 
    def test__save_pack_names_already_obsoleted(self):
1375
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1376
 
        names = packs.names()
1377
 
        pack = packs.get_pack_by_name(names[0])
1378
 
        packs._remove_pack_from_memory(pack)
1379
 
        # We are going to simulate a concurrent autopack by manually obsoleting
1380
 
        # the pack directly.
1381
 
        packs._obsolete_packs([pack])
1382
 
        packs._save_pack_names(clear_obsolete_packs=True,
1383
 
                               obsolete_packs=[pack])
1384
 
        cur_packs = packs._pack_transport.list_dir('.')
1385
 
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
1386
 
        # Note that while we set clear_obsolete_packs=True, it should not
1387
 
        # delete a pack file that we have also scheduled for obsoletion.
1388
 
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
1389
 
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
1390
 
        self.assertEqual([pack.name], sorted(obsolete_names))
1391
 
 
1392
 
    def test_pack_no_obsolete_packs_directory(self):
1393
 
        """Bug #314314, don't fail if obsolete_packs directory does
1394
 
        not exist."""
1395
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1396
 
        r.control_transport.rmdir('obsolete_packs')
1397
 
        packs._clear_obsolete_packs()
1398
 
 
1399
1353
 
1400
1354
class TestPack(TestCaseWithTransport):
1401
1355
    """Tests for the Pack object."""
1465
1419
            index_class=BTreeGraphIndex,
1466
1420
            use_chk_index=False)
1467
1421
        pack = pack_repo.NewPack(collection)
1468
 
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
1469
1422
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
1470
1423
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
1471
1424
        self.assertIsInstance(pack._hash, type(osutils.md5()))
1501
1454
        # Because of how they were built, they correspond to
1502
1455
        # ['D', 'C', 'B', 'A']
1503
1456
        packs = b.repository._pack_collection.packs
1504
 
        packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
 
1457
        packer = pack_repo.Packer(b.repository._pack_collection,
1505
1458
                                  packs, 'testing',
1506
1459
                                  revision_ids=['B', 'C'])
1507
1460
        # Now, when we are copying the B & C revisions, their pack files should
1521
1474
        return repo._pack_collection
1522
1475
 
1523
1476
    def test_open_pack_will_optimise(self):
1524
 
        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
 
1477
        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
1525
1478
                                            [], '.test')
1526
1479
        new_pack = packer.open_pack()
1527
 
        self.addCleanup(new_pack.abort) # ensure cleanup
1528
1480
        self.assertIsInstance(new_pack, pack_repo.NewPack)
1529
1481
        self.assertTrue(new_pack.revision_index._optimize_for_size)
1530
1482
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
1532
1484
        self.assertTrue(new_pack.signature_index._optimize_for_size)
1533
1485
 
1534
1486
 
1535
 
class TestGCCHKPacker(TestCaseWithTransport):
1536
 
 
1537
 
    def make_abc_branch(self):
1538
 
        builder = self.make_branch_builder('source')
1539
 
        builder.start_series()
1540
 
        builder.build_snapshot('A', None, [
1541
 
            ('add', ('', 'root-id', 'directory', None)),
1542
 
            ('add', ('file', 'file-id', 'file', 'content\n')),
1543
 
            ])
1544
 
        builder.build_snapshot('B', ['A'], [
1545
 
            ('add', ('dir', 'dir-id', 'directory', None))])
1546
 
        builder.build_snapshot('C', ['B'], [
1547
 
            ('modify', ('file-id', 'new content\n'))])
1548
 
        builder.finish_series()
1549
 
        return builder.get_branch()
1550
 
 
1551
 
    def make_branch_with_disjoint_inventory_and_revision(self):
1552
 
        """a repo with separate packs for a revisions Revision and Inventory.
1553
 
 
1554
 
        There will be one pack file that holds the Revision content, and one
1555
 
        for the Inventory content.
1556
 
 
1557
 
        :return: (repository,
1558
 
                  pack_name_with_rev_A_Revision,
1559
 
                  pack_name_with_rev_A_Inventory,
1560
 
                  pack_name_with_rev_C_content)
1561
 
        """
1562
 
        b_source = self.make_abc_branch()
1563
 
        b_base = b_source.bzrdir.sprout('base', revision_id='A').open_branch()
1564
 
        b_stacked = b_base.bzrdir.sprout('stacked', stacked=True).open_branch()
1565
 
        b_stacked.lock_write()
1566
 
        self.addCleanup(b_stacked.unlock)
1567
 
        b_stacked.fetch(b_source, 'B')
1568
 
        # Now re-open the stacked repo directly (no fallbacks) so that we can
1569
 
        # fill in the A rev.
1570
 
        repo_not_stacked = b_stacked.bzrdir.open_repository()
1571
 
        repo_not_stacked.lock_write()
1572
 
        self.addCleanup(repo_not_stacked.unlock)
1573
 
        # Now we should have a pack file with A's inventory, but not its
1574
 
        # Revision
1575
 
        self.assertEqual([('A',), ('B',)],
1576
 
                         sorted(repo_not_stacked.inventories.keys()))
1577
 
        self.assertEqual([('B',)],
1578
 
                         sorted(repo_not_stacked.revisions.keys()))
1579
 
        stacked_pack_names = repo_not_stacked._pack_collection.names()
1580
 
        # We have a couple names here, figure out which has A's inventory
1581
 
        for name in stacked_pack_names:
1582
 
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
1583
 
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
1584
 
            if ('A',) in keys:
1585
 
                inv_a_pack_name = name
1586
 
                break
1587
 
        else:
1588
 
            self.fail('Could not find pack containing A\'s inventory')
1589
 
        repo_not_stacked.fetch(b_source.repository, 'A')
1590
 
        self.assertEqual([('A',), ('B',)],
1591
 
                         sorted(repo_not_stacked.revisions.keys()))
1592
 
        new_pack_names = set(repo_not_stacked._pack_collection.names())
1593
 
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
1594
 
        self.assertEqual(1, len(rev_a_pack_names))
1595
 
        rev_a_pack_name = list(rev_a_pack_names)[0]
1596
 
        # Now fetch 'C', so we have a couple pack files to join
1597
 
        repo_not_stacked.fetch(b_source.repository, 'C')
1598
 
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
1599
 
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
1600
 
        self.assertEqual(1, len(rev_c_pack_names))
1601
 
        rev_c_pack_name = list(rev_c_pack_names)[0]
1602
 
        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
1603
 
                rev_c_pack_name)
1604
 
 
1605
 
    def test_pack_with_distant_inventories(self):
1606
 
        # See https://bugs.launchpad.net/bzr/+bug/437003
1607
 
        # When repacking, it is possible to have an inventory in a different
1608
 
        # pack file than the associated revision. An autopack can then come
1609
 
        # along, and miss that inventory, and complain.
1610
 
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
1611
 
         ) = self.make_branch_with_disjoint_inventory_and_revision()
1612
 
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
1613
 
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
1614
 
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
1615
 
                    [a_pack, c_pack], '.test-pack')
1616
 
        # This would raise ValueError in bug #437003, but should not raise an
1617
 
        # error once fixed.
1618
 
        packer.pack()
1619
 
 
1620
 
    def test_pack_with_missing_inventory(self):
1621
 
        # Similar to test_pack_with_missing_inventory, but this time, we force
1622
 
        # the A inventory to actually be gone from the repository.
1623
 
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
1624
 
         ) = self.make_branch_with_disjoint_inventory_and_revision()
1625
 
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
1626
 
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
1627
 
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
1628
 
            repo._pack_collection.all_packs(), '.test-pack')
1629
 
        e = self.assertRaises(ValueError, packer.pack)
1630
 
        packer.new_pack.abort()
1631
 
        self.assertContainsRe(str(e),
1632
 
            r"We are missing inventories for revisions: .*'A'")
1633
 
 
1634
 
 
1635
1487
class TestCrossFormatPacks(TestCaseWithTransport):
1636
1488
 
1637
1489
    def log_pack(self, hint=None):
1652
1504
        self.addCleanup(target.unlock)
1653
1505
        source = source_tree.branch.repository._get_source(target._format)
1654
1506
        self.orig_pack = target.pack
1655
 
        self.overrideAttr(target, "pack", self.log_pack)
 
1507
        target.pack = self.log_pack
1656
1508
        search = target.search_missing_revision_ids(
1657
 
            source_tree.branch.repository, revision_ids=[tip])
 
1509
            source_tree.branch.repository, tip)
1658
1510
        stream = source.get_stream(search)
1659
1511
        from_format = source_tree.branch.repository._format
1660
1512
        sink = target._get_sink()
1676
1528
        self.addCleanup(target.unlock)
1677
1529
        source = source_tree.branch.repository
1678
1530
        self.orig_pack = target.pack
1679
 
        self.overrideAttr(target, "pack", self.log_pack)
 
1531
        target.pack = self.log_pack
1680
1532
        target.fetch(source)
1681
1533
        if expect_pack_called:
1682
1534
            self.assertLength(1, self.calls)
1710
1562
    def test_IDS_format_same_no(self):
1711
1563
        # When the formats are the same, pack is not called.
1712
1564
        self.run_fetch('2a', '2a', False)
1713
 
 
1714
 
 
1715
 
class Test_LazyListJoin(tests.TestCase):
1716
 
 
1717
 
    def test__repr__(self):
1718
 
        lazy = repository._LazyListJoin(['a'], ['b'])
1719
 
        self.assertEqual("bzrlib.repository._LazyListJoin((['a'], ['b']))",
1720
 
                         repr(lazy))
1721
 
 
1722
 
 
1723
 
class TestFeatures(tests.TestCaseWithTransport):
1724
 
 
1725
 
    def test_open_with_present_feature(self):
1726
 
        self.addCleanup(
1727
 
            repository.RepositoryFormatMetaDir.unregister_feature,
1728
 
            "makes-cheese-sandwich")
1729
 
        repository.RepositoryFormatMetaDir.register_feature(
1730
 
            "makes-cheese-sandwich")
1731
 
        repo = self.make_repository('.')
1732
 
        repo.lock_write()
1733
 
        repo._format.features["makes-cheese-sandwich"] = "required"
1734
 
        repo._format.check_support_status(False)
1735
 
        repo.unlock()
1736
 
 
1737
 
    def test_open_with_missing_required_feature(self):
1738
 
        repo = self.make_repository('.')
1739
 
        repo.lock_write()
1740
 
        repo._format.features["makes-cheese-sandwich"] = "required"
1741
 
        self.assertRaises(errors.MissingFeature,
1742
 
            repo._format.check_support_status, False)