~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Vincent Ladeuil
  • Date: 2016-02-01 19:26:41 UTC
  • mto: This revision was merged to the branch mainline in revision 6616.
  • Revision ID: v.ladeuil+lp@free.fr-20160201192641-mzn90m51rydhw00n
Open trunk again as 2.8b1

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
 
1
# Copyright (C) 2006-2012, 2016 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
23
23
"""
24
24
 
25
25
from stat import S_ISDIR
26
 
from StringIO import StringIO
27
26
 
28
27
import bzrlib
29
 
from bzrlib.errors import (NotBranchError,
30
 
                           NoSuchFile,
31
 
                           UnknownFormatError,
32
 
                           UnsupportedFormatError,
33
 
                           )
 
28
from bzrlib.errors import (
 
29
    UnknownFormatError,
 
30
    UnsupportedFormatError,
 
31
    )
34
32
from bzrlib import (
35
 
    graph,
 
33
    btree_index,
 
34
    symbol_versioning,
36
35
    tests,
 
36
    transport,
 
37
    vf_search,
37
38
    )
38
 
from bzrlib.branchbuilder import BranchBuilder
39
39
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
40
 
from bzrlib.index import GraphIndex, InMemoryGraphIndex
 
40
from bzrlib.index import GraphIndex
41
41
from bzrlib.repository import RepositoryFormat
42
 
from bzrlib.smart import server
43
42
from bzrlib.tests import (
44
43
    TestCase,
45
44
    TestCaseWithTransport,
46
 
    TestSkipped,
47
 
    test_knit,
48
 
    )
49
 
from bzrlib.transport import (
50
 
    fakenfs,
51
 
    get_transport,
52
 
    )
53
 
from bzrlib.transport.memory import MemoryServer
 
45
    )
54
46
from bzrlib import (
55
 
    bencode,
56
47
    bzrdir,
 
48
    controldir,
57
49
    errors,
58
50
    inventory,
59
51
    osutils,
60
 
    progress,
61
52
    repository,
62
53
    revision as _mod_revision,
63
 
    symbol_versioning,
64
54
    upgrade,
 
55
    versionedfile,
 
56
    vf_repository,
65
57
    workingtree,
66
58
    )
67
59
from bzrlib.repofmt import (
68
60
    groupcompress_repo,
69
61
    knitrepo,
 
62
    knitpack_repo,
70
63
    pack_repo,
71
 
    weaverepo,
72
64
    )
73
65
 
74
66
 
75
67
class TestDefaultFormat(TestCase):
76
68
 
77
69
    def test_get_set_default_format(self):
78
 
        old_default = bzrdir.format_registry.get('default')
 
70
        old_default = controldir.format_registry.get('default')
79
71
        private_default = old_default().repository_format.__class__
80
 
        old_format = repository.RepositoryFormat.get_default_format()
 
72
        old_format = repository.format_registry.get_default()
81
73
        self.assertTrue(isinstance(old_format, private_default))
82
74
        def make_sample_bzrdir():
83
75
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
84
76
            my_bzrdir.repository_format = SampleRepositoryFormat()
85
77
            return my_bzrdir
86
 
        bzrdir.format_registry.remove('default')
87
 
        bzrdir.format_registry.register('sample', make_sample_bzrdir, '')
88
 
        bzrdir.format_registry.set_default('sample')
 
78
        controldir.format_registry.remove('default')
 
79
        controldir.format_registry.register('sample', make_sample_bzrdir, '')
 
80
        controldir.format_registry.set_default('sample')
89
81
        # creating a repository should now create an instrumented dir.
90
82
        try:
91
83
            # the default branch format is used by the meta dir format
94
86
            result = dir.create_repository()
95
87
            self.assertEqual(result, 'A bzr repository dir')
96
88
        finally:
97
 
            bzrdir.format_registry.remove('default')
98
 
            bzrdir.format_registry.remove('sample')
99
 
            bzrdir.format_registry.register('default', old_default, '')
100
 
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
 
89
            controldir.format_registry.remove('default')
 
90
            controldir.format_registry.remove('sample')
 
91
            controldir.format_registry.register('default', old_default, '')
 
92
        self.assertIsInstance(repository.format_registry.get_default(),
101
93
                              old_format.__class__)
102
94
 
103
95
 
104
 
class SampleRepositoryFormat(repository.RepositoryFormat):
 
96
class SampleRepositoryFormat(repository.RepositoryFormatMetaDir):
105
97
    """A sample format
106
98
 
107
99
    this format is initializable, unsupported to aid in testing the
108
100
    open and open(unsupported=True) routines.
109
101
    """
110
102
 
111
 
    def get_format_string(self):
 
103
    @classmethod
 
104
    def get_format_string(cls):
112
105
        """See RepositoryFormat.get_format_string()."""
113
106
        return "Sample .bzr repository format."
114
107
 
125
118
        return "opened repository."
126
119
 
127
120
 
 
121
class SampleExtraRepositoryFormat(repository.RepositoryFormat):
 
122
    """A sample format that can not be used in a metadir
 
123
 
 
124
    """
 
125
 
 
126
    def get_format_string(self):
 
127
        raise NotImplementedError
 
128
 
 
129
 
128
130
class TestRepositoryFormat(TestCaseWithTransport):
129
131
    """Tests for the Repository format detection used by the bzr meta dir facility.BzrBranchFormat facility."""
130
132
 
136
138
        def check_format(format, url):
137
139
            dir = format._matchingbzrdir.initialize(url)
138
140
            format.initialize(dir)
139
 
            t = get_transport(url)
140
 
            found_format = repository.RepositoryFormat.find_format(dir)
141
 
            self.failUnless(isinstance(found_format, format.__class__))
142
 
        check_format(weaverepo.RepositoryFormat7(), "bar")
 
141
            t = transport.get_transport_from_path(url)
 
142
            found_format = repository.RepositoryFormatMetaDir.find_format(dir)
 
143
            self.assertIsInstance(found_format, format.__class__)
 
144
        check_format(repository.format_registry.get_default(), "bar")
143
145
 
144
146
    def test_find_format_no_repository(self):
145
147
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
146
148
        self.assertRaises(errors.NoRepositoryPresent,
147
 
                          repository.RepositoryFormat.find_format,
 
149
                          repository.RepositoryFormatMetaDir.find_format,
148
150
                          dir)
149
151
 
 
152
    def test_from_string(self):
 
153
        self.assertIsInstance(
 
154
            SampleRepositoryFormat.from_string(
 
155
                "Sample .bzr repository format."),
 
156
            SampleRepositoryFormat)
 
157
        self.assertRaises(AssertionError,
 
158
            SampleRepositoryFormat.from_string,
 
159
                "Different .bzr repository format.")
 
160
 
150
161
    def test_find_format_unknown_format(self):
151
162
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
152
163
        SampleRepositoryFormat().initialize(dir)
153
164
        self.assertRaises(UnknownFormatError,
154
 
                          repository.RepositoryFormat.find_format,
 
165
                          repository.RepositoryFormatMetaDir.find_format,
155
166
                          dir)
156
167
 
 
168
    def test_find_format_with_features(self):
 
169
        tree = self.make_branch_and_tree('.', format='2a')
 
170
        tree.branch.repository.update_feature_flags({"name": "necessity"})
 
171
        found_format = repository.RepositoryFormatMetaDir.find_format(tree.bzrdir)
 
172
        self.assertIsInstance(found_format, repository.RepositoryFormatMetaDir)
 
173
        self.assertEqual(found_format.features.get("name"), "necessity")
 
174
        self.assertRaises(errors.MissingFeature, found_format.check_support_status,
 
175
            True)
 
176
        self.addCleanup(repository.RepositoryFormatMetaDir.unregister_feature,
 
177
            "name")
 
178
        repository.RepositoryFormatMetaDir.register_feature("name")
 
179
        found_format.check_support_status(True)
 
180
 
 
181
 
 
182
class TestRepositoryFormatRegistry(TestCase):
 
183
 
 
184
    def setUp(self):
 
185
        super(TestRepositoryFormatRegistry, self).setUp()
 
186
        self.registry = repository.RepositoryFormatRegistry()
 
187
 
157
188
    def test_register_unregister_format(self):
158
189
        format = SampleRepositoryFormat()
159
 
        # make a control dir
160
 
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
161
 
        # make a repo
162
 
        format.initialize(dir)
163
 
        # register a format for it.
164
 
        repository.RepositoryFormat.register_format(format)
165
 
        # which repository.Open will refuse (not supported)
166
 
        self.assertRaises(UnsupportedFormatError, repository.Repository.open, self.get_url())
167
 
        # but open(unsupported) will work
168
 
        self.assertEqual(format.open(dir), "opened repository.")
169
 
        # unregister the format
170
 
        repository.RepositoryFormat.unregister_format(format)
171
 
 
172
 
 
173
 
class TestFormat6(TestCaseWithTransport):
174
 
 
175
 
    def test_attribute__fetch_order(self):
176
 
        """Weaves need topological data insertion."""
177
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
178
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
179
 
        self.assertEqual('topological', repo._format._fetch_order)
180
 
 
181
 
    def test_attribute__fetch_uses_deltas(self):
182
 
        """Weaves do not reuse deltas."""
183
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
184
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
185
 
        self.assertEqual(False, repo._format._fetch_uses_deltas)
186
 
 
187
 
    def test_attribute__fetch_reconcile(self):
188
 
        """Weave repositories need a reconcile after fetch."""
189
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
190
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
191
 
        self.assertEqual(True, repo._format._fetch_reconcile)
192
 
 
193
 
    def test_no_ancestry_weave(self):
194
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
195
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
196
 
        # We no longer need to create the ancestry.weave file
197
 
        # since it is *never* used.
198
 
        self.assertRaises(NoSuchFile,
199
 
                          control.transport.get,
200
 
                          'ancestry.weave')
201
 
 
202
 
    def test_supports_external_lookups(self):
203
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
204
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
205
 
        self.assertFalse(repo._format.supports_external_lookups)
206
 
 
207
 
 
208
 
class TestFormat7(TestCaseWithTransport):
209
 
 
210
 
    def test_attribute__fetch_order(self):
211
 
        """Weaves need topological data insertion."""
212
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
213
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
214
 
        self.assertEqual('topological', repo._format._fetch_order)
215
 
 
216
 
    def test_attribute__fetch_uses_deltas(self):
217
 
        """Weaves do not reuse deltas."""
218
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
219
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
220
 
        self.assertEqual(False, repo._format._fetch_uses_deltas)
221
 
 
222
 
    def test_attribute__fetch_reconcile(self):
223
 
        """Weave repositories need a reconcile after fetch."""
224
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
225
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
226
 
        self.assertEqual(True, repo._format._fetch_reconcile)
227
 
 
228
 
    def test_disk_layout(self):
229
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
230
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
231
 
        # in case of side effects of locking.
232
 
        repo.lock_write()
233
 
        repo.unlock()
234
 
        # we want:
235
 
        # format 'Bazaar-NG Repository format 7'
236
 
        # lock ''
237
 
        # inventory.weave == empty_weave
238
 
        # empty revision-store directory
239
 
        # empty weaves directory
240
 
        t = control.get_repository_transport(None)
241
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
242
 
                             t.get('format').read())
243
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
244
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
245
 
        self.assertEqualDiff('# bzr weave file v5\n'
246
 
                             'w\n'
247
 
                             'W\n',
248
 
                             t.get('inventory.weave').read())
249
 
        # Creating a file with id Foo:Bar results in a non-escaped file name on
250
 
        # disk.
251
 
        control.create_branch()
252
 
        tree = control.create_workingtree()
253
 
        tree.add(['foo'], ['Foo:Bar'], ['file'])
254
 
        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
255
 
        tree.commit('first post', rev_id='first')
256
 
        self.assertEqualDiff(
257
 
            '# bzr weave file v5\n'
258
 
            'i\n'
259
 
            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
260
 
            'n first\n'
261
 
            '\n'
262
 
            'w\n'
263
 
            '{ 0\n'
264
 
            '. content\n'
265
 
            '}\n'
266
 
            'W\n',
267
 
            t.get('weaves/74/Foo%3ABar.weave').read())
268
 
 
269
 
    def test_shared_disk_layout(self):
270
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
271
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
272
 
        # we want:
273
 
        # format 'Bazaar-NG Repository format 7'
274
 
        # inventory.weave == empty_weave
275
 
        # empty revision-store directory
276
 
        # empty weaves directory
277
 
        # a 'shared-storage' marker file.
278
 
        # lock is not present when unlocked
279
 
        t = control.get_repository_transport(None)
280
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
281
 
                             t.get('format').read())
282
 
        self.assertEqualDiff('', t.get('shared-storage').read())
283
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
284
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
285
 
        self.assertEqualDiff('# bzr weave file v5\n'
286
 
                             'w\n'
287
 
                             'W\n',
288
 
                             t.get('inventory.weave').read())
289
 
        self.assertFalse(t.has('branch-lock'))
290
 
 
291
 
    def test_creates_lockdir(self):
292
 
        """Make sure it appears to be controlled by a LockDir existence"""
293
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
294
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
295
 
        t = control.get_repository_transport(None)
296
 
        # TODO: Should check there is a 'lock' toplevel directory,
297
 
        # regardless of contents
298
 
        self.assertFalse(t.has('lock/held/info'))
299
 
        repo.lock_write()
300
 
        try:
301
 
            self.assertTrue(t.has('lock/held/info'))
302
 
        finally:
303
 
            # unlock so we don't get a warning about failing to do so
304
 
            repo.unlock()
305
 
 
306
 
    def test_uses_lockdir(self):
307
 
        """repo format 7 actually locks on lockdir"""
308
 
        base_url = self.get_url()
309
 
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
310
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
311
 
        t = control.get_repository_transport(None)
312
 
        repo.lock_write()
313
 
        repo.unlock()
314
 
        del repo
315
 
        # make sure the same lock is created by opening it
316
 
        repo = repository.Repository.open(base_url)
317
 
        repo.lock_write()
318
 
        self.assertTrue(t.has('lock/held/info'))
319
 
        repo.unlock()
320
 
        self.assertFalse(t.has('lock/held/info'))
321
 
 
322
 
    def test_shared_no_tree_disk_layout(self):
323
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
324
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
325
 
        repo.set_make_working_trees(False)
326
 
        # we want:
327
 
        # format 'Bazaar-NG Repository format 7'
328
 
        # lock ''
329
 
        # inventory.weave == empty_weave
330
 
        # empty revision-store directory
331
 
        # empty weaves directory
332
 
        # a 'shared-storage' marker file.
333
 
        t = control.get_repository_transport(None)
334
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
335
 
                             t.get('format').read())
336
 
        ## self.assertEqualDiff('', t.get('lock').read())
337
 
        self.assertEqualDiff('', t.get('shared-storage').read())
338
 
        self.assertEqualDiff('', t.get('no-working-trees').read())
339
 
        repo.set_make_working_trees(True)
340
 
        self.assertFalse(t.has('no-working-trees'))
341
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
342
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
343
 
        self.assertEqualDiff('# bzr weave file v5\n'
344
 
                             'w\n'
345
 
                             'W\n',
346
 
                             t.get('inventory.weave').read())
347
 
 
348
 
    def test_supports_external_lookups(self):
349
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
350
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
351
 
        self.assertFalse(repo._format.supports_external_lookups)
 
190
        self.registry.register(format)
 
191
        self.assertEqual(format, self.registry.get("Sample .bzr repository format."))
 
192
        self.registry.remove(format)
 
193
        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")
 
194
 
 
195
    def test_get_all(self):
 
196
        format = SampleRepositoryFormat()
 
197
        self.assertEqual([], self.registry._get_all())
 
198
        self.registry.register(format)
 
199
        self.assertEqual([format], self.registry._get_all())
 
200
 
 
201
    def test_register_extra(self):
 
202
        format = SampleExtraRepositoryFormat()
 
203
        self.assertEqual([], self.registry._get_all())
 
204
        self.registry.register_extra(format)
 
205
        self.assertEqual([format], self.registry._get_all())
 
206
 
 
207
    def test_register_extra_lazy(self):
 
208
        self.assertEqual([], self.registry._get_all())
 
209
        self.registry.register_extra_lazy("bzrlib.tests.test_repository",
 
210
            "SampleExtraRepositoryFormat")
 
211
        formats = self.registry._get_all()
 
212
        self.assertEqual(1, len(formats))
 
213
        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
352
214
 
353
215
 
354
216
class TestFormatKnit1(TestCaseWithTransport):
356
218
    def test_attribute__fetch_order(self):
357
219
        """Knits need topological data insertion."""
358
220
        repo = self.make_repository('.',
359
 
                format=bzrdir.format_registry.get('knit')())
 
221
                format=controldir.format_registry.get('knit')())
360
222
        self.assertEqual('topological', repo._format._fetch_order)
361
223
 
362
224
    def test_attribute__fetch_uses_deltas(self):
363
225
        """Knits reuse deltas."""
364
226
        repo = self.make_repository('.',
365
 
                format=bzrdir.format_registry.get('knit')())
 
227
                format=controldir.format_registry.get('knit')())
366
228
        self.assertEqual(True, repo._format._fetch_uses_deltas)
367
229
 
368
230
    def test_disk_layout(self):
454
316
        is valid when the api is not being abused.
455
317
        """
456
318
        repo = self.make_repository('.',
457
 
                format=bzrdir.format_registry.get('knit')())
 
319
                format=controldir.format_registry.get('knit')())
458
320
        inv_xml = '<inventory format="5">\n</inventory>\n'
459
 
        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
 
321
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
460
322
        self.assertEqual('test-rev-id', inv.root.revision)
461
323
 
462
324
    def test_deserialise_uses_global_revision_id(self):
463
325
        """If it is set, then we re-use the global revision id"""
464
326
        repo = self.make_repository('.',
465
 
                format=bzrdir.format_registry.get('knit')())
 
327
                format=controldir.format_registry.get('knit')())
466
328
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
467
329
                   '</inventory>\n')
468
330
        # Arguably, the deserialise_inventory should detect a mismatch, and
469
331
        # raise an error, rather than silently using one revision_id over the
470
332
        # other.
471
 
        self.assertRaises(AssertionError, repo.deserialise_inventory,
 
333
        self.assertRaises(AssertionError, repo._deserialise_inventory,
472
334
            'test-rev-id', inv_xml)
473
 
        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
 
335
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
474
336
        self.assertEqual('other-rev-id', inv.root.revision)
475
337
 
476
338
    def test_supports_external_lookups(self):
477
339
        repo = self.make_repository('.',
478
 
                format=bzrdir.format_registry.get('knit')())
 
340
                format=controldir.format_registry.get('knit')())
479
341
        self.assertFalse(repo._format.supports_external_lookups)
480
342
 
481
343
 
486
348
    _serializer = None
487
349
 
488
350
    def supports_rich_root(self):
 
351
        if self._format is not None:
 
352
            return self._format.rich_root_data
489
353
        return False
490
354
 
491
355
    def get_graph(self):
520
384
        # classes do not barf inappropriately when a surprising repository type
521
385
        # is handed to them.
522
386
        dummy_a = DummyRepository()
 
387
        dummy_a._format = RepositoryFormat()
 
388
        dummy_a._format.supports_full_versioned_files = True
523
389
        dummy_b = DummyRepository()
 
390
        dummy_b._format = RepositoryFormat()
 
391
        dummy_b._format.supports_full_versioned_files = True
524
392
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
525
393
 
526
394
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
530
398
        no actual sane default in the presence of incompatible data models.
531
399
        """
532
400
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
533
 
        self.assertEqual(repository.InterSameDataRepository,
 
401
        self.assertEqual(vf_repository.InterSameDataRepository,
534
402
                         inter_repo.__class__)
535
403
        self.assertEqual(repo_a, inter_repo.source)
536
404
        self.assertEqual(repo_b, inter_repo.target)
542
410
        # pair that it returns true on for the is_compatible static method
543
411
        # check
544
412
        dummy_a = DummyRepository()
 
413
        dummy_a._format = RepositoryFormat()
545
414
        dummy_b = DummyRepository()
 
415
        dummy_b._format = RepositoryFormat()
546
416
        repo = self.make_repository('.')
547
417
        # hack dummies to look like repo somewhat.
548
418
        dummy_a._serializer = repo._serializer
 
419
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
 
420
        dummy_a._format.rich_root_data = repo._format.rich_root_data
 
421
        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
549
422
        dummy_b._serializer = repo._serializer
 
423
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
 
424
        dummy_b._format.rich_root_data = repo._format.rich_root_data
 
425
        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
550
426
        repository.InterRepository.register_optimiser(InterDummy)
551
427
        try:
552
428
            # we should get the default for something InterDummy returns False
565
441
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
566
442
 
567
443
 
568
 
class TestInterWeaveRepo(TestCaseWithTransport):
569
 
 
570
 
    def test_is_compatible_and_registered(self):
571
 
        # InterWeaveRepo is compatible when either side
572
 
        # is a format 5/6/7 branch
573
 
        from bzrlib.repofmt import knitrepo, weaverepo
574
 
        formats = [weaverepo.RepositoryFormat5(),
575
 
                   weaverepo.RepositoryFormat6(),
576
 
                   weaverepo.RepositoryFormat7()]
577
 
        incompatible_formats = [weaverepo.RepositoryFormat4(),
578
 
                                knitrepo.RepositoryFormatKnit1(),
579
 
                                ]
580
 
        repo_a = self.make_repository('a')
581
 
        repo_b = self.make_repository('b')
582
 
        is_compatible = repository.InterWeaveRepo.is_compatible
583
 
        for source in incompatible_formats:
584
 
            # force incompatible left then right
585
 
            repo_a._format = source
586
 
            repo_b._format = formats[0]
587
 
            self.assertFalse(is_compatible(repo_a, repo_b))
588
 
            self.assertFalse(is_compatible(repo_b, repo_a))
589
 
        for source in formats:
590
 
            repo_a._format = source
591
 
            for target in formats:
592
 
                repo_b._format = target
593
 
                self.assertTrue(is_compatible(repo_a, repo_b))
594
 
        self.assertEqual(repository.InterWeaveRepo,
595
 
                         repository.InterRepository.get(repo_a,
596
 
                                                        repo_b).__class__)
 
444
class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):
 
445
 
 
446
    @classmethod
 
447
    def get_format_string(cls):
 
448
        return "Test Format 1"
 
449
 
 
450
 
 
451
class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):
 
452
 
 
453
    @classmethod
 
454
    def get_format_string(cls):
 
455
        return "Test Format 2"
597
456
 
598
457
 
599
458
class TestRepositoryConverter(TestCaseWithTransport):
600
459
 
601
460
    def test_convert_empty(self):
602
 
        t = get_transport(self.get_url('.'))
 
461
        source_format = TestRepositoryFormat1()
 
462
        target_format = TestRepositoryFormat2()
 
463
        repository.format_registry.register(source_format)
 
464
        self.addCleanup(repository.format_registry.remove,
 
465
            source_format)
 
466
        repository.format_registry.register(target_format)
 
467
        self.addCleanup(repository.format_registry.remove,
 
468
            target_format)
 
469
        t = self.get_transport()
603
470
        t.mkdir('repository')
604
471
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
605
 
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
606
 
        target_format = knitrepo.RepositoryFormatKnit1()
 
472
        repo = TestRepositoryFormat1().initialize(repo_dir)
607
473
        converter = repository.CopyConverter(target_format)
608
474
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
609
475
        try:
614
480
        self.assertTrue(isinstance(target_format, repo._format.__class__))
615
481
 
616
482
 
617
 
class TestMisc(TestCase):
618
 
 
619
 
    def test_unescape_xml(self):
620
 
        """We get some kind of error when malformed entities are passed"""
621
 
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
622
 
 
623
 
 
624
483
class TestRepositoryFormatKnit3(TestCaseWithTransport):
625
484
 
626
485
    def test_attribute__fetch_order(self):
647
506
        revision_tree.lock_read()
648
507
        try:
649
508
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
650
 
                revision_tree.inventory.root.file_id)
 
509
                revision_tree.get_root_id())
651
510
        finally:
652
511
            revision_tree.unlock()
653
512
        format = bzrdir.BzrDirMetaFormat1()
657
516
        revision_tree = tree.branch.repository.revision_tree('dull')
658
517
        revision_tree.lock_read()
659
518
        try:
660
 
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
 
519
            revision_tree.get_file_lines(revision_tree.get_root_id())
661
520
        finally:
662
521
            revision_tree.unlock()
663
522
        tree.commit("Another dull commit", rev_id='dull2')
664
523
        revision_tree = tree.branch.repository.revision_tree('dull2')
665
524
        revision_tree.lock_read()
666
525
        self.addCleanup(revision_tree.unlock)
667
 
        self.assertEqual('dull', revision_tree.inventory.root.revision)
 
526
        self.assertEqual('dull',
 
527
                revision_tree.get_file_revision(revision_tree.get_root_id()))
668
528
 
669
529
    def test_supports_external_lookups(self):
670
530
        format = bzrdir.BzrDirMetaFormat1()
673
533
        self.assertFalse(repo._format.supports_external_lookups)
674
534
 
675
535
 
676
 
class Test2a(TestCaseWithTransport):
 
536
class Test2a(tests.TestCaseWithMemoryTransport):
 
537
 
 
538
    def test_chk_bytes_uses_custom_btree_parser(self):
 
539
        mt = self.make_branch_and_memory_tree('test', format='2a')
 
540
        mt.lock_write()
 
541
        self.addCleanup(mt.unlock)
 
542
        mt.add([''], ['root-id'])
 
543
        mt.commit('first')
 
544
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
 
545
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
 
546
        # It should also work if we re-open the repo
 
547
        repo = mt.branch.repository.bzrdir.open_repository()
 
548
        repo.lock_read()
 
549
        self.addCleanup(repo.unlock)
 
550
        index = repo.chk_bytes._index._graph_index._indices[0]
 
551
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
 
552
 
 
553
    def test_fetch_combines_groups(self):
 
554
        builder = self.make_branch_builder('source', format='2a')
 
555
        builder.start_series()
 
556
        builder.build_snapshot('1', None, [
 
557
            ('add', ('', 'root-id', 'directory', '')),
 
558
            ('add', ('file', 'file-id', 'file', 'content\n'))])
 
559
        builder.build_snapshot('2', ['1'], [
 
560
            ('modify', ('file-id', 'content-2\n'))])
 
561
        builder.finish_series()
 
562
        source = builder.get_branch()
 
563
        target = self.make_repository('target', format='2a')
 
564
        target.fetch(source.repository)
 
565
        target.lock_read()
 
566
        self.addCleanup(target.unlock)
 
567
        details = target.texts._index.get_build_details(
 
568
            [('file-id', '1',), ('file-id', '2',)])
 
569
        file_1_details = details[('file-id', '1')]
 
570
        file_2_details = details[('file-id', '2')]
 
571
        # The index, and what to read off disk, should be the same for both
 
572
        # versions of the file.
 
573
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
574
 
 
575
    def test_fetch_combines_groups(self):
 
576
        builder = self.make_branch_builder('source', format='2a')
 
577
        builder.start_series()
 
578
        builder.build_snapshot('1', None, [
 
579
            ('add', ('', 'root-id', 'directory', '')),
 
580
            ('add', ('file', 'file-id', 'file', 'content\n'))])
 
581
        builder.build_snapshot('2', ['1'], [
 
582
            ('modify', ('file-id', 'content-2\n'))])
 
583
        builder.finish_series()
 
584
        source = builder.get_branch()
 
585
        target = self.make_repository('target', format='2a')
 
586
        target.fetch(source.repository)
 
587
        target.lock_read()
 
588
        self.addCleanup(target.unlock)
 
589
        details = target.texts._index.get_build_details(
 
590
            [('file-id', '1',), ('file-id', '2',)])
 
591
        file_1_details = details[('file-id', '1')]
 
592
        file_2_details = details[('file-id', '2')]
 
593
        # The index, and what to read off disk, should be the same for both
 
594
        # versions of the file.
 
595
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
596
 
 
597
    def test_fetch_combines_groups(self):
 
598
        builder = self.make_branch_builder('source', format='2a')
 
599
        builder.start_series()
 
600
        builder.build_snapshot('1', None, [
 
601
            ('add', ('', 'root-id', 'directory', '')),
 
602
            ('add', ('file', 'file-id', 'file', 'content\n'))])
 
603
        builder.build_snapshot('2', ['1'], [
 
604
            ('modify', ('file-id', 'content-2\n'))])
 
605
        builder.finish_series()
 
606
        source = builder.get_branch()
 
607
        target = self.make_repository('target', format='2a')
 
608
        target.fetch(source.repository)
 
609
        target.lock_read()
 
610
        self.addCleanup(target.unlock)
 
611
        details = target.texts._index.get_build_details(
 
612
            [('file-id', '1',), ('file-id', '2',)])
 
613
        file_1_details = details[('file-id', '1')]
 
614
        file_2_details = details[('file-id', '2')]
 
615
        # The index, and what to read off disk, should be the same for both
 
616
        # versions of the file.
 
617
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
677
618
 
678
619
    def test_format_pack_compresses_True(self):
679
620
        repo = self.make_repository('repo', format='2a')
680
621
        self.assertTrue(repo._format.pack_compresses)
681
622
 
682
623
    def test_inventories_use_chk_map_with_parent_base_dict(self):
683
 
        tree = self.make_branch_and_tree('repo', format="2a")
 
624
        tree = self.make_branch_and_memory_tree('repo', format="2a")
 
625
        tree.lock_write()
 
626
        tree.add([''], ['TREE_ROOT'])
684
627
        revid = tree.commit("foo")
 
628
        tree.unlock()
685
629
        tree.lock_read()
686
630
        self.addCleanup(tree.unlock)
687
631
        inv = tree.branch.repository.get_inventory(revid)
696
640
        # at 20 unchanged commits, chk pages are packed that are split into
697
641
        # two groups such that the new pack being made doesn't have all its
698
642
        # pages in the source packs (though they are in the repository).
699
 
        tree = self.make_branch_and_tree('tree', format='2a')
 
643
        # Use a memory backed repository, we don't need to hit disk for this
 
644
        tree = self.make_branch_and_memory_tree('tree', format='2a')
 
645
        tree.lock_write()
 
646
        self.addCleanup(tree.unlock)
 
647
        tree.add([''], ['TREE_ROOT'])
700
648
        for pos in range(20):
701
649
            tree.commit(str(pos))
702
650
 
703
651
    def test_pack_with_hint(self):
704
 
        tree = self.make_branch_and_tree('tree', format='2a')
 
652
        tree = self.make_branch_and_memory_tree('tree', format='2a')
 
653
        tree.lock_write()
 
654
        self.addCleanup(tree.unlock)
 
655
        tree.add([''], ['TREE_ROOT'])
705
656
        # 1 commit to leave untouched
706
657
        tree.commit('1')
707
658
        to_keep = tree.branch.repository._pack_collection.names()
730
681
        target = self.make_repository('target', format='rich-root-pack')
731
682
        stream = source._get_source(target._format)
732
683
        # We don't want the child GroupCHKStreamSource
733
 
        self.assertIs(type(stream), repository.StreamSource)
 
684
        self.assertIs(type(stream), vf_repository.StreamSource)
734
685
 
735
686
    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
736
687
        source_builder = self.make_branch_builder('source',
763
714
 
764
715
        # On a regular pass, getting the inventories and chk pages for rev-2
765
716
        # would only get the newly created chk pages
766
 
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
 
717
        search = vf_search.SearchResult(set(['rev-2']), set(['rev-1']), 1,
767
718
                                    set(['rev-2']))
768
719
        simple_chk_records = []
769
720
        for vf_name, substream in source.get_stream(search):
812
763
        source = self.make_repository('source', format='pack-0.92')
813
764
        target = self.make_repository('target', format='pack-0.92')
814
765
        stream_source = source._get_source(target._format)
815
 
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
 
766
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
816
767
 
817
768
    def test_source_to_exact_pack_rich_root_pack(self):
818
769
        source = self.make_repository('source', format='rich-root-pack')
819
770
        target = self.make_repository('target', format='rich-root-pack')
820
771
        stream_source = source._get_source(target._format)
821
 
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
 
772
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
822
773
 
823
774
    def test_source_to_exact_pack_19(self):
824
775
        source = self.make_repository('source', format='1.9')
825
776
        target = self.make_repository('target', format='1.9')
826
777
        stream_source = source._get_source(target._format)
827
 
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
 
778
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
828
779
 
829
780
    def test_source_to_exact_pack_19_rich_root(self):
830
781
        source = self.make_repository('source', format='1.9-rich-root')
831
782
        target = self.make_repository('target', format='1.9-rich-root')
832
783
        stream_source = source._get_source(target._format)
833
 
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
 
784
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
834
785
 
835
786
    def test_source_to_remote_exact_pack_19(self):
836
787
        trans = self.make_smart_server('target')
839
790
        target = self.make_repository('target', format='1.9')
840
791
        target = repository.Repository.open(trans.base)
841
792
        stream_source = source._get_source(target._format)
842
 
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
 
793
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
843
794
 
844
795
    def test_stream_source_to_non_exact(self):
845
796
        source = self.make_repository('source', format='pack-0.92')
846
797
        target = self.make_repository('target', format='1.9')
847
798
        stream = source._get_source(target._format)
848
 
        self.assertIs(type(stream), repository.StreamSource)
 
799
        self.assertIs(type(stream), vf_repository.StreamSource)
849
800
 
850
801
    def test_stream_source_to_non_exact_rich_root(self):
851
802
        source = self.make_repository('source', format='1.9')
852
803
        target = self.make_repository('target', format='1.9-rich-root')
853
804
        stream = source._get_source(target._format)
854
 
        self.assertIs(type(stream), repository.StreamSource)
 
805
        self.assertIs(type(stream), vf_repository.StreamSource)
855
806
 
856
807
    def test_source_to_remote_non_exact_pack_19(self):
857
808
        trans = self.make_smart_server('target')
860
811
        target = self.make_repository('target', format='1.6')
861
812
        target = repository.Repository.open(trans.base)
862
813
        stream_source = source._get_source(target._format)
863
 
        self.assertIs(type(stream_source), repository.StreamSource)
 
814
        self.assertIs(type(stream_source), vf_repository.StreamSource)
864
815
 
865
816
    def test_stream_source_to_knit(self):
866
817
        source = self.make_repository('source', format='pack-0.92')
867
818
        target = self.make_repository('target', format='dirstate')
868
819
        stream = source._get_source(target._format)
869
 
        self.assertIs(type(stream), repository.StreamSource)
 
820
        self.assertIs(type(stream), vf_repository.StreamSource)
870
821
 
871
822
 
872
823
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
874
825
 
875
826
    def setUp(self):
876
827
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
877
 
        self.builder = self.make_branch_builder('source',
878
 
            format='development6-rich-root')
 
828
        self.builder = self.make_branch_builder('source')
879
829
        self.builder.start_series()
880
830
        self.builder.build_snapshot('initial', None,
881
831
            [('add', ('', 'tree-root', 'directory', None))])
946
896
            inv = inventory.Inventory(revision_id='rev1a')
947
897
            inv.root.revision = 'rev1a'
948
898
            self.add_file(repo, inv, 'file1', 'rev1a', [])
 
899
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
949
900
            repo.add_inventory('rev1a', inv, [])
950
901
            revision = _mod_revision.Revision('rev1a',
951
902
                committer='jrandom@example.com', timestamp=0,
952
903
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
953
 
            repo.add_revision('rev1a',revision, inv)
 
904
            repo.add_revision('rev1a', revision, inv)
954
905
 
955
906
            # make rev1b, which has no Revision, but has an Inventory, and
956
907
            # file1
986
937
    def add_revision(self, repo, revision_id, inv, parent_ids):
987
938
        inv.revision_id = revision_id
988
939
        inv.root.revision = revision_id
 
940
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
989
941
        repo.add_inventory(revision_id, inv, parent_ids)
990
942
        revision = _mod_revision.Revision(revision_id,
991
943
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
992
944
            timezone=0, message='foo', parent_ids=parent_ids)
993
 
        repo.add_revision(revision_id,revision, inv)
 
945
        repo.add_revision(revision_id, revision, inv)
994
946
 
995
947
    def add_file(self, repo, inv, filename, revision, parents):
996
948
        file_id = filename + '-id'
1008
960
        """
1009
961
        broken_repo = self.make_broken_repository()
1010
962
        empty_repo = self.make_repository('empty-repo')
1011
 
        # See bug https://bugs.launchpad.net/bzr/+bug/389141 for information
1012
 
        # about why this was turned into expectFailure
1013
 
        self.expectFailure('new Stream fetch fills in missing compression'
1014
 
           ' parents (bug #389141)',
1015
 
           self.assertRaises, (errors.RevisionNotPresent, errors.BzrCheckError),
1016
 
                              empty_repo.fetch, broken_repo)
1017
 
        self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
1018
 
                          empty_repo.fetch, broken_repo)
 
963
        try:
 
964
            empty_repo.fetch(broken_repo)
 
965
        except (errors.RevisionNotPresent, errors.BzrCheckError):
 
966
            # Test successful: compression parent not being copied leads to
 
967
            # error.
 
968
            return
 
969
        empty_repo.lock_read()
 
970
        self.addCleanup(empty_repo.unlock)
 
971
        text = empty_repo.texts.get_record_stream(
 
972
            [('file2-id', 'rev3')], 'topological', True).next()
 
973
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
1019
974
 
1020
975
 
1021
976
class TestRepositoryPackCollection(TestCaseWithTransport):
1022
977
 
1023
978
    def get_format(self):
1024
 
        return bzrdir.format_registry.make_bzrdir('pack-0.92')
 
979
        return controldir.format_registry.make_bzrdir('pack-0.92')
1025
980
 
1026
981
    def get_packs(self):
1027
982
        format = self.get_format()
1030
985
 
1031
986
    def make_packs_and_alt_repo(self, write_lock=False):
1032
987
        """Create a pack repo with 3 packs, and access it via a second repo."""
1033
 
        tree = self.make_branch_and_tree('.')
 
988
        tree = self.make_branch_and_tree('.', format=self.get_format())
1034
989
        tree.lock_write()
1035
990
        self.addCleanup(tree.unlock)
1036
991
        rev1 = tree.commit('one')
1046
1001
        packs.ensure_loaded()
1047
1002
        return tree, r, packs, [rev1, rev2, rev3]
1048
1003
 
 
1004
    def test__clear_obsolete_packs(self):
 
1005
        packs = self.get_packs()
 
1006
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
1007
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1008
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1009
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1010
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1011
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1012
        res = packs._clear_obsolete_packs()
 
1013
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1014
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
 
1015
 
 
1016
    def test__clear_obsolete_packs_preserve(self):
 
1017
        packs = self.get_packs()
 
1018
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
1019
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1020
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1021
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1022
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1023
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1024
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
 
1025
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1026
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
 
1027
                         sorted(obsolete_pack_trans.list_dir('.')))
 
1028
 
1049
1029
    def test__max_pack_count(self):
1050
1030
        """The maximum pack count is a function of the number of revisions."""
1051
1031
        # no revisions - one pack, so that we can have a revision free repo
1071
1051
        # check some arbitrary big numbers
1072
1052
        self.assertEqual(25, packs._max_pack_count(112894))
1073
1053
 
 
1054
    def test_repr(self):
 
1055
        packs = self.get_packs()
 
1056
        self.assertContainsRe(repr(packs),
 
1057
            'RepositoryPackCollection(.*Repository(.*))')
 
1058
 
 
1059
    def test__obsolete_packs(self):
 
1060
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1061
        names = packs.names()
 
1062
        pack = packs.get_pack_by_name(names[0])
 
1063
        # Schedule this one for removal
 
1064
        packs._remove_pack_from_memory(pack)
 
1065
        # Simulate a concurrent update by renaming the .pack file and one of
 
1066
        # the indices
 
1067
        packs.transport.rename('packs/%s.pack' % (names[0],),
 
1068
                               'obsolete_packs/%s.pack' % (names[0],))
 
1069
        packs.transport.rename('indices/%s.iix' % (names[0],),
 
1070
                               'obsolete_packs/%s.iix' % (names[0],))
 
1071
        # Now trigger the obsoletion, and ensure that all the remaining files
 
1072
        # are still renamed
 
1073
        packs._obsolete_packs([pack])
 
1074
        self.assertEqual([n + '.pack' for n in names[1:]],
 
1075
                         sorted(packs._pack_transport.list_dir('.')))
 
1076
        # names[0] should not be present in the index anymore
 
1077
        self.assertEqual(names[1:],
 
1078
            sorted(set([osutils.splitext(n)[0] for n in
 
1079
                        packs._index_transport.list_dir('.')])))
 
1080
 
 
1081
    def test__obsolete_packs_missing_directory(self):
 
1082
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1083
        r.control_transport.rmdir('obsolete_packs')
 
1084
        names = packs.names()
 
1085
        pack = packs.get_pack_by_name(names[0])
 
1086
        # Schedule this one for removal
 
1087
        packs._remove_pack_from_memory(pack)
 
1088
        # Now trigger the obsoletion, and ensure that all the remaining files
 
1089
        # are still renamed
 
1090
        packs._obsolete_packs([pack])
 
1091
        self.assertEqual([n + '.pack' for n in names[1:]],
 
1092
                         sorted(packs._pack_transport.list_dir('.')))
 
1093
        # names[0] should not be present in the index anymore
 
1094
        self.assertEqual(names[1:],
 
1095
            sorted(set([osutils.splitext(n)[0] for n in
 
1096
                        packs._index_transport.list_dir('.')])))
 
1097
 
1074
1098
    def test_pack_distribution_zero(self):
1075
1099
        packs = self.get_packs()
1076
1100
        self.assertEqual([0], packs.pack_distribution(0))
1244
1268
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
1245
1269
        self.assertFalse(packs.reload_pack_names())
1246
1270
 
 
1271
    def test_reload_pack_names_preserves_pending(self):
 
1272
        # TODO: Update this to also test for pending-deleted names
 
1273
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1274
        # We will add one pack (via start_write_group + insert_record_stream),
 
1275
        # and remove another pack (via _remove_pack_from_memory)
 
1276
        orig_names = packs.names()
 
1277
        orig_at_load = packs._packs_at_load
 
1278
        to_remove_name = iter(orig_names).next()
 
1279
        r.start_write_group()
 
1280
        self.addCleanup(r.abort_write_group)
 
1281
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
 
1282
            ('text', 'rev'), (), None, 'content\n')])
 
1283
        new_pack = packs._new_pack
 
1284
        self.assertTrue(new_pack.data_inserted())
 
1285
        new_pack.finish()
 
1286
        packs.allocate(new_pack)
 
1287
        packs._new_pack = None
 
1288
        removed_pack = packs.get_pack_by_name(to_remove_name)
 
1289
        packs._remove_pack_from_memory(removed_pack)
 
1290
        names = packs.names()
 
1291
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
 
1292
        new_names = set([x[0][0] for x in new_nodes])
 
1293
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
 
1294
        self.assertEqual(set(names) - set(orig_names), new_names)
 
1295
        self.assertEqual(set([new_pack.name]), new_names)
 
1296
        self.assertEqual([to_remove_name],
 
1297
                         sorted([x[0][0] for x in deleted_nodes]))
 
1298
        packs.reload_pack_names()
 
1299
        reloaded_names = packs.names()
 
1300
        self.assertEqual(orig_at_load, packs._packs_at_load)
 
1301
        self.assertEqual(names, reloaded_names)
 
1302
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
 
1303
        new_names = set([x[0][0] for x in new_nodes])
 
1304
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
 
1305
        self.assertEqual(set(names) - set(orig_names), new_names)
 
1306
        self.assertEqual(set([new_pack.name]), new_names)
 
1307
        self.assertEqual([to_remove_name],
 
1308
                         sorted([x[0][0] for x in deleted_nodes]))
 
1309
 
 
1310
    def test_autopack_obsoletes_new_pack(self):
 
1311
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1312
        packs._max_pack_count = lambda x: 1
 
1313
        packs.pack_distribution = lambda x: [10]
 
1314
        r.start_write_group()
 
1315
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
 
1316
            ('bogus-rev',), (), None, 'bogus-content\n')])
 
1317
        # This should trigger an autopack, which will combine everything into a
 
1318
        # single pack file.
 
1319
        new_names = r.commit_write_group()
 
1320
        names = packs.names()
 
1321
        self.assertEqual(1, len(names))
 
1322
        self.assertEqual([names[0] + '.pack'],
 
1323
                         packs._pack_transport.list_dir('.'))
 
1324
 
1247
1325
    def test_autopack_reloads_and_stops(self):
1248
1326
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1249
1327
        # After we have determined what needs to be autopacked, trigger a
1261
1339
        self.assertEqual(tree.branch.repository._pack_collection.names(),
1262
1340
                         packs.names())
1263
1341
 
 
1342
    def test__save_pack_names(self):
 
1343
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1344
        names = packs.names()
 
1345
        pack = packs.get_pack_by_name(names[0])
 
1346
        packs._remove_pack_from_memory(pack)
 
1347
        packs._save_pack_names(obsolete_packs=[pack])
 
1348
        cur_packs = packs._pack_transport.list_dir('.')
 
1349
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
 
1350
        # obsolete_packs will also have stuff like .rix and .iix present.
 
1351
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
 
1352
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
 
1353
        self.assertEqual([pack.name], sorted(obsolete_names))
 
1354
 
 
1355
    def test__save_pack_names_already_obsoleted(self):
 
1356
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1357
        names = packs.names()
 
1358
        pack = packs.get_pack_by_name(names[0])
 
1359
        packs._remove_pack_from_memory(pack)
 
1360
        # We are going to simulate a concurrent autopack by manually obsoleting
 
1361
        # the pack directly.
 
1362
        packs._obsolete_packs([pack])
 
1363
        packs._save_pack_names(clear_obsolete_packs=True,
 
1364
                               obsolete_packs=[pack])
 
1365
        cur_packs = packs._pack_transport.list_dir('.')
 
1366
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
 
1367
        # Note that while we set clear_obsolete_packs=True, it should not
 
1368
        # delete a pack file that we have also scheduled for obsoletion.
 
1369
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
 
1370
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
 
1371
        self.assertEqual([pack.name], sorted(obsolete_names))
 
1372
 
 
1373
    def test_pack_no_obsolete_packs_directory(self):
 
1374
        """Bug #314314, don't fail if obsolete_packs directory does
 
1375
        not exist."""
 
1376
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1377
        r.control_transport.rmdir('obsolete_packs')
 
1378
        packs._clear_obsolete_packs()
 
1379
 
1264
1380
 
1265
1381
class TestPack(TestCaseWithTransport):
1266
1382
    """Tests for the Pack object."""
1330
1446
            index_class=BTreeGraphIndex,
1331
1447
            use_chk_index=False)
1332
1448
        pack = pack_repo.NewPack(collection)
 
1449
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
1333
1450
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
1334
1451
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
1335
1452
        self.assertIsInstance(pack._hash, type(osutils.md5()))
1346
1463
    """Tests for the packs repository Packer class."""
1347
1464
 
1348
1465
    def test_pack_optimizes_pack_order(self):
1349
 
        builder = self.make_branch_builder('.')
 
1466
        builder = self.make_branch_builder('.', format="1.9")
1350
1467
        builder.start_series()
1351
1468
        builder.build_snapshot('A', None, [
1352
1469
            ('add', ('', 'root-id', 'directory', None)),
1365
1482
        # Because of how they were built, they correspond to
1366
1483
        # ['D', 'C', 'B', 'A']
1367
1484
        packs = b.repository._pack_collection.packs
1368
 
        packer = pack_repo.Packer(b.repository._pack_collection,
 
1485
        packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
1369
1486
                                  packs, 'testing',
1370
1487
                                  revision_ids=['B', 'C'])
1371
1488
        # Now, when we are copying the B & C revisions, their pack files should
1385
1502
        return repo._pack_collection
1386
1503
 
1387
1504
    def test_open_pack_will_optimise(self):
1388
 
        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
 
1505
        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
1389
1506
                                            [], '.test')
1390
1507
        new_pack = packer.open_pack()
 
1508
        self.addCleanup(new_pack.abort) # ensure cleanup
1391
1509
        self.assertIsInstance(new_pack, pack_repo.NewPack)
1392
1510
        self.assertTrue(new_pack.revision_index._optimize_for_size)
1393
1511
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
1395
1513
        self.assertTrue(new_pack.signature_index._optimize_for_size)
1396
1514
 
1397
1515
 
 
1516
class TestGCCHKPacker(TestCaseWithTransport):
 
1517
 
 
1518
    def make_abc_branch(self):
 
1519
        builder = self.make_branch_builder('source')
 
1520
        builder.start_series()
 
1521
        builder.build_snapshot('A', None, [
 
1522
            ('add', ('', 'root-id', 'directory', None)),
 
1523
            ('add', ('file', 'file-id', 'file', 'content\n')),
 
1524
            ])
 
1525
        builder.build_snapshot('B', ['A'], [
 
1526
            ('add', ('dir', 'dir-id', 'directory', None))])
 
1527
        builder.build_snapshot('C', ['B'], [
 
1528
            ('modify', ('file-id', 'new content\n'))])
 
1529
        builder.finish_series()
 
1530
        return builder.get_branch()
 
1531
 
 
1532
    def make_branch_with_disjoint_inventory_and_revision(self):
 
1533
        """a repo with separate packs for a revisions Revision and Inventory.
 
1534
 
 
1535
        There will be one pack file that holds the Revision content, and one
 
1536
        for the Inventory content.
 
1537
 
 
1538
        :return: (repository,
 
1539
                  pack_name_with_rev_A_Revision,
 
1540
                  pack_name_with_rev_A_Inventory,
 
1541
                  pack_name_with_rev_C_content)
 
1542
        """
 
1543
        b_source = self.make_abc_branch()
 
1544
        b_base = b_source.bzrdir.sprout('base', revision_id='A').open_branch()
 
1545
        b_stacked = b_base.bzrdir.sprout('stacked', stacked=True).open_branch()
 
1546
        b_stacked.lock_write()
 
1547
        self.addCleanup(b_stacked.unlock)
 
1548
        b_stacked.fetch(b_source, 'B')
 
1549
        # Now re-open the stacked repo directly (no fallbacks) so that we can
 
1550
        # fill in the A rev.
 
1551
        repo_not_stacked = b_stacked.bzrdir.open_repository()
 
1552
        repo_not_stacked.lock_write()
 
1553
        self.addCleanup(repo_not_stacked.unlock)
 
1554
        # Now we should have a pack file with A's inventory, but not its
 
1555
        # Revision
 
1556
        self.assertEqual([('A',), ('B',)],
 
1557
                         sorted(repo_not_stacked.inventories.keys()))
 
1558
        self.assertEqual([('B',)],
 
1559
                         sorted(repo_not_stacked.revisions.keys()))
 
1560
        stacked_pack_names = repo_not_stacked._pack_collection.names()
 
1561
        # We have a couple names here, figure out which has A's inventory
 
1562
        for name in stacked_pack_names:
 
1563
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
 
1564
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
 
1565
            if ('A',) in keys:
 
1566
                inv_a_pack_name = name
 
1567
                break
 
1568
        else:
 
1569
            self.fail('Could not find pack containing A\'s inventory')
 
1570
        repo_not_stacked.fetch(b_source.repository, 'A')
 
1571
        self.assertEqual([('A',), ('B',)],
 
1572
                         sorted(repo_not_stacked.revisions.keys()))
 
1573
        new_pack_names = set(repo_not_stacked._pack_collection.names())
 
1574
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
 
1575
        self.assertEqual(1, len(rev_a_pack_names))
 
1576
        rev_a_pack_name = list(rev_a_pack_names)[0]
 
1577
        # Now fetch 'C', so we have a couple pack files to join
 
1578
        repo_not_stacked.fetch(b_source.repository, 'C')
 
1579
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
 
1580
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
 
1581
        self.assertEqual(1, len(rev_c_pack_names))
 
1582
        rev_c_pack_name = list(rev_c_pack_names)[0]
 
1583
        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
 
1584
                rev_c_pack_name)
 
1585
 
 
1586
    def test_pack_with_distant_inventories(self):
 
1587
        # See https://bugs.launchpad.net/bzr/+bug/437003
 
1588
        # When repacking, it is possible to have an inventory in a different
 
1589
        # pack file than the associated revision. An autopack can then come
 
1590
        # along, and miss that inventory, and complain.
 
1591
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
 
1592
         ) = self.make_branch_with_disjoint_inventory_and_revision()
 
1593
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
 
1594
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
 
1595
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
 
1596
                    [a_pack, c_pack], '.test-pack')
 
1597
        # This would raise ValueError in bug #437003, but should not raise an
 
1598
        # error once fixed.
 
1599
        packer.pack()
 
1600
 
 
1601
    def test_pack_with_missing_inventory(self):
 
1602
        # Similar to test_pack_with_missing_inventory, but this time, we force
 
1603
        # the A inventory to actually be gone from the repository.
 
1604
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
 
1605
         ) = self.make_branch_with_disjoint_inventory_and_revision()
 
1606
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
 
1607
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
 
1608
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
 
1609
            repo._pack_collection.all_packs(), '.test-pack')
 
1610
        e = self.assertRaises(ValueError, packer.pack)
 
1611
        packer.new_pack.abort()
 
1612
        self.assertContainsRe(str(e),
 
1613
            r"We are missing inventories for revisions: .*'A'")
 
1614
 
 
1615
 
1398
1616
class TestCrossFormatPacks(TestCaseWithTransport):
1399
1617
 
1400
1618
    def log_pack(self, hint=None):
1415
1633
        self.addCleanup(target.unlock)
1416
1634
        source = source_tree.branch.repository._get_source(target._format)
1417
1635
        self.orig_pack = target.pack
1418
 
        target.pack = self.log_pack
 
1636
        self.overrideAttr(target, "pack", self.log_pack)
1419
1637
        search = target.search_missing_revision_ids(
1420
 
            source_tree.branch.repository, tip)
 
1638
            source_tree.branch.repository, revision_ids=[tip])
1421
1639
        stream = source.get_stream(search)
1422
1640
        from_format = source_tree.branch.repository._format
1423
1641
        sink = target._get_sink()
1439
1657
        self.addCleanup(target.unlock)
1440
1658
        source = source_tree.branch.repository
1441
1659
        self.orig_pack = target.pack
1442
 
        target.pack = self.log_pack
 
1660
        self.overrideAttr(target, "pack", self.log_pack)
1443
1661
        target.fetch(source)
1444
1662
        if expect_pack_called:
1445
1663
            self.assertLength(1, self.calls)
1473
1691
    def test_IDS_format_same_no(self):
1474
1692
        # When the formats are the same, pack is not called.
1475
1693
        self.run_fetch('2a', '2a', False)
 
1694
 
 
1695
 
 
1696
class Test_LazyListJoin(tests.TestCase):

    def test__repr__(self):
        """repr() of a _LazyListJoin shows its qualified name and operands."""
        joined = repository._LazyListJoin(['a'], ['b'])
        expected = "bzrlib.repository._LazyListJoin((['a'], ['b']))"
        self.assertEqual(expected, repr(joined))
 
1703
 
 
1704
class TestFeatures(tests.TestCaseWithTransport):
 
1705
 
 
1706
    def test_open_with_present_feature(self):
 
1707
        self.addCleanup(
 
1708
            repository.RepositoryFormatMetaDir.unregister_feature,
 
1709
            "makes-cheese-sandwich")
 
1710
        repository.RepositoryFormatMetaDir.register_feature(
 
1711
            "makes-cheese-sandwich")
 
1712
        repo = self.make_repository('.')
 
1713
        repo.lock_write()
 
1714
        repo._format.features["makes-cheese-sandwich"] = "required"
 
1715
        repo._format.check_support_status(False)
 
1716
        repo.unlock()
 
1717
 
 
1718
    def test_open_with_missing_required_feature(self):
 
1719
        repo = self.make_repository('.')
 
1720
        repo.lock_write()
 
1721
        repo._format.features["makes-cheese-sandwich"] = "required"
 
1722
        self.assertRaises(errors.MissingFeature,
 
1723
            repo._format.check_support_status, False)