~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Jelmer Vernooij
  • Date: 2012-02-20 14:15:25 UTC
  • mto: (6471.1.4 iter-child-entries)
  • mto: This revision was merged to the branch mainline in revision 6472.
  • Revision ID: jelmer@samba.org-20120220141525-9azkfei62st8yc7w
Use inventories directly in fewer places.

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006, 2007 Canonical Ltd
 
1
# Copyright (C) 2006-2011 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
12
12
#
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
 
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16
16
 
17
17
"""Tests for the Repository facility that are not interface tests.
18
18
 
19
 
For interface tests see tests/repository_implementations/*.py.
 
19
For interface tests see tests/per_repository/*.py.
20
20
 
21
21
For concrete class tests see this file, and for storage formats tests
22
22
also see this file.
23
23
"""
24
24
 
25
25
from stat import S_ISDIR
26
 
from StringIO import StringIO
27
26
 
28
 
from bzrlib import symbol_versioning
29
27
import bzrlib
30
 
import bzrlib.bzrdir as bzrdir
31
 
import bzrlib.errors as errors
32
 
from bzrlib.errors import (NotBranchError,
33
 
                           NoSuchFile,
34
 
                           UnknownFormatError,
35
 
                           UnsupportedFormatError,
36
 
                           )
 
28
from bzrlib.errors import (
 
29
    UnknownFormatError,
 
30
    UnsupportedFormatError,
 
31
    )
 
32
from bzrlib import (
 
33
    btree_index,
 
34
    symbol_versioning,
 
35
    tests,
 
36
    transport,
 
37
    vf_search,
 
38
    )
 
39
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
 
40
from bzrlib.index import GraphIndex
37
41
from bzrlib.repository import RepositoryFormat
38
42
from bzrlib.tests import (
39
43
    TestCase,
40
44
    TestCaseWithTransport,
41
 
    test_knit,
42
45
    )
43
 
from bzrlib.transport import get_transport
44
 
from bzrlib.transport.memory import MemoryServer
45
 
from bzrlib.util import bencode
46
46
from bzrlib import (
 
47
    bzrdir,
 
48
    errors,
 
49
    inventory,
 
50
    osutils,
47
51
    repository,
 
52
    revision as _mod_revision,
48
53
    upgrade,
 
54
    versionedfile,
 
55
    vf_repository,
49
56
    workingtree,
50
57
    )
51
 
from bzrlib.repofmt import knitrepo, weaverepo
 
58
from bzrlib.repofmt import (
 
59
    groupcompress_repo,
 
60
    knitrepo,
 
61
    knitpack_repo,
 
62
    pack_repo,
 
63
    )
52
64
 
53
65
 
54
66
class TestDefaultFormat(TestCase):
56
68
    def test_get_set_default_format(self):
57
69
        old_default = bzrdir.format_registry.get('default')
58
70
        private_default = old_default().repository_format.__class__
59
 
        old_format = repository.RepositoryFormat.get_default_format()
 
71
        old_format = repository.format_registry.get_default()
60
72
        self.assertTrue(isinstance(old_format, private_default))
61
73
        def make_sample_bzrdir():
62
74
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
76
88
            bzrdir.format_registry.remove('default')
77
89
            bzrdir.format_registry.remove('sample')
78
90
            bzrdir.format_registry.register('default', old_default, '')
79
 
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
 
91
        self.assertIsInstance(repository.format_registry.get_default(),
80
92
                              old_format.__class__)
81
93
 
82
94
 
83
 
class SampleRepositoryFormat(repository.RepositoryFormat):
 
95
class SampleRepositoryFormat(repository.RepositoryFormatMetaDir):
84
96
    """A sample format
85
97
 
86
 
    this format is initializable, unsupported to aid in testing the 
 
98
    this format is initializable, unsupported to aid in testing the
87
99
    open and open(unsupported=True) routines.
88
100
    """
89
101
 
90
 
    def get_format_string(self):
 
102
    @classmethod
 
103
    def get_format_string(cls):
91
104
        """See RepositoryFormat.get_format_string()."""
92
105
        return "Sample .bzr repository format."
93
106
 
104
117
        return "opened repository."
105
118
 
106
119
 
 
120
class SampleExtraRepositoryFormat(repository.RepositoryFormat):
 
121
    """A sample format that can not be used in a metadir
 
122
 
 
123
    """
 
124
 
 
125
    def get_format_string(self):
 
126
        raise NotImplementedError
 
127
 
 
128
 
107
129
class TestRepositoryFormat(TestCaseWithTransport):
108
130
    """Tests for the Repository format detection used by the bzr meta dir facility.BzrBranchFormat facility."""
109
131
 
110
132
    def test_find_format(self):
111
133
        # is the right format object found for a repository?
112
134
        # create a branch with a few known format objects.
113
 
        # this is not quite the same as 
 
135
        # this is not quite the same as
114
136
        self.build_tree(["foo/", "bar/"])
115
137
        def check_format(format, url):
116
138
            dir = format._matchingbzrdir.initialize(url)
117
139
            format.initialize(dir)
118
 
            t = get_transport(url)
119
 
            found_format = repository.RepositoryFormat.find_format(dir)
120
 
            self.failUnless(isinstance(found_format, format.__class__))
121
 
        check_format(weaverepo.RepositoryFormat7(), "bar")
122
 
        
 
140
            t = transport.get_transport_from_path(url)
 
141
            found_format = repository.RepositoryFormatMetaDir.find_format(dir)
 
142
            self.assertIsInstance(found_format, format.__class__)
 
143
        check_format(repository.format_registry.get_default(), "bar")
 
144
 
123
145
    def test_find_format_no_repository(self):
124
146
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
125
147
        self.assertRaises(errors.NoRepositoryPresent,
126
 
                          repository.RepositoryFormat.find_format,
 
148
                          repository.RepositoryFormatMetaDir.find_format,
127
149
                          dir)
128
150
 
 
151
    def test_from_string(self):
 
152
        self.assertIsInstance(
 
153
            SampleRepositoryFormat.from_string(
 
154
                "Sample .bzr repository format."),
 
155
            SampleRepositoryFormat)
 
156
        self.assertRaises(AssertionError,
 
157
            SampleRepositoryFormat.from_string,
 
158
                "Different .bzr repository format.")
 
159
 
129
160
    def test_find_format_unknown_format(self):
130
161
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
131
162
        SampleRepositoryFormat().initialize(dir)
132
163
        self.assertRaises(UnknownFormatError,
133
 
                          repository.RepositoryFormat.find_format,
 
164
                          repository.RepositoryFormatMetaDir.find_format,
134
165
                          dir)
135
166
 
 
167
    def test_find_format_with_features(self):
 
168
        tree = self.make_branch_and_tree('.', format='2a')
 
169
        tree.branch.repository.update_feature_flags({"name": "necessity"})
 
170
        found_format = repository.RepositoryFormatMetaDir.find_format(tree.bzrdir)
 
171
        self.assertIsInstance(found_format, repository.RepositoryFormatMetaDir)
 
172
        self.assertEquals(found_format.features.get("name"), "necessity")
 
173
        self.assertRaises(errors.MissingFeature, found_format.check_support_status,
 
174
            True)
 
175
        self.addCleanup(repository.RepositoryFormatMetaDir.unregister_feature,
 
176
            "name")
 
177
        repository.RepositoryFormatMetaDir.register_feature("name")
 
178
        found_format.check_support_status(True)
 
179
 
136
180
    def test_register_unregister_format(self):
 
181
        # Test deprecated format registration functions
137
182
        format = SampleRepositoryFormat()
138
183
        # make a control dir
139
184
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
140
185
        # make a repo
141
186
        format.initialize(dir)
142
187
        # register a format for it.
143
 
        repository.RepositoryFormat.register_format(format)
 
188
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
 
189
            repository.RepositoryFormat.register_format, format)
144
190
        # which repository.Open will refuse (not supported)
145
 
        self.assertRaises(UnsupportedFormatError, repository.Repository.open, self.get_url())
 
191
        self.assertRaises(UnsupportedFormatError, repository.Repository.open,
 
192
            self.get_url())
146
193
        # but open(unsupported) will work
147
194
        self.assertEqual(format.open(dir), "opened repository.")
148
195
        # unregister the format
149
 
        repository.RepositoryFormat.unregister_format(format)
150
 
 
151
 
 
152
 
class TestFormat6(TestCaseWithTransport):
153
 
 
154
 
    def test_no_ancestry_weave(self):
155
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
156
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
157
 
        # We no longer need to create the ancestry.weave file
158
 
        # since it is *never* used.
159
 
        self.assertRaises(NoSuchFile,
160
 
                          control.transport.get,
161
 
                          'ancestry.weave')
162
 
 
163
 
 
164
 
class TestFormat7(TestCaseWithTransport):
165
 
    
166
 
    def test_disk_layout(self):
167
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
168
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
169
 
        # in case of side effects of locking.
170
 
        repo.lock_write()
171
 
        repo.unlock()
172
 
        # we want:
173
 
        # format 'Bazaar-NG Repository format 7'
174
 
        # lock ''
175
 
        # inventory.weave == empty_weave
176
 
        # empty revision-store directory
177
 
        # empty weaves directory
178
 
        t = control.get_repository_transport(None)
179
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
180
 
                             t.get('format').read())
181
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
182
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
183
 
        self.assertEqualDiff('# bzr weave file v5\n'
184
 
                             'w\n'
185
 
                             'W\n',
186
 
                             t.get('inventory.weave').read())
187
 
 
188
 
    def test_shared_disk_layout(self):
189
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
190
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
191
 
        # we want:
192
 
        # format 'Bazaar-NG Repository format 7'
193
 
        # inventory.weave == empty_weave
194
 
        # empty revision-store directory
195
 
        # empty weaves directory
196
 
        # a 'shared-storage' marker file.
197
 
        # lock is not present when unlocked
198
 
        t = control.get_repository_transport(None)
199
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
200
 
                             t.get('format').read())
201
 
        self.assertEqualDiff('', t.get('shared-storage').read())
202
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
203
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
204
 
        self.assertEqualDiff('# bzr weave file v5\n'
205
 
                             'w\n'
206
 
                             'W\n',
207
 
                             t.get('inventory.weave').read())
208
 
        self.assertFalse(t.has('branch-lock'))
209
 
 
210
 
    def test_creates_lockdir(self):
211
 
        """Make sure it appears to be controlled by a LockDir existence"""
212
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
213
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
214
 
        t = control.get_repository_transport(None)
215
 
        # TODO: Should check there is a 'lock' toplevel directory, 
216
 
        # regardless of contents
217
 
        self.assertFalse(t.has('lock/held/info'))
218
 
        repo.lock_write()
219
 
        try:
220
 
            self.assertTrue(t.has('lock/held/info'))
221
 
        finally:
222
 
            # unlock so we don't get a warning about failing to do so
223
 
            repo.unlock()
224
 
 
225
 
    def test_uses_lockdir(self):
226
 
        """repo format 7 actually locks on lockdir"""
227
 
        base_url = self.get_url()
228
 
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
229
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
230
 
        t = control.get_repository_transport(None)
231
 
        repo.lock_write()
232
 
        repo.unlock()
233
 
        del repo
234
 
        # make sure the same lock is created by opening it
235
 
        repo = repository.Repository.open(base_url)
236
 
        repo.lock_write()
237
 
        self.assertTrue(t.has('lock/held/info'))
238
 
        repo.unlock()
239
 
        self.assertFalse(t.has('lock/held/info'))
240
 
 
241
 
    def test_shared_no_tree_disk_layout(self):
242
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
243
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
244
 
        repo.set_make_working_trees(False)
245
 
        # we want:
246
 
        # format 'Bazaar-NG Repository format 7'
247
 
        # lock ''
248
 
        # inventory.weave == empty_weave
249
 
        # empty revision-store directory
250
 
        # empty weaves directory
251
 
        # a 'shared-storage' marker file.
252
 
        t = control.get_repository_transport(None)
253
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
254
 
                             t.get('format').read())
255
 
        ## self.assertEqualDiff('', t.get('lock').read())
256
 
        self.assertEqualDiff('', t.get('shared-storage').read())
257
 
        self.assertEqualDiff('', t.get('no-working-trees').read())
258
 
        repo.set_make_working_trees(True)
259
 
        self.assertFalse(t.has('no-working-trees'))
260
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
261
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
262
 
        self.assertEqualDiff('# bzr weave file v5\n'
263
 
                             'w\n'
264
 
                             'W\n',
265
 
                             t.get('inventory.weave').read())
 
196
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
 
197
            repository.RepositoryFormat.unregister_format, format)
 
198
 
 
199
 
 
200
class TestRepositoryFormatRegistry(TestCase):
 
201
 
 
202
    def setUp(self):
 
203
        super(TestRepositoryFormatRegistry, self).setUp()
 
204
        self.registry = repository.RepositoryFormatRegistry()
 
205
 
 
206
    def test_register_unregister_format(self):
 
207
        format = SampleRepositoryFormat()
 
208
        self.registry.register(format)
 
209
        self.assertEquals(format, self.registry.get("Sample .bzr repository format."))
 
210
        self.registry.remove(format)
 
211
        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")
 
212
 
 
213
    def test_get_all(self):
 
214
        format = SampleRepositoryFormat()
 
215
        self.assertEquals([], self.registry._get_all())
 
216
        self.registry.register(format)
 
217
        self.assertEquals([format], self.registry._get_all())
 
218
 
 
219
    def test_register_extra(self):
 
220
        format = SampleExtraRepositoryFormat()
 
221
        self.assertEquals([], self.registry._get_all())
 
222
        self.registry.register_extra(format)
 
223
        self.assertEquals([format], self.registry._get_all())
 
224
 
 
225
    def test_register_extra_lazy(self):
 
226
        self.assertEquals([], self.registry._get_all())
 
227
        self.registry.register_extra_lazy("bzrlib.tests.test_repository",
 
228
            "SampleExtraRepositoryFormat")
 
229
        formats = self.registry._get_all()
 
230
        self.assertEquals(1, len(formats))
 
231
        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
266
232
 
267
233
 
268
234
class TestFormatKnit1(TestCaseWithTransport):
269
 
    
 
235
 
 
236
    def test_attribute__fetch_order(self):
 
237
        """Knits need topological data insertion."""
 
238
        repo = self.make_repository('.',
 
239
                format=bzrdir.format_registry.get('knit')())
 
240
        self.assertEqual('topological', repo._format._fetch_order)
 
241
 
 
242
    def test_attribute__fetch_uses_deltas(self):
 
243
        """Knits reuse deltas."""
 
244
        repo = self.make_repository('.',
 
245
                format=bzrdir.format_registry.get('knit')())
 
246
        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
247
 
270
248
    def test_disk_layout(self):
271
249
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
272
250
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
286
264
        # self.assertEqualDiff('', t.get('lock').read())
287
265
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
288
266
        self.check_knits(t)
 
267
        # Check per-file knits.
 
268
        branch = control.create_branch()
 
269
        tree = control.create_workingtree()
 
270
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
 
271
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
 
272
        tree.commit('1st post', rev_id='foo')
 
273
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
 
274
            '\nfoo fulltext 0 81  :')
289
275
 
290
 
    def assertHasKnit(self, t, knit_name):
 
276
    def assertHasKnit(self, t, knit_name, extra_content=''):
291
277
        """Assert that knit_name exists on t."""
292
 
        self.assertEqualDiff('# bzr knit index 8\n',
 
278
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
293
279
                             t.get(knit_name + '.kndx').read())
294
 
        # no default content
295
 
        self.assertTrue(t.has(knit_name + '.knit'))
296
280
 
297
281
    def check_knits(self, t):
298
282
        """check knit content for a repository."""
342
326
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
343
327
        self.check_knits(t)
344
328
 
345
 
 
346
 
class KnitRepositoryStreamTests(test_knit.KnitTests):
347
 
    """Tests for knitrepo._get_stream_as_bytes."""
348
 
 
349
 
    def test_get_stream_as_bytes(self):
350
 
        # Make a simple knit
351
 
        k1 = self.make_test_knit()
352
 
        k1.add_lines('text-a', [], test_knit.split_lines(test_knit.TEXT_1))
353
 
        
354
 
        # Serialise it, check the output.
355
 
        bytes = knitrepo._get_stream_as_bytes(k1, ['text-a'])
356
 
        data = bencode.bdecode(bytes)
357
 
        format, record = data
358
 
        self.assertEqual('knit-plain', format)
359
 
        self.assertEqual(['text-a', ['fulltext'], []], record[:3])
360
 
        self.assertRecordContentEqual(k1, 'text-a', record[3])
361
 
 
362
 
    def test_get_stream_as_bytes_all(self):
363
 
        """Get a serialised data stream for all the records in a knit.
364
 
 
365
 
        Much like test_get_stream_all, except for get_stream_as_bytes.
 
329
    def test_deserialise_sets_root_revision(self):
 
330
        """We must have a inventory.root.revision
 
331
 
 
332
        Old versions of the XML5 serializer did not set the revision_id for
 
333
        the whole inventory. So we grab the one from the expected text. Which
 
334
        is valid when the api is not being abused.
366
335
        """
367
 
        k1 = self.make_test_knit()
368
 
        # Insert the same data as BasicKnitTests.test_knit_join, as they seem
369
 
        # to cover a range of cases (no parents, one parent, multiple parents).
370
 
        test_data = [
371
 
            ('text-a', [], test_knit.TEXT_1),
372
 
            ('text-b', ['text-a'], test_knit.TEXT_1),
373
 
            ('text-c', [], test_knit.TEXT_1),
374
 
            ('text-d', ['text-c'], test_knit.TEXT_1),
375
 
            ('text-m', ['text-b', 'text-d'], test_knit.TEXT_1),
376
 
           ]
377
 
        expected_data_list = [
378
 
            # version, options, parents
379
 
            ('text-a', ['fulltext'], []),
380
 
            ('text-b', ['line-delta'], ['text-a']),
381
 
            ('text-c', ['fulltext'], []),
382
 
            ('text-d', ['line-delta'], ['text-c']),
383
 
            ('text-m', ['line-delta'], ['text-b', 'text-d']),
384
 
            ]
385
 
        for version_id, parents, lines in test_data:
386
 
            k1.add_lines(version_id, parents, test_knit.split_lines(lines))
387
 
 
388
 
        bytes = knitrepo._get_stream_as_bytes(
389
 
            k1, ['text-a', 'text-b', 'text-c', 'text-d', 'text-m'])
390
 
 
391
 
        data = bencode.bdecode(bytes)
392
 
        format = data.pop(0)
393
 
        self.assertEqual('knit-plain', format)
394
 
 
395
 
        for expected, actual in zip(expected_data_list, data):
396
 
            expected_version = expected[0]
397
 
            expected_options = expected[1]
398
 
            expected_parents = expected[2]
399
 
            version, options, parents, bytes = actual
400
 
            self.assertEqual(expected_version, version)
401
 
            self.assertEqual(expected_options, options)
402
 
            self.assertEqual(expected_parents, parents)
403
 
            self.assertRecordContentEqual(k1, version, bytes)
 
336
        repo = self.make_repository('.',
 
337
                format=bzrdir.format_registry.get('knit')())
 
338
        inv_xml = '<inventory format="5">\n</inventory>\n'
 
339
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
 
340
        self.assertEqual('test-rev-id', inv.root.revision)
 
341
 
 
342
    def test_deserialise_uses_global_revision_id(self):
 
343
        """If it is set, then we re-use the global revision id"""
 
344
        repo = self.make_repository('.',
 
345
                format=bzrdir.format_registry.get('knit')())
 
346
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
 
347
                   '</inventory>\n')
 
348
        # Arguably, the deserialise_inventory should detect a mismatch, and
 
349
        # raise an error, rather than silently using one revision_id over the
 
350
        # other.
 
351
        self.assertRaises(AssertionError, repo._deserialise_inventory,
 
352
            'test-rev-id', inv_xml)
 
353
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
 
354
        self.assertEqual('other-rev-id', inv.root.revision)
 
355
 
 
356
    def test_supports_external_lookups(self):
 
357
        repo = self.make_repository('.',
 
358
                format=bzrdir.format_registry.get('knit')())
 
359
        self.assertFalse(repo._format.supports_external_lookups)
404
360
 
405
361
 
406
362
class DummyRepository(object):
407
363
    """A dummy repository for testing."""
408
364
 
 
365
    _format = None
409
366
    _serializer = None
410
367
 
411
368
    def supports_rich_root(self):
 
369
        if self._format is not None:
 
370
            return self._format.rich_root_data
412
371
        return False
413
372
 
 
373
    def get_graph(self):
 
374
        raise NotImplementedError
 
375
 
 
376
    def get_parent_map(self, revision_ids):
 
377
        raise NotImplementedError
 
378
 
414
379
 
415
380
class InterDummy(repository.InterRepository):
416
381
    """An inter-repository optimised code path for DummyRepository.
417
382
 
418
383
    This is for use during testing where we use DummyRepository as repositories
419
384
    so that none of the default regsitered inter-repository classes will
420
 
    match.
 
385
    MATCH.
421
386
    """
422
387
 
423
388
    @staticmethod
424
389
    def is_compatible(repo_source, repo_target):
425
390
        """InterDummy is compatible with DummyRepository."""
426
 
        return (isinstance(repo_source, DummyRepository) and 
 
391
        return (isinstance(repo_source, DummyRepository) and
427
392
            isinstance(repo_target, DummyRepository))
428
393
 
429
394
 
437
402
        # classes do not barf inappropriately when a surprising repository type
438
403
        # is handed to them.
439
404
        dummy_a = DummyRepository()
 
405
        dummy_a._format = RepositoryFormat()
 
406
        dummy_a._format.supports_full_versioned_files = True
440
407
        dummy_b = DummyRepository()
 
408
        dummy_b._format = RepositoryFormat()
 
409
        dummy_b._format.supports_full_versioned_files = True
441
410
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
442
411
 
443
412
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
444
413
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
445
 
        
 
414
 
446
415
        The effective default is now InterSameDataRepository because there is
447
416
        no actual sane default in the presence of incompatible data models.
448
417
        """
449
418
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
450
 
        self.assertEqual(repository.InterSameDataRepository,
 
419
        self.assertEqual(vf_repository.InterSameDataRepository,
451
420
                         inter_repo.__class__)
452
421
        self.assertEqual(repo_a, inter_repo.source)
453
422
        self.assertEqual(repo_b, inter_repo.target)
459
428
        # pair that it returns true on for the is_compatible static method
460
429
        # check
461
430
        dummy_a = DummyRepository()
 
431
        dummy_a._format = RepositoryFormat()
462
432
        dummy_b = DummyRepository()
 
433
        dummy_b._format = RepositoryFormat()
463
434
        repo = self.make_repository('.')
464
435
        # hack dummies to look like repo somewhat.
465
436
        dummy_a._serializer = repo._serializer
 
437
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
 
438
        dummy_a._format.rich_root_data = repo._format.rich_root_data
 
439
        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
466
440
        dummy_b._serializer = repo._serializer
 
441
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
 
442
        dummy_b._format.rich_root_data = repo._format.rich_root_data
 
443
        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
467
444
        repository.InterRepository.register_optimiser(InterDummy)
468
445
        try:
469
446
            # we should get the default for something InterDummy returns False
482
459
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
483
460
 
484
461
 
485
 
class TestInterWeaveRepo(TestCaseWithTransport):
486
 
 
487
 
    def test_is_compatible_and_registered(self):
488
 
        # InterWeaveRepo is compatible when either side
489
 
        # is a format 5/6/7 branch
490
 
        from bzrlib.repofmt import knitrepo, weaverepo
491
 
        formats = [weaverepo.RepositoryFormat5(),
492
 
                   weaverepo.RepositoryFormat6(),
493
 
                   weaverepo.RepositoryFormat7()]
494
 
        incompatible_formats = [weaverepo.RepositoryFormat4(),
495
 
                                knitrepo.RepositoryFormatKnit1(),
496
 
                                ]
497
 
        repo_a = self.make_repository('a')
498
 
        repo_b = self.make_repository('b')
499
 
        is_compatible = repository.InterWeaveRepo.is_compatible
500
 
        for source in incompatible_formats:
501
 
            # force incompatible left then right
502
 
            repo_a._format = source
503
 
            repo_b._format = formats[0]
504
 
            self.assertFalse(is_compatible(repo_a, repo_b))
505
 
            self.assertFalse(is_compatible(repo_b, repo_a))
506
 
        for source in formats:
507
 
            repo_a._format = source
508
 
            for target in formats:
509
 
                repo_b._format = target
510
 
                self.assertTrue(is_compatible(repo_a, repo_b))
511
 
        self.assertEqual(repository.InterWeaveRepo,
512
 
                         repository.InterRepository.get(repo_a,
513
 
                                                        repo_b).__class__)
 
462
class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):
 
463
 
 
464
    @classmethod
 
465
    def get_format_string(cls):
 
466
        return "Test Format 1"
 
467
 
 
468
 
 
469
class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):
 
470
 
 
471
    @classmethod
 
472
    def get_format_string(cls):
 
473
        return "Test Format 2"
514
474
 
515
475
 
516
476
class TestRepositoryConverter(TestCaseWithTransport):
517
477
 
518
478
    def test_convert_empty(self):
519
 
        t = get_transport(self.get_url('.'))
 
479
        source_format = TestRepositoryFormat1()
 
480
        target_format = TestRepositoryFormat2()
 
481
        repository.format_registry.register(source_format)
 
482
        self.addCleanup(repository.format_registry.remove,
 
483
            source_format)
 
484
        repository.format_registry.register(target_format)
 
485
        self.addCleanup(repository.format_registry.remove,
 
486
            target_format)
 
487
        t = self.get_transport()
520
488
        t.mkdir('repository')
521
489
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
522
 
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
523
 
        target_format = knitrepo.RepositoryFormatKnit1()
 
490
        repo = TestRepositoryFormat1().initialize(repo_dir)
524
491
        converter = repository.CopyConverter(target_format)
525
492
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
526
493
        try:
531
498
        self.assertTrue(isinstance(target_format, repo._format.__class__))
532
499
 
533
500
 
534
 
class TestMisc(TestCase):
535
 
    
536
 
    def test_unescape_xml(self):
537
 
        """We get some kind of error when malformed entities are passed"""
538
 
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;') 
539
 
 
540
 
 
541
501
class TestRepositoryFormatKnit3(TestCaseWithTransport):

    def _make_knit3_repository(self):
        # Build a metadir repository at '.' whose repository format is knit3.
        meta = bzrdir.BzrDirMetaFormat1()
        meta.repository_format = knitrepo.RepositoryFormatKnit3()
        return self.make_repository('.', format=meta)

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        repo = self._make_knit3_repository()
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        repo = self._make_knit3_repository()
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_convert(self):
        """Ensure the upgrade adds weaves for roots"""
        tree = self.make_branch_and_tree('.', bzrdir.BzrDirMetaFormat1())
        tree.commit("Dull commit", rev_id="dull")
        rev_tree = tree.branch.repository.revision_tree('dull')
        rev_tree.lock_read()
        try:
            # Before the upgrade there is no per-file text for the root.
            self.assertRaises(errors.NoSuchFile, rev_tree.get_file_lines,
                rev_tree.get_root_id())
        finally:
            rev_tree.unlock()
        target = bzrdir.BzrDirMetaFormat1()
        target.repository_format = knitrepo.RepositoryFormatKnit3()
        upgrade.Convert('.', target)
        tree = workingtree.WorkingTree.open('.')
        rev_tree = tree.branch.repository.revision_tree('dull')
        rev_tree.lock_read()
        try:
            # After the upgrade the root text must be retrievable.
            rev_tree.get_file_lines(rev_tree.get_root_id())
        finally:
            rev_tree.unlock()
        tree.commit("Another dull commit", rev_id='dull2')
        rev_tree = tree.branch.repository.revision_tree('dull2')
        rev_tree.lock_read()
        self.addCleanup(rev_tree.unlock)
        self.assertEqual('dull',
                rev_tree.get_file_revision(rev_tree.get_root_id()))

    def test_supports_external_lookups(self):
        repo = self._make_knit3_repository()
        self.assertFalse(repo._format.supports_external_lookups)
 
 
554
class Test2a(tests.TestCaseWithMemoryTransport):
    """Tests specific to the 2a (groupcompress CHK) repository format."""

    def test_chk_bytes_uses_custom_btree_parser(self):
        mt = self.make_branch_and_memory_tree('test', format='2a')
        mt.lock_write()
        self.addCleanup(mt.unlock)
        mt.add([''], ['root-id'])
        mt.commit('first')
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
        # It should also work if we re-open the repo
        repo = mt.branch.repository.bzrdir.open_repository()
        repo.lock_read()
        self.addCleanup(repo.unlock)
        index = repo.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)

    def test_fetch_combines_groups(self):
        # NOTE(review): this method used to be defined three times with
        # identical bodies; the earlier copies were silently shadowed dead
        # code, so only one definition is kept.
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])

    def test_format_pack_compresses_True(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.lock_write()
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        tree.unlock()
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
        inv.parent_id_basename_to_file_id._ensure_root()
        inv.id_to_entry._ensure_root()
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
        self.assertEqual(65536,
            inv.parent_id_basename_to_file_id._root_node.maximum_size)

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()
        # 2 to combine
        tree.commit('2')
        tree.commit('3')
        # 'all_names' rather than 'all': avoid shadowing the builtin.
        all_names = tree.branch.repository._pack_collection.names()
        combine = list(set(all_names) - set(to_keep))
        self.assertLength(3, all_names)
        self.assertLength(2, combine)
        tree.branch.repository.pack(hint=combine)
        final = tree.branch.repository._pack_collection.names()
        self.assertLength(2, final)
        self.assertFalse(combine[0] in final)
        self.assertFalse(combine[1] in final)
        self.assertSubset(to_keep, final)

    def test_stream_source_to_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='2a')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
                            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = vf_search.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)

    def test_inconsistency_fatal(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
        self.assertFalse(repo.texts._index._inconsistency_fatal)
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
 
777
 
 
778
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):

    def _stream_source_between(self, source_format, target_format):
        # Create a 'source' and a 'target' repository and return the stream
        # source used to copy data between them.
        source = self.make_repository('source', format=source_format)
        target = self.make_repository('target', format=target_format)
        return source._get_source(target._format)

    def test_source_to_exact_pack_092(self):
        stream_source = self._stream_source_between('pack-0.92', 'pack-0.92')
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        stream_source = self._stream_source_between(
            'rich-root-pack', 'rich-root-pack')
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        stream_source = self._stream_source_between('1.9', '1.9')
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        stream_source = self._stream_source_between(
            '1.9-rich-root', '1.9-rich-root')
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        self.make_repository('target', format='1.9')
        # Re-open the target through the smart server.
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        stream = self._stream_source_between('pack-0.92', '1.9')
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        stream = self._stream_source_between('1.9', '1.9-rich-root')
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        self.make_repository('target', format='1.6')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), vf_repository.StreamSource)

    def test_stream_source_to_knit(self):
        stream = self._stream_source_between('pack-0.92', 'dirstate')
        self.assertIs(type(stream), vf_repository.StreamSource)
840
 
 
841
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        # Order is irrelevant, so compare sorted lists.
        self.assertEqual(sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))

    def test_simple(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.assertParentIds(['revid1'], ['revid2'])

    def test_not_first_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        self.assertParentIds(['revid1'], ['revid3', 'revid2'])

    def test_not_null(self):
        self.assertParentIds([], ['initial'])

    def test_not_null_set(self):
        self.builder.build_snapshot('revid1', None, [])
        self.assertParentIds([], [_mod_revision.NULL_REVISION])

    def test_ghost(self):
        self.builder.build_snapshot('revid1', None, [])
        self.assertParentIds(['initial'], ['ghost', 'revid1'])

    def test_ghost_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        self.assertParentIds(['ghost', 'initial'], ['revid2', 'revid1'])

    def test_righthand_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        self.assertParentIds(['revid1', 'revid2b'], ['revid3', 'revid2a'])
898
 
 
899
class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        """Build a repository whose per-file ancestry is deliberately broken.

        Returns a repository containing: a sane revision 'rev1a'; an
        inventory-only 'rev1b' (no Revision object); 'rev2' whose file1 text
        claims 'rev1b' as a parent even though 'rev1a' does not mention it;
        and 'rev3' whose file2 text refers to the ghost 'rev1c'.
        """
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon.  Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups = []
        try:
            repo.lock_write()
            cleanups.append(repo.unlock)
            repo.start_write_group()
            # NOTE: cleanups run in reverse, so the write group is committed
            # before the lock is released.
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id='rev1a')
            inv.root.revision = 'rev1a'
            self.add_file(repo, inv, 'file1', 'rev1a', [])
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
            repo.add_inventory('rev1a', inv, [])
            revision = _mod_revision.Revision('rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision('rev1a', revision, inv)

            # make rev1b, which has no Revision, but has an Inventory, and
            # file1
            inv = inventory.Inventory(revision_id='rev1b')
            inv.root.revision = 'rev1b'
            self.add_file(repo, inv, 'file1', 'rev1b', [])
            repo.add_inventory('rev1b', inv, [])

            # make rev2, with file1 and file2
            # file2 is sane
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
            self.add_file(repo, inv, 'file2', 'rev2', [])
            self.add_revision(repo, 'rev2', inv, ['rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
            return repo
        finally:
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        """Add *inv* plus a matching root text and Revision object to *repo*."""
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id, revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        """Add a one-line file entry to *inv* and its text to *repo*.

        *parents* are revision ids used to build the text's parent keys.
        """
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        try:
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
            # error.
            return
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        text = empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True).next()
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
993
 
 
994
class TestRepositoryPackCollection(TestCaseWithTransport):
 
995
 
 
996
    def get_format(self):
 
997
        return bzrdir.format_registry.make_bzrdir('pack-0.92')
 
998
 
 
999
    def get_packs(self):
 
1000
        format = self.get_format()
 
1001
        repo = self.make_repository('.', format=format)
 
1002
        return repo._pack_collection
 
1003
 
 
1004
    def make_packs_and_alt_repo(self, write_lock=False):
 
1005
        """Create a pack repo with 3 packs, and access it via a second repo."""
 
1006
        tree = self.make_branch_and_tree('.', format=self.get_format())
 
1007
        tree.lock_write()
 
1008
        self.addCleanup(tree.unlock)
 
1009
        rev1 = tree.commit('one')
 
1010
        rev2 = tree.commit('two')
 
1011
        rev3 = tree.commit('three')
 
1012
        r = repository.Repository.open('.')
 
1013
        if write_lock:
 
1014
            r.lock_write()
 
1015
        else:
 
1016
            r.lock_read()
 
1017
        self.addCleanup(r.unlock)
 
1018
        packs = r._pack_collection
 
1019
        packs.ensure_loaded()
 
1020
        return tree, r, packs, [rev1, rev2, rev3]
 
1021
 
 
1022
    def test__clear_obsolete_packs(self):
 
1023
        packs = self.get_packs()
 
1024
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
1025
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1026
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1027
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1028
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1029
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1030
        res = packs._clear_obsolete_packs()
 
1031
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1032
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
 
1033
 
 
1034
    def test__clear_obsolete_packs_preserve(self):
 
1035
        packs = self.get_packs()
 
1036
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
1037
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1038
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1039
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1040
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1041
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1042
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
 
1043
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1044
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
 
1045
                         sorted(obsolete_pack_trans.list_dir('.')))
 
1046
 
 
1047
    def test__max_pack_count(self):
 
1048
        """The maximum pack count is a function of the number of revisions."""
 
1049
        # no revisions - one pack, so that we can have a revision free repo
 
1050
        # without it blowing up
 
1051
        packs = self.get_packs()
 
1052
        self.assertEqual(1, packs._max_pack_count(0))
 
1053
        # after that the sum of the digits, - check the first 1-9
 
1054
        self.assertEqual(1, packs._max_pack_count(1))
 
1055
        self.assertEqual(2, packs._max_pack_count(2))
 
1056
        self.assertEqual(3, packs._max_pack_count(3))
 
1057
        self.assertEqual(4, packs._max_pack_count(4))
 
1058
        self.assertEqual(5, packs._max_pack_count(5))
 
1059
        self.assertEqual(6, packs._max_pack_count(6))
 
1060
        self.assertEqual(7, packs._max_pack_count(7))
 
1061
        self.assertEqual(8, packs._max_pack_count(8))
 
1062
        self.assertEqual(9, packs._max_pack_count(9))
 
1063
        # check the boundary cases with two digits for the next decade
 
1064
        self.assertEqual(1, packs._max_pack_count(10))
 
1065
        self.assertEqual(2, packs._max_pack_count(11))
 
1066
        self.assertEqual(10, packs._max_pack_count(19))
 
1067
        self.assertEqual(2, packs._max_pack_count(20))
 
1068
        self.assertEqual(3, packs._max_pack_count(21))
 
1069
        # check some arbitrary big numbers
 
1070
        self.assertEqual(25, packs._max_pack_count(112894))
 
1071
 
 
1072
    def test_repr(self):
 
1073
        packs = self.get_packs()
 
1074
        self.assertContainsRe(repr(packs),
 
1075
            'RepositoryPackCollection(.*Repository(.*))')
 
1076
 
 
1077
    def test__obsolete_packs(self):
 
1078
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1079
        names = packs.names()
 
1080
        pack = packs.get_pack_by_name(names[0])
 
1081
        # Schedule this one for removal
 
1082
        packs._remove_pack_from_memory(pack)
 
1083
        # Simulate a concurrent update by renaming the .pack file and one of
 
1084
        # the indices
 
1085
        packs.transport.rename('packs/%s.pack' % (names[0],),
 
1086
                               'obsolete_packs/%s.pack' % (names[0],))
 
1087
        packs.transport.rename('indices/%s.iix' % (names[0],),
 
1088
                               'obsolete_packs/%s.iix' % (names[0],))
 
1089
        # Now trigger the obsoletion, and ensure that all the remaining files
 
1090
        # are still renamed
 
1091
        packs._obsolete_packs([pack])
 
1092
        self.assertEqual([n + '.pack' for n in names[1:]],
 
1093
                         sorted(packs._pack_transport.list_dir('.')))
 
1094
        # names[0] should not be present in the index anymore
 
1095
        self.assertEqual(names[1:],
 
1096
            sorted(set([osutils.splitext(n)[0] for n in
 
1097
                        packs._index_transport.list_dir('.')])))
 
1098
 
 
1099
    def test__obsolete_packs_missing_directory(self):
 
1100
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1101
        r.control_transport.rmdir('obsolete_packs')
 
1102
        names = packs.names()
 
1103
        pack = packs.get_pack_by_name(names[0])
 
1104
        # Schedule this one for removal
 
1105
        packs._remove_pack_from_memory(pack)
 
1106
        # Now trigger the obsoletion, and ensure that all the remaining files
 
1107
        # are still renamed
 
1108
        packs._obsolete_packs([pack])
 
1109
        self.assertEqual([n + '.pack' for n in names[1:]],
 
1110
                         sorted(packs._pack_transport.list_dir('.')))
 
1111
        # names[0] should not be present in the index anymore
 
1112
        self.assertEqual(names[1:],
 
1113
            sorted(set([osutils.splitext(n)[0] for n in
 
1114
                        packs._index_transport.list_dir('.')])))
 
1115
 
 
1116
    def test_pack_distribution_zero(self):
 
1117
        packs = self.get_packs()
 
1118
        self.assertEqual([0], packs.pack_distribution(0))
 
1119
 
 
1120
    def test_ensure_loaded_unlocked(self):
 
1121
        packs = self.get_packs()
 
1122
        self.assertRaises(errors.ObjectNotLocked,
 
1123
                          packs.ensure_loaded)
 
1124
 
 
1125
    def test_pack_distribution_one_to_nine(self):
 
1126
        packs = self.get_packs()
 
1127
        self.assertEqual([1],
 
1128
            packs.pack_distribution(1))
 
1129
        self.assertEqual([1, 1],
 
1130
            packs.pack_distribution(2))
 
1131
        self.assertEqual([1, 1, 1],
 
1132
            packs.pack_distribution(3))
 
1133
        self.assertEqual([1, 1, 1, 1],
 
1134
            packs.pack_distribution(4))
 
1135
        self.assertEqual([1, 1, 1, 1, 1],
 
1136
            packs.pack_distribution(5))
 
1137
        self.assertEqual([1, 1, 1, 1, 1, 1],
 
1138
            packs.pack_distribution(6))
 
1139
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
 
1140
            packs.pack_distribution(7))
 
1141
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
 
1142
            packs.pack_distribution(8))
 
1143
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
 
1144
            packs.pack_distribution(9))
 
1145
 
 
1146
    def test_pack_distribution_stable_at_boundaries(self):
 
1147
        """When there are multi-rev packs the counts are stable."""
 
1148
        packs = self.get_packs()
 
1149
        # in 10s:
 
1150
        self.assertEqual([10], packs.pack_distribution(10))
 
1151
        self.assertEqual([10, 1], packs.pack_distribution(11))
 
1152
        self.assertEqual([10, 10], packs.pack_distribution(20))
 
1153
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
 
1154
        # 100s
 
1155
        self.assertEqual([100], packs.pack_distribution(100))
 
1156
        self.assertEqual([100, 1], packs.pack_distribution(101))
 
1157
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
 
1158
        self.assertEqual([100, 100], packs.pack_distribution(200))
 
1159
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
 
1160
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
 
1161
 
 
1162
    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
 
1163
        packs = self.get_packs()
 
1164
        existing_packs = [(2000, "big"), (9, "medium")]
 
1165
        # rev count - 2009 -> 2x1000 + 9x1
 
1166
        pack_operations = packs.plan_autopack_combinations(
 
1167
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
 
1168
        self.assertEqual([], pack_operations)
 
1169
 
 
1170
    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
 
1171
        packs = self.get_packs()
 
1172
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
 
1173
        # rev count - 2010 -> 2x1000 + 1x10
 
1174
        pack_operations = packs.plan_autopack_combinations(
 
1175
            existing_packs, [1000, 1000, 10])
 
1176
        self.assertEqual([], pack_operations)
 
1177
 
 
1178
    def test_plan_pack_operations_2010_combines_smallest_two(self):
 
1179
        packs = self.get_packs()
 
1180
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
 
1181
            (1, "single1")]
 
1182
        # rev count - 2010 -> 2x1000 + 1x10 (3)
 
1183
        pack_operations = packs.plan_autopack_combinations(
 
1184
            existing_packs, [1000, 1000, 10])
 
1185
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)
 
1186
 
 
1187
    def test_plan_pack_operations_creates_a_single_op(self):
 
1188
        packs = self.get_packs()
 
1189
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
 
1190
                          (10, 'e'), (6, 'f'), (4, 'g')]
 
1191
        # rev count 150 -> 1x100 and 5x10
 
1192
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
 
1193
        # be combined into a single 120 size pack, and the 6 & 4 would
 
1194
        # becombined into a size 10 pack. However, if we have to rewrite them,
 
1195
        # we save a pack file with no increased I/O by putting them into the
 
1196
        # same file.
 
1197
        distribution = packs.pack_distribution(150)
 
1198
        pack_operations = packs.plan_autopack_combinations(existing_packs,
 
1199
                                                           distribution)
 
1200
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)
 
1201
 
 
1202
    def test_all_packs_none(self):
 
1203
        format = self.get_format()
 
1204
        tree = self.make_branch_and_tree('.', format=format)
 
1205
        tree.lock_read()
 
1206
        self.addCleanup(tree.unlock)
 
1207
        packs = tree.branch.repository._pack_collection
 
1208
        packs.ensure_loaded()
 
1209
        self.assertEqual([], packs.all_packs())
 
1210
 
 
1211
    def test_all_packs_one(self):
 
1212
        format = self.get_format()
 
1213
        tree = self.make_branch_and_tree('.', format=format)
 
1214
        tree.commit('start')
 
1215
        tree.lock_read()
 
1216
        self.addCleanup(tree.unlock)
 
1217
        packs = tree.branch.repository._pack_collection
 
1218
        packs.ensure_loaded()
 
1219
        self.assertEqual([
 
1220
            packs.get_pack_by_name(packs.names()[0])],
 
1221
            packs.all_packs())
 
1222
 
 
1223
    def test_all_packs_two(self):
 
1224
        format = self.get_format()
 
1225
        tree = self.make_branch_and_tree('.', format=format)
 
1226
        tree.commit('start')
 
1227
        tree.commit('continue')
 
1228
        tree.lock_read()
 
1229
        self.addCleanup(tree.unlock)
 
1230
        packs = tree.branch.repository._pack_collection
 
1231
        packs.ensure_loaded()
 
1232
        self.assertEqual([
 
1233
            packs.get_pack_by_name(packs.names()[0]),
 
1234
            packs.get_pack_by_name(packs.names()[1]),
 
1235
            ], packs.all_packs())
 
1236
 
 
1237
    def test_get_pack_by_name(self):
 
1238
        format = self.get_format()
 
1239
        tree = self.make_branch_and_tree('.', format=format)
 
1240
        tree.commit('start')
 
1241
        tree.lock_read()
 
1242
        self.addCleanup(tree.unlock)
 
1243
        packs = tree.branch.repository._pack_collection
 
1244
        packs.reset()
 
1245
        packs.ensure_loaded()
 
1246
        name = packs.names()[0]
 
1247
        pack_1 = packs.get_pack_by_name(name)
 
1248
        # the pack should be correctly initialised
 
1249
        sizes = packs._names[name]
 
1250
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
 
1251
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
 
1252
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
 
1253
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
 
1254
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
 
1255
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
 
1256
        # and the same instance should be returned on successive calls.
 
1257
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
 
1258
 
 
1259
    def test_reload_pack_names_new_entry(self):
 
1260
        tree, r, packs, revs = self.make_packs_and_alt_repo()
 
1261
        names = packs.names()
 
1262
        # Add a new pack file into the repository
 
1263
        rev4 = tree.commit('four')
 
1264
        new_names = tree.branch.repository._pack_collection.names()
 
1265
        new_name = set(new_names).difference(names)
 
1266
        self.assertEqual(1, len(new_name))
 
1267
        new_name = new_name.pop()
 
1268
        # The old collection hasn't noticed yet
 
1269
        self.assertEqual(names, packs.names())
 
1270
        self.assertTrue(packs.reload_pack_names())
 
1271
        self.assertEqual(new_names, packs.names())
 
1272
        # And the repository can access the new revision
 
1273
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
 
1274
        self.assertFalse(packs.reload_pack_names())
 
1275
 
 
1276
    def test_reload_pack_names_added_and_removed(self):
 
1277
        tree, r, packs, revs = self.make_packs_and_alt_repo()
 
1278
        names = packs.names()
 
1279
        # Now repack the whole thing
 
1280
        tree.branch.repository.pack()
 
1281
        new_names = tree.branch.repository._pack_collection.names()
 
1282
        # The other collection hasn't noticed yet
 
1283
        self.assertEqual(names, packs.names())
 
1284
        self.assertTrue(packs.reload_pack_names())
 
1285
        self.assertEqual(new_names, packs.names())
 
1286
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
 
1287
        self.assertFalse(packs.reload_pack_names())
 
1288
 
 
1289
    def test_reload_pack_names_preserves_pending(self):
        """Pending in-memory pack additions/removals survive a reload.

        A pack allocated in this collection but not yet saved, and a pack
        removed from memory but not yet saved, must still show up as
        new/deleted in _diff_pack_names after reload_pack_names.
        """
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        new_pack.finish()
        # Allocate the finished pack into the collection without saving the
        # pack-names file, so the addition stays pending.
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        # Before the reload: the diff shows one pending addition and one
        # pending deletion.
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        # After the reload: the pending changes are still reported the same.
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
 
1327
 
 
1328
    def test_autopack_obsoletes_new_pack(self):
 
1329
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1330
        packs._max_pack_count = lambda x: 1
 
1331
        packs.pack_distribution = lambda x: [10]
 
1332
        r.start_write_group()
 
1333
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
 
1334
            ('bogus-rev',), (), None, 'bogus-content\n')])
 
1335
        # This should trigger an autopack, which will combine everything into a
 
1336
        # single pack file.
 
1337
        new_names = r.commit_write_group()
 
1338
        names = packs.names()
 
1339
        self.assertEqual(1, len(names))
 
1340
        self.assertEqual([names[0] + '.pack'],
 
1341
                         packs._pack_transport.list_dir('.'))
 
1342
 
 
1343
    def test_autopack_reloads_and_stops(self):
        """An autopack whose plan is invalidated reloads and does nothing.

        A concurrent full pack (run here via the other repository handle
        inside the munged _execute_pack_operations) invalidates the plan;
        autopack must notice on reload and stop rather than fail.
        """
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything
        orig_execute = packs._execute_pack_operations
        def _munged_execute_pack_ops(*args, **kwargs):
            # Simulate a concurrent repack happening between planning and
            # execution.
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        # autopack returns False because it ended up doing no work.
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())
 
1359
 
 
1360
    def test__save_pack_names(self):
 
1361
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1362
        names = packs.names()
 
1363
        pack = packs.get_pack_by_name(names[0])
 
1364
        packs._remove_pack_from_memory(pack)
 
1365
        packs._save_pack_names(obsolete_packs=[pack])
 
1366
        cur_packs = packs._pack_transport.list_dir('.')
 
1367
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
 
1368
        # obsolete_packs will also have stuff like .rix and .iix present.
 
1369
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
 
1370
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
 
1371
        self.assertEqual([pack.name], sorted(obsolete_names))
 
1372
 
 
1373
    def test__save_pack_names_already_obsoleted(self):
 
1374
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1375
        names = packs.names()
 
1376
        pack = packs.get_pack_by_name(names[0])
 
1377
        packs._remove_pack_from_memory(pack)
 
1378
        # We are going to simulate a concurrent autopack by manually obsoleting
 
1379
        # the pack directly.
 
1380
        packs._obsolete_packs([pack])
 
1381
        packs._save_pack_names(clear_obsolete_packs=True,
 
1382
                               obsolete_packs=[pack])
 
1383
        cur_packs = packs._pack_transport.list_dir('.')
 
1384
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
 
1385
        # Note that while we set clear_obsolete_packs=True, it should not
 
1386
        # delete a pack file that we have also scheduled for obsoletion.
 
1387
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
 
1388
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
 
1389
        self.assertEqual([pack.name], sorted(obsolete_names))
 
1390
 
 
1391
    def test_pack_no_obsolete_packs_directory(self):
 
1392
        """Bug #314314, don't fail if obsolete_packs directory does
 
1393
        not exist."""
 
1394
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1395
        r.control_transport.rmdir('obsolete_packs')
 
1396
        packs._clear_obsolete_packs()
 
1397
 
 
1398
 
 
1399
class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        """Assert that both == and != agree left and right are equal."""
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        """Assert that both == and != agree left and right differ."""
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        """Equality compares all the index, name and transport attributes."""
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # change all attributes and ensure equality changes as we do.
        for attribute in ['revision_index', 'inventory_index', 'text_index',
                          'signature_index', 'name', 'transport']:
            setattr(left, attribute, 'a')
            self.assertCurrentlyNotEqual(left, right)
            setattr(right, attribute, 'a')
            self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        """file_name is the pack name plus the .pack suffix."""
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())
 
1447
 
 
1448
 
 
1449
class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        """A freshly constructed NewPack exposes the expected attributes."""
        upload_transport = self.get_transport('upload')
        pack_transport = self.get_transport('pack')
        index_transport = self.get_transport('index')
        upload_transport.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_transport,
            upload_transport=upload_transport,
            pack_transport=pack_transport,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
        # The indices are builders of the collection's configured class.
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        # The transports are taken straight from the collection.
        self.assertTrue(pack.upload_transport is upload_transport)
        self.assertTrue(pack.index_transport is index_transport)
        self.assertTrue(pack.pack_transport is pack_transport)
        # index_sizes is not populated until the pack is finished.
        self.assertEqual(None, pack.index_sizes)
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)
 
1478
 
 
1479
 
 
1480
class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        """Packing reorders source packs so requested revisions come first.

        The packer moves the pack files holding the requested revision_ids
        ('B' and 'C') to the front of its .packs list, leaving the others
        in their original order.
        """
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files should
        # be moved to the front of the stack
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        # Only the reordering side effect on packer.packs is under test; the
        # return value of pack() is not needed.
        packer.pack()
        self.assertEqual(new_packs, packer.packs)
 
1513
 
 
1514
 
 
1515
class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        """Return the pack collection of a freshly created repository."""
        return self.make_repository('.')._pack_collection

    def test_open_pack_will_optimise(self):
        """Packs opened by an optimising packer optimise all four indices."""
        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
                                            [], '.test')
        new_pack = packer.open_pack()
        self.addCleanup(new_pack.abort) # ensure cleanup
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        for index in (new_pack.revision_index, new_pack.inventory_index,
                      new_pack.text_index, new_pack.signature_index):
            self.assertTrue(index._optimize_for_size)
 
1532
 
 
1533
 
 
1534
class TestGCCHKPacker(TestCaseWithTransport):
    """Tests for groupcompress_repo.GCCHKPacker."""

    def make_abc_branch(self):
        """Create and return a branch with three revisions A -> B -> C."""
        builder = self.make_branch_builder('source')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('file', 'file-id', 'file', 'content\n')),
            ])
        builder.build_snapshot('B', ['A'], [
            ('add', ('dir', 'dir-id', 'directory', None))])
        builder.build_snapshot('C', ['B'], [
            ('modify', ('file-id', 'new content\n'))])
        builder.finish_series()
        return builder.get_branch()

    def make_branch_with_disjoint_inventory_and_revision(self):
        """a repo with separate packs for a revisions Revision and Inventory.

        There will be one pack file that holds the Revision content, and one
        for the Inventory content.

        :return: (repository,
                  pack_name_with_rev_A_Revision,
                  pack_name_with_rev_A_Inventory,
                  pack_name_with_rev_C_content)
        """
        b_source = self.make_abc_branch()
        b_base = b_source.bzrdir.sprout('base', revision_id='A').open_branch()
        b_stacked = b_base.bzrdir.sprout('stacked', stacked=True).open_branch()
        b_stacked.lock_write()
        self.addCleanup(b_stacked.unlock)
        b_stacked.fetch(b_source, 'B')
        # Now re-open the stacked repo directly (no fallbacks) so that we can
        # fill in the A rev.
        repo_not_stacked = b_stacked.bzrdir.open_repository()
        repo_not_stacked.lock_write()
        self.addCleanup(repo_not_stacked.unlock)
        # Now we should have a pack file with A's inventory, but not its
        # Revision
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.inventories.keys()))
        self.assertEqual([('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        stacked_pack_names = repo_not_stacked._pack_collection.names()
        # We have a couple names here, figure out which has A's inventory
        for name in stacked_pack_names:
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
            if ('A',) in keys:
                inv_a_pack_name = name
                break
        else:
            self.fail('Could not find pack containing A\'s inventory')
        # Fetching A now creates exactly one new pack, holding A's Revision.
        repo_not_stacked.fetch(b_source.repository, 'A')
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        new_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
        self.assertEqual(1, len(rev_a_pack_names))
        rev_a_pack_name = list(rev_a_pack_names)[0]
        # Now fetch 'C', so we have a couple pack files to join
        repo_not_stacked.fetch(b_source.repository, 'C')
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
        self.assertEqual(1, len(rev_c_pack_names))
        rev_c_pack_name = list(rev_c_pack_names)[0]
        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
                rev_c_pack_name)

    def test_pack_with_distant_inventories(self):
        """Repacking copes with an inventory in a different pack file."""
        # See https://bugs.launchpad.net/bzr/+bug/437003
        # When repacking, it is possible to have an inventory in a different
        # pack file than the associated revision. An autopack can then come
        # along, and miss that inventory, and complain.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                    [a_pack, c_pack], '.test-pack')
        # This would raise ValueError in bug #437003, but should not raise an
        # error once fixed.
        packer.pack()

    def test_pack_with_missing_inventory(self):
        """A truly absent inventory makes the packer raise ValueError."""
        # Similar to test_pack_with_distant_inventories, but this time, we
        # force the A inventory to actually be gone from the repository.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
            repo._pack_collection.all_packs(), '.test-pack')
        e = self.assertRaises(ValueError, packer.pack)
        packer.new_pack.abort()
        self.assertContainsRe(str(e),
            r"We are missing inventories for revisions: .*'A'")
 
1632
 
 
1633
 
 
1634
class TestCrossFormatPacks(TestCaseWithTransport):
    """Tests that cross-format fetches trigger a pack when appropriate."""

    def log_pack(self, hint=None):
        """Record a pack() call in self.calls, then run the real pack()."""
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def _prepare_source_and_target(self, src_fmt, target_fmt,
                                   expect_pack_called):
        """Set up a one-commit source tree and an instrumented target repo.

        The target's pack() is wrapped by log_pack so each call is recorded
        in self.calls.

        :return: (source_tree, tip_revision_id, target_repository)
        """
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        return source_tree, tip, target

    def _check_pack_called(self, expect_pack_called):
        """Assert pack() was called exactly once, or not at all."""
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        """Insert a stream from src_fmt into target_fmt; check pack() use."""
        source_tree, tip, target = self._prepare_source_and_target(
            src_fmt, target_fmt, expect_pack_called)
        source = source_tree.branch.repository._get_source(target._format)
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, revision_ids=[tip])
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        self._check_pack_called(expect_pack_called)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        """Fetch from src_fmt into target_fmt; check pack() use."""
        source_tree, tip, target = self._prepare_source_and_target(
            src_fmt, target_fmt, expect_pack_called)
        target.fetch(source_tree.branch.repository)
        self._check_pack_called(expect_pack_called)

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)
 
1712
 
 
1713
 
 
1714
class Test_LazyListJoin(tests.TestCase):
    """Tests for repository._LazyListJoin."""

    def test__repr__(self):
        """repr() names the class and shows the joined sub-lists."""
        lazy = repository._LazyListJoin(['a'], ['b'])
        expected = "bzrlib.repository._LazyListJoin((['a'], ['b']))"
        self.assertEqual(expected, repr(lazy))
 
1720
 
 
1721
 
 
1722
class TestFeatures(tests.TestCaseWithTransport):
    """Tests for repository format feature flags."""

    def test_open_with_present_feature(self):
        """A required feature that is registered does not block the format."""
        # Unregister on cleanup so other tests see a clean registry.
        self.addCleanup(
            repository.RepositoryFormatMetaDir.unregister_feature,
            "makes-cheese-sandwich")
        repository.RepositoryFormatMetaDir.register_feature(
            "makes-cheese-sandwich")
        repo = self.make_repository('.')
        repo.lock_write()
        repo._format.features["makes-cheese-sandwich"] = "required"
        # Must not raise: the required feature is known.
        repo._format.check_support_status(False)
        repo.unlock()

    def test_open_with_missing_required_feature(self):
        """A required feature that is not registered raises MissingFeature."""
        repo = self.make_repository('.')
        repo.lock_write()
        repo._format.features["makes-cheese-sandwich"] = "required"
        self.assertRaises(errors.MissingFeature,
            repo._format.check_support_status, False)