~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Jelmer Vernooij
  • Date: 2012-06-18 11:43:07 UTC
  • mfrom: (6437.54.10 2.5)
  • mto: This revision was merged to the branch mainline in revision 6525.
  • Revision ID: jelmer@samba.org-20120618114307-zeazlym311p38m98
Merge 2.5.

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006, 2007 Canonical Ltd
 
1
# Copyright (C) 2006-2011 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
12
12
#
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
 
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16
16
 
17
17
"""Tests for the Repository facility that are not interface tests.
18
18
 
19
 
For interface tests see tests/repository_implementations/*.py.
 
19
For interface tests see tests/per_repository/*.py.
20
20
 
21
21
For concrete class tests see this file, and for storage formats tests
22
22
also see this file.
23
23
"""
24
24
 
25
25
from stat import S_ISDIR
26
 
from StringIO import StringIO
27
26
 
28
 
from bzrlib import symbol_versioning
29
27
import bzrlib
30
 
import bzrlib.bzrdir as bzrdir
31
 
import bzrlib.errors as errors
32
 
from bzrlib.errors import (NotBranchError,
33
 
                           NoSuchFile,
34
 
                           UnknownFormatError,
35
 
                           UnsupportedFormatError,
36
 
                           )
 
28
from bzrlib.errors import (
 
29
    UnknownFormatError,
 
30
    UnsupportedFormatError,
 
31
    )
 
32
from bzrlib import (
 
33
    btree_index,
 
34
    symbol_versioning,
 
35
    tests,
 
36
    transport,
 
37
    vf_search,
 
38
    )
 
39
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
 
40
from bzrlib.index import GraphIndex
37
41
from bzrlib.repository import RepositoryFormat
38
42
from bzrlib.tests import (
39
43
    TestCase,
40
44
    TestCaseWithTransport,
41
 
    test_knit,
42
45
    )
43
 
from bzrlib.transport import get_transport
44
 
from bzrlib.transport.memory import MemoryServer
45
 
from bzrlib.util import bencode
46
46
from bzrlib import (
 
47
    bzrdir,
 
48
    controldir,
 
49
    errors,
 
50
    inventory,
 
51
    osutils,
47
52
    repository,
 
53
    revision as _mod_revision,
48
54
    upgrade,
 
55
    versionedfile,
 
56
    vf_repository,
49
57
    workingtree,
50
58
    )
51
 
from bzrlib.repofmt import knitrepo, weaverepo
 
59
from bzrlib.repofmt import (
 
60
    groupcompress_repo,
 
61
    knitrepo,
 
62
    knitpack_repo,
 
63
    pack_repo,
 
64
    )
52
65
 
53
66
 
54
67
class TestDefaultFormat(TestCase):
55
68
 
56
69
    def test_get_set_default_format(self):
57
 
        old_default = bzrdir.format_registry.get('default')
 
70
        old_default = controldir.format_registry.get('default')
58
71
        private_default = old_default().repository_format.__class__
59
 
        old_format = repository.RepositoryFormat.get_default_format()
 
72
        old_format = repository.format_registry.get_default()
60
73
        self.assertTrue(isinstance(old_format, private_default))
61
74
        def make_sample_bzrdir():
62
75
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
63
76
            my_bzrdir.repository_format = SampleRepositoryFormat()
64
77
            return my_bzrdir
65
 
        bzrdir.format_registry.remove('default')
66
 
        bzrdir.format_registry.register('sample', make_sample_bzrdir, '')
67
 
        bzrdir.format_registry.set_default('sample')
 
78
        controldir.format_registry.remove('default')
 
79
        controldir.format_registry.register('sample', make_sample_bzrdir, '')
 
80
        controldir.format_registry.set_default('sample')
68
81
        # creating a repository should now create an instrumented dir.
69
82
        try:
70
83
            # the default branch format is used by the meta dir format
73
86
            result = dir.create_repository()
74
87
            self.assertEqual(result, 'A bzr repository dir')
75
88
        finally:
76
 
            bzrdir.format_registry.remove('default')
77
 
            bzrdir.format_registry.remove('sample')
78
 
            bzrdir.format_registry.register('default', old_default, '')
79
 
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
 
89
            controldir.format_registry.remove('default')
 
90
            controldir.format_registry.remove('sample')
 
91
            controldir.format_registry.register('default', old_default, '')
 
92
        self.assertIsInstance(repository.format_registry.get_default(),
80
93
                              old_format.__class__)
81
94
 
82
95
 
83
 
class SampleRepositoryFormat(repository.RepositoryFormat):
 
96
class SampleRepositoryFormat(repository.RepositoryFormatMetaDir):
84
97
    """A sample format
85
98
 
86
 
    this format is initializable, unsupported to aid in testing the 
 
99
    this format is initializable, unsupported to aid in testing the
87
100
    open and open(unsupported=True) routines.
88
101
    """
89
102
 
90
 
    def get_format_string(self):
 
103
    @classmethod
 
104
    def get_format_string(cls):
91
105
        """See RepositoryFormat.get_format_string()."""
92
106
        return "Sample .bzr repository format."
93
107
 
104
118
        return "opened repository."
105
119
 
106
120
 
 
121
class SampleExtraRepositoryFormat(repository.RepositoryFormat):
 
122
    """A sample format that can not be used in a metadir
 
123
 
 
124
    """
 
125
 
 
126
    def get_format_string(self):
 
127
        raise NotImplementedError
 
128
 
 
129
 
107
130
class TestRepositoryFormat(TestCaseWithTransport):
108
131
    """Tests for the Repository format detection used by the bzr meta dir facility.BzrBranchFormat facility."""
109
132
 
110
133
    def test_find_format(self):
111
134
        # is the right format object found for a repository?
112
135
        # create a branch with a few known format objects.
113
 
        # this is not quite the same as 
 
136
        # this is not quite the same as
114
137
        self.build_tree(["foo/", "bar/"])
115
138
        def check_format(format, url):
116
139
            dir = format._matchingbzrdir.initialize(url)
117
140
            format.initialize(dir)
118
 
            t = get_transport(url)
119
 
            found_format = repository.RepositoryFormat.find_format(dir)
120
 
            self.failUnless(isinstance(found_format, format.__class__))
121
 
        check_format(weaverepo.RepositoryFormat7(), "bar")
122
 
        
 
141
            t = transport.get_transport_from_path(url)
 
142
            found_format = repository.RepositoryFormatMetaDir.find_format(dir)
 
143
            self.assertIsInstance(found_format, format.__class__)
 
144
        check_format(repository.format_registry.get_default(), "bar")
 
145
 
123
146
    def test_find_format_no_repository(self):
124
147
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
125
148
        self.assertRaises(errors.NoRepositoryPresent,
126
 
                          repository.RepositoryFormat.find_format,
 
149
                          repository.RepositoryFormatMetaDir.find_format,
127
150
                          dir)
128
151
 
 
152
    def test_from_string(self):
 
153
        self.assertIsInstance(
 
154
            SampleRepositoryFormat.from_string(
 
155
                "Sample .bzr repository format."),
 
156
            SampleRepositoryFormat)
 
157
        self.assertRaises(AssertionError,
 
158
            SampleRepositoryFormat.from_string,
 
159
                "Different .bzr repository format.")
 
160
 
129
161
    def test_find_format_unknown_format(self):
130
162
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
131
163
        SampleRepositoryFormat().initialize(dir)
132
164
        self.assertRaises(UnknownFormatError,
133
 
                          repository.RepositoryFormat.find_format,
 
165
                          repository.RepositoryFormatMetaDir.find_format,
134
166
                          dir)
135
167
 
 
168
    def test_find_format_with_features(self):
 
169
        tree = self.make_branch_and_tree('.', format='2a')
 
170
        tree.branch.repository.update_feature_flags({"name": "necessity"})
 
171
        found_format = repository.RepositoryFormatMetaDir.find_format(tree.bzrdir)
 
172
        self.assertIsInstance(found_format, repository.RepositoryFormatMetaDir)
 
173
        self.assertEquals(found_format.features.get("name"), "necessity")
 
174
        self.assertRaises(errors.MissingFeature, found_format.check_support_status,
 
175
            True)
 
176
        self.addCleanup(repository.RepositoryFormatMetaDir.unregister_feature,
 
177
            "name")
 
178
        repository.RepositoryFormatMetaDir.register_feature("name")
 
179
        found_format.check_support_status(True)
 
180
 
136
181
    def test_register_unregister_format(self):
 
182
        # Test deprecated format registration functions
137
183
        format = SampleRepositoryFormat()
138
184
        # make a control dir
139
185
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
140
186
        # make a repo
141
187
        format.initialize(dir)
142
188
        # register a format for it.
143
 
        repository.RepositoryFormat.register_format(format)
 
189
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
 
190
            repository.RepositoryFormat.register_format, format)
144
191
        # which repository.Open will refuse (not supported)
145
 
        self.assertRaises(UnsupportedFormatError, repository.Repository.open, self.get_url())
 
192
        self.assertRaises(UnsupportedFormatError, repository.Repository.open,
 
193
            self.get_url())
146
194
        # but open(unsupported) will work
147
195
        self.assertEqual(format.open(dir), "opened repository.")
148
196
        # unregister the format
149
 
        repository.RepositoryFormat.unregister_format(format)
150
 
 
151
 
 
152
 
class TestFormat6(TestCaseWithTransport):
153
 
 
154
 
    def test_no_ancestry_weave(self):
155
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
156
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
157
 
        # We no longer need to create the ancestry.weave file
158
 
        # since it is *never* used.
159
 
        self.assertRaises(NoSuchFile,
160
 
                          control.transport.get,
161
 
                          'ancestry.weave')
162
 
 
163
 
 
164
 
class TestFormat7(TestCaseWithTransport):
165
 
    
166
 
    def test_disk_layout(self):
167
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
168
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
169
 
        # in case of side effects of locking.
170
 
        repo.lock_write()
171
 
        repo.unlock()
172
 
        # we want:
173
 
        # format 'Bazaar-NG Repository format 7'
174
 
        # lock ''
175
 
        # inventory.weave == empty_weave
176
 
        # empty revision-store directory
177
 
        # empty weaves directory
178
 
        t = control.get_repository_transport(None)
179
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
180
 
                             t.get('format').read())
181
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
182
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
183
 
        self.assertEqualDiff('# bzr weave file v5\n'
184
 
                             'w\n'
185
 
                             'W\n',
186
 
                             t.get('inventory.weave').read())
187
 
 
188
 
    def test_shared_disk_layout(self):
189
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
190
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
191
 
        # we want:
192
 
        # format 'Bazaar-NG Repository format 7'
193
 
        # inventory.weave == empty_weave
194
 
        # empty revision-store directory
195
 
        # empty weaves directory
196
 
        # a 'shared-storage' marker file.
197
 
        # lock is not present when unlocked
198
 
        t = control.get_repository_transport(None)
199
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
200
 
                             t.get('format').read())
201
 
        self.assertEqualDiff('', t.get('shared-storage').read())
202
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
203
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
204
 
        self.assertEqualDiff('# bzr weave file v5\n'
205
 
                             'w\n'
206
 
                             'W\n',
207
 
                             t.get('inventory.weave').read())
208
 
        self.assertFalse(t.has('branch-lock'))
209
 
 
210
 
    def test_creates_lockdir(self):
211
 
        """Make sure it appears to be controlled by a LockDir existence"""
212
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
213
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
214
 
        t = control.get_repository_transport(None)
215
 
        # TODO: Should check there is a 'lock' toplevel directory, 
216
 
        # regardless of contents
217
 
        self.assertFalse(t.has('lock/held/info'))
218
 
        repo.lock_write()
219
 
        try:
220
 
            self.assertTrue(t.has('lock/held/info'))
221
 
        finally:
222
 
            # unlock so we don't get a warning about failing to do so
223
 
            repo.unlock()
224
 
 
225
 
    def test_uses_lockdir(self):
226
 
        """repo format 7 actually locks on lockdir"""
227
 
        base_url = self.get_url()
228
 
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
229
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
230
 
        t = control.get_repository_transport(None)
231
 
        repo.lock_write()
232
 
        repo.unlock()
233
 
        del repo
234
 
        # make sure the same lock is created by opening it
235
 
        repo = repository.Repository.open(base_url)
236
 
        repo.lock_write()
237
 
        self.assertTrue(t.has('lock/held/info'))
238
 
        repo.unlock()
239
 
        self.assertFalse(t.has('lock/held/info'))
240
 
 
241
 
    def test_shared_no_tree_disk_layout(self):
242
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
243
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
244
 
        repo.set_make_working_trees(False)
245
 
        # we want:
246
 
        # format 'Bazaar-NG Repository format 7'
247
 
        # lock ''
248
 
        # inventory.weave == empty_weave
249
 
        # empty revision-store directory
250
 
        # empty weaves directory
251
 
        # a 'shared-storage' marker file.
252
 
        t = control.get_repository_transport(None)
253
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
254
 
                             t.get('format').read())
255
 
        ## self.assertEqualDiff('', t.get('lock').read())
256
 
        self.assertEqualDiff('', t.get('shared-storage').read())
257
 
        self.assertEqualDiff('', t.get('no-working-trees').read())
258
 
        repo.set_make_working_trees(True)
259
 
        self.assertFalse(t.has('no-working-trees'))
260
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
261
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
262
 
        self.assertEqualDiff('# bzr weave file v5\n'
263
 
                             'w\n'
264
 
                             'W\n',
265
 
                             t.get('inventory.weave').read())
 
197
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
 
198
            repository.RepositoryFormat.unregister_format, format)
 
199
 
 
200
 
 
201
class TestRepositoryFormatRegistry(TestCase):
 
202
 
 
203
    def setUp(self):
 
204
        super(TestRepositoryFormatRegistry, self).setUp()
 
205
        self.registry = repository.RepositoryFormatRegistry()
 
206
 
 
207
    def test_register_unregister_format(self):
 
208
        format = SampleRepositoryFormat()
 
209
        self.registry.register(format)
 
210
        self.assertEquals(format, self.registry.get("Sample .bzr repository format."))
 
211
        self.registry.remove(format)
 
212
        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")
 
213
 
 
214
    def test_get_all(self):
 
215
        format = SampleRepositoryFormat()
 
216
        self.assertEquals([], self.registry._get_all())
 
217
        self.registry.register(format)
 
218
        self.assertEquals([format], self.registry._get_all())
 
219
 
 
220
    def test_register_extra(self):
 
221
        format = SampleExtraRepositoryFormat()
 
222
        self.assertEquals([], self.registry._get_all())
 
223
        self.registry.register_extra(format)
 
224
        self.assertEquals([format], self.registry._get_all())
 
225
 
 
226
    def test_register_extra_lazy(self):
 
227
        self.assertEquals([], self.registry._get_all())
 
228
        self.registry.register_extra_lazy("bzrlib.tests.test_repository",
 
229
            "SampleExtraRepositoryFormat")
 
230
        formats = self.registry._get_all()
 
231
        self.assertEquals(1, len(formats))
 
232
        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
266
233
 
267
234
 
268
235
class TestFormatKnit1(TestCaseWithTransport):
269
 
    
 
236
 
 
237
    def test_attribute__fetch_order(self):
 
238
        """Knits need topological data insertion."""
 
239
        repo = self.make_repository('.',
 
240
                format=controldir.format_registry.get('knit')())
 
241
        self.assertEqual('topological', repo._format._fetch_order)
 
242
 
 
243
    def test_attribute__fetch_uses_deltas(self):
 
244
        """Knits reuse deltas."""
 
245
        repo = self.make_repository('.',
 
246
                format=controldir.format_registry.get('knit')())
 
247
        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
248
 
270
249
    def test_disk_layout(self):
271
250
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
272
251
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
286
265
        # self.assertEqualDiff('', t.get('lock').read())
287
266
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
288
267
        self.check_knits(t)
 
268
        # Check per-file knits.
 
269
        branch = control.create_branch()
 
270
        tree = control.create_workingtree()
 
271
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
 
272
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
 
273
        tree.commit('1st post', rev_id='foo')
 
274
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
 
275
            '\nfoo fulltext 0 81  :')
289
276
 
290
 
    def assertHasKnit(self, t, knit_name):
 
277
    def assertHasKnit(self, t, knit_name, extra_content=''):
291
278
        """Assert that knit_name exists on t."""
292
 
        self.assertEqualDiff('# bzr knit index 8\n',
 
279
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
293
280
                             t.get(knit_name + '.kndx').read())
294
 
        # no default content
295
 
        self.assertTrue(t.has(knit_name + '.knit'))
296
281
 
297
282
    def check_knits(self, t):
298
283
        """check knit content for a repository."""
342
327
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
343
328
        self.check_knits(t)
344
329
 
345
 
 
346
 
class KnitRepositoryStreamTests(test_knit.KnitTests):
347
 
    """Tests for knitrepo._get_stream_as_bytes."""
348
 
 
349
 
    def test_get_stream_as_bytes(self):
350
 
        # Make a simple knit
351
 
        k1 = self.make_test_knit()
352
 
        k1.add_lines('text-a', [], test_knit.split_lines(test_knit.TEXT_1))
353
 
        
354
 
        # Serialise it, check the output.
355
 
        bytes = knitrepo._get_stream_as_bytes(k1, ['text-a'])
356
 
        data = bencode.bdecode(bytes)
357
 
        format, record = data
358
 
        self.assertEqual('knit-plain', format)
359
 
        self.assertEqual(['text-a', ['fulltext'], []], record[:3])
360
 
        self.assertRecordContentEqual(k1, 'text-a', record[3])
361
 
 
362
 
    def test_get_stream_as_bytes_all(self):
363
 
        """Get a serialised data stream for all the records in a knit.
364
 
 
365
 
        Much like test_get_stream_all, except for get_stream_as_bytes.
 
330
    def test_deserialise_sets_root_revision(self):
 
331
        """We must have a inventory.root.revision
 
332
 
 
333
        Old versions of the XML5 serializer did not set the revision_id for
 
334
        the whole inventory. So we grab the one from the expected text. Which
 
335
        is valid when the api is not being abused.
366
336
        """
367
 
        k1 = self.make_test_knit()
368
 
        # Insert the same data as BasicKnitTests.test_knit_join, as they seem
369
 
        # to cover a range of cases (no parents, one parent, multiple parents).
370
 
        test_data = [
371
 
            ('text-a', [], test_knit.TEXT_1),
372
 
            ('text-b', ['text-a'], test_knit.TEXT_1),
373
 
            ('text-c', [], test_knit.TEXT_1),
374
 
            ('text-d', ['text-c'], test_knit.TEXT_1),
375
 
            ('text-m', ['text-b', 'text-d'], test_knit.TEXT_1),
376
 
           ]
377
 
        expected_data_list = [
378
 
            # version, options, parents
379
 
            ('text-a', ['fulltext'], []),
380
 
            ('text-b', ['line-delta'], ['text-a']),
381
 
            ('text-c', ['fulltext'], []),
382
 
            ('text-d', ['line-delta'], ['text-c']),
383
 
            ('text-m', ['line-delta'], ['text-b', 'text-d']),
384
 
            ]
385
 
        for version_id, parents, lines in test_data:
386
 
            k1.add_lines(version_id, parents, test_knit.split_lines(lines))
387
 
 
388
 
        bytes = knitrepo._get_stream_as_bytes(
389
 
            k1, ['text-a', 'text-b', 'text-c', 'text-d', 'text-m'])
390
 
 
391
 
        data = bencode.bdecode(bytes)
392
 
        format = data.pop(0)
393
 
        self.assertEqual('knit-plain', format)
394
 
 
395
 
        for expected, actual in zip(expected_data_list, data):
396
 
            expected_version = expected[0]
397
 
            expected_options = expected[1]
398
 
            expected_parents = expected[2]
399
 
            version, options, parents, bytes = actual
400
 
            self.assertEqual(expected_version, version)
401
 
            self.assertEqual(expected_options, options)
402
 
            self.assertEqual(expected_parents, parents)
403
 
            self.assertRecordContentEqual(k1, version, bytes)
 
337
        repo = self.make_repository('.',
 
338
                format=controldir.format_registry.get('knit')())
 
339
        inv_xml = '<inventory format="5">\n</inventory>\n'
 
340
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
 
341
        self.assertEqual('test-rev-id', inv.root.revision)
 
342
 
 
343
    def test_deserialise_uses_global_revision_id(self):
 
344
        """If it is set, then we re-use the global revision id"""
 
345
        repo = self.make_repository('.',
 
346
                format=controldir.format_registry.get('knit')())
 
347
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
 
348
                   '</inventory>\n')
 
349
        # Arguably, the deserialise_inventory should detect a mismatch, and
 
350
        # raise an error, rather than silently using one revision_id over the
 
351
        # other.
 
352
        self.assertRaises(AssertionError, repo._deserialise_inventory,
 
353
            'test-rev-id', inv_xml)
 
354
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
 
355
        self.assertEqual('other-rev-id', inv.root.revision)
 
356
 
 
357
    def test_supports_external_lookups(self):
 
358
        repo = self.make_repository('.',
 
359
                format=controldir.format_registry.get('knit')())
 
360
        self.assertFalse(repo._format.supports_external_lookups)
404
361
 
405
362
 
406
363
class DummyRepository(object):
407
364
    """A dummy repository for testing."""
408
365
 
 
366
    _format = None
409
367
    _serializer = None
410
368
 
411
369
    def supports_rich_root(self):
 
370
        if self._format is not None:
 
371
            return self._format.rich_root_data
412
372
        return False
413
373
 
 
374
    def get_graph(self):
 
375
        raise NotImplementedError
 
376
 
 
377
    def get_parent_map(self, revision_ids):
 
378
        raise NotImplementedError
 
379
 
414
380
 
415
381
class InterDummy(repository.InterRepository):
416
382
    """An inter-repository optimised code path for DummyRepository.
417
383
 
418
384
    This is for use during testing where we use DummyRepository as repositories
419
385
    so that none of the default regsitered inter-repository classes will
420
 
    match.
 
386
    MATCH.
421
387
    """
422
388
 
423
389
    @staticmethod
424
390
    def is_compatible(repo_source, repo_target):
425
391
        """InterDummy is compatible with DummyRepository."""
426
 
        return (isinstance(repo_source, DummyRepository) and 
 
392
        return (isinstance(repo_source, DummyRepository) and
427
393
            isinstance(repo_target, DummyRepository))
428
394
 
429
395
 
437
403
        # classes do not barf inappropriately when a surprising repository type
438
404
        # is handed to them.
439
405
        dummy_a = DummyRepository()
 
406
        dummy_a._format = RepositoryFormat()
 
407
        dummy_a._format.supports_full_versioned_files = True
440
408
        dummy_b = DummyRepository()
 
409
        dummy_b._format = RepositoryFormat()
 
410
        dummy_b._format.supports_full_versioned_files = True
441
411
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
442
412
 
443
413
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
444
414
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
445
 
        
 
415
 
446
416
        The effective default is now InterSameDataRepository because there is
447
417
        no actual sane default in the presence of incompatible data models.
448
418
        """
449
419
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
450
 
        self.assertEqual(repository.InterSameDataRepository,
 
420
        self.assertEqual(vf_repository.InterSameDataRepository,
451
421
                         inter_repo.__class__)
452
422
        self.assertEqual(repo_a, inter_repo.source)
453
423
        self.assertEqual(repo_b, inter_repo.target)
459
429
        # pair that it returns true on for the is_compatible static method
460
430
        # check
461
431
        dummy_a = DummyRepository()
 
432
        dummy_a._format = RepositoryFormat()
462
433
        dummy_b = DummyRepository()
 
434
        dummy_b._format = RepositoryFormat()
463
435
        repo = self.make_repository('.')
464
436
        # hack dummies to look like repo somewhat.
465
437
        dummy_a._serializer = repo._serializer
 
438
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
 
439
        dummy_a._format.rich_root_data = repo._format.rich_root_data
 
440
        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
466
441
        dummy_b._serializer = repo._serializer
 
442
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
 
443
        dummy_b._format.rich_root_data = repo._format.rich_root_data
 
444
        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
467
445
        repository.InterRepository.register_optimiser(InterDummy)
468
446
        try:
469
447
            # we should get the default for something InterDummy returns False
482
460
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
483
461
 
484
462
 
485
 
class TestInterWeaveRepo(TestCaseWithTransport):
486
 
 
487
 
    def test_is_compatible_and_registered(self):
488
 
        # InterWeaveRepo is compatible when either side
489
 
        # is a format 5/6/7 branch
490
 
        from bzrlib.repofmt import knitrepo, weaverepo
491
 
        formats = [weaverepo.RepositoryFormat5(),
492
 
                   weaverepo.RepositoryFormat6(),
493
 
                   weaverepo.RepositoryFormat7()]
494
 
        incompatible_formats = [weaverepo.RepositoryFormat4(),
495
 
                                knitrepo.RepositoryFormatKnit1(),
496
 
                                ]
497
 
        repo_a = self.make_repository('a')
498
 
        repo_b = self.make_repository('b')
499
 
        is_compatible = repository.InterWeaveRepo.is_compatible
500
 
        for source in incompatible_formats:
501
 
            # force incompatible left then right
502
 
            repo_a._format = source
503
 
            repo_b._format = formats[0]
504
 
            self.assertFalse(is_compatible(repo_a, repo_b))
505
 
            self.assertFalse(is_compatible(repo_b, repo_a))
506
 
        for source in formats:
507
 
            repo_a._format = source
508
 
            for target in formats:
509
 
                repo_b._format = target
510
 
                self.assertTrue(is_compatible(repo_a, repo_b))
511
 
        self.assertEqual(repository.InterWeaveRepo,
512
 
                         repository.InterRepository.get(repo_a,
513
 
                                                        repo_b).__class__)
 
463
class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):
 
464
 
 
465
    @classmethod
 
466
    def get_format_string(cls):
 
467
        return "Test Format 1"
 
468
 
 
469
 
 
470
class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):
 
471
 
 
472
    @classmethod
 
473
    def get_format_string(cls):
 
474
        return "Test Format 2"
514
475
 
515
476
 
516
477
class TestRepositoryConverter(TestCaseWithTransport):
517
478
 
518
479
    def test_convert_empty(self):
519
 
        t = get_transport(self.get_url('.'))
 
480
        source_format = TestRepositoryFormat1()
 
481
        target_format = TestRepositoryFormat2()
 
482
        repository.format_registry.register(source_format)
 
483
        self.addCleanup(repository.format_registry.remove,
 
484
            source_format)
 
485
        repository.format_registry.register(target_format)
 
486
        self.addCleanup(repository.format_registry.remove,
 
487
            target_format)
 
488
        t = self.get_transport()
520
489
        t.mkdir('repository')
521
490
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
522
 
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
523
 
        target_format = knitrepo.RepositoryFormatKnit1()
 
491
        repo = TestRepositoryFormat1().initialize(repo_dir)
524
492
        converter = repository.CopyConverter(target_format)
525
493
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
526
494
        try:
531
499
        self.assertTrue(isinstance(target_format, repo._format.__class__))
532
500
 
533
501
 
534
 
class TestMisc(TestCase):
535
 
    
536
 
    def test_unescape_xml(self):
537
 
        """We get some kind of error when malformed entities are passed"""
538
 
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;') 
539
 
 
540
 
 
541
502
class TestRepositoryFormatKnit3(TestCaseWithTransport):
542
503
 
 
504
    def test_attribute__fetch_order(self):
 
505
        """Knits need topological data insertion."""
 
506
        format = bzrdir.BzrDirMetaFormat1()
 
507
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
508
        repo = self.make_repository('.', format=format)
 
509
        self.assertEqual('topological', repo._format._fetch_order)
 
510
 
 
511
    def test_attribute__fetch_uses_deltas(self):
 
512
        """Knits reuse deltas."""
 
513
        format = bzrdir.BzrDirMetaFormat1()
 
514
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
515
        repo = self.make_repository('.', format=format)
 
516
        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
517
 
543
518
    def test_convert(self):
544
519
        """Ensure the upgrade adds weaves for roots"""
545
520
        format = bzrdir.BzrDirMetaFormat1()
547
522
        tree = self.make_branch_and_tree('.', format)
548
523
        tree.commit("Dull commit", rev_id="dull")
549
524
        revision_tree = tree.branch.repository.revision_tree('dull')
550
 
        self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
551
 
            revision_tree.inventory.root.file_id)
 
525
        revision_tree.lock_read()
 
526
        try:
 
527
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
 
528
                revision_tree.get_root_id())
 
529
        finally:
 
530
            revision_tree.unlock()
552
531
        format = bzrdir.BzrDirMetaFormat1()
553
532
        format.repository_format = knitrepo.RepositoryFormatKnit3()
554
533
        upgrade.Convert('.', format)
555
534
        tree = workingtree.WorkingTree.open('.')
556
535
        revision_tree = tree.branch.repository.revision_tree('dull')
557
 
        revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
 
536
        revision_tree.lock_read()
 
537
        try:
 
538
            revision_tree.get_file_lines(revision_tree.get_root_id())
 
539
        finally:
 
540
            revision_tree.unlock()
558
541
        tree.commit("Another dull commit", rev_id='dull2')
559
542
        revision_tree = tree.branch.repository.revision_tree('dull2')
560
 
        self.assertEqual('dull', revision_tree.inventory.root.revision)
561
 
 
 
543
        revision_tree.lock_read()
 
544
        self.addCleanup(revision_tree.unlock)
 
545
        self.assertEqual('dull',
 
546
                revision_tree.get_file_revision(revision_tree.get_root_id()))
 
547
 
 
548
    def test_supports_external_lookups(self):
 
549
        format = bzrdir.BzrDirMetaFormat1()
 
550
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
551
        repo = self.make_repository('.', format=format)
 
552
        self.assertFalse(repo._format.supports_external_lookups)
 
553
 
 
554
 
 
555
class Test2a(tests.TestCaseWithMemoryTransport):
    """Tests of behaviour specific to the 2a repository format."""

    def test_chk_bytes_uses_custom_btree_parser(self):
        """chk_bytes indices must use the group-compress leaf parser."""
        mt = self.make_branch_and_memory_tree('test', format='2a')
        mt.lock_write()
        self.addCleanup(mt.unlock)
        mt.add([''], ['root-id'])
        mt.commit('first')
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
        # It should also work if we re-open the repo
        repo = mt.branch.repository.bzrdir.open_repository()
        repo.lock_read()
        self.addCleanup(repo.unlock)
        index = repo.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)

    # NOTE: this test was previously defined three times with identical
    # bodies; the first two definitions were silently shadowed by the last
    # one, so only a single (deduplicated) copy is kept here.
    def test_fetch_combines_groups(self):
        """Fetching into a 2a repo stores both text versions in one group."""
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])

    def test_format_pack_compresses_True(self):
        """2a advertises that packing recompresses content."""
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        """2a inventories are CHK maps with a parent-id/basename index."""
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.lock_write()
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        tree.unlock()
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
        inv.parent_id_basename_to_file_id._ensure_root()
        inv.id_to_entry._ensure_root()
        # Both CHK maps use 64k pages.
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
        self.assertEqual(65536,
            inv.parent_id_basename_to_file_id._root_node.maximum_size)

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        """pack(hint=...) combines only the hinted packs."""
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()
        # 2 to combine
        tree.commit('2')
        tree.commit('3')
        # renamed from 'all' to avoid shadowing the builtin
        all_names = tree.branch.repository._pack_collection.names()
        combine = list(set(all_names) - set(to_keep))
        self.assertLength(3, all_names)
        self.assertLength(2, combine)
        tree.branch.repository.pack(hint=combine)
        final = tree.branch.repository._pack_collection.names()
        self.assertLength(2, final)
        self.assertFalse(combine[0] in final)
        self.assertFalse(combine[1] in final)
        self.assertSubset(to_keep, final)

    def test_stream_source_to_gc(self):
        """2a -> 2a uses the group-compress CHK stream source."""
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='2a')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        """2a -> non-CHK format falls back to the generic stream source."""
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
                            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = vf_search.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)

    def test_inconsistency_fatal(self):
        """Only the revision index treats inconsistent duplicates as fatal."""
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
        self.assertFalse(repo.texts._index._inconsistency_fatal)
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
 
777
 
 
778
 
 
779
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
    """Verify which stream source class serves each pack-format pairing."""

    def _stream_source(self, source_format, target_format):
        # Build a source and a target repository and return the stream
        # source the source repository selects for the target's format.
        source = self.make_repository('source', format=source_format)
        target = self.make_repository('target', format=target_format)
        return source._get_source(target._format)

    def test_source_to_exact_pack_092(self):
        stream_source = self._stream_source('pack-0.92', 'pack-0.92')
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        stream_source = self._stream_source('rich-root-pack', 'rich-root-pack')
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        stream_source = self._stream_source('1.9', '1.9')
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        stream_source = self._stream_source('1.9-rich-root', '1.9-rich-root')
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        # Same formats, but the target is reached over the smart protocol.
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        self.make_repository('target', format='1.9')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        stream = self._stream_source('pack-0.92', '1.9')
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        stream = self._stream_source('1.9', '1.9-rich-root')
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        # Mismatched formats over the smart protocol use the generic source.
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        self.make_repository('target', format='1.6')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), vf_repository.StreamSource)

    def test_stream_source_to_knit(self):
        stream = self._stream_source('pack-0.92', 'dirstate')
        self.assertIs(type(stream), vf_repository.StreamSource)
 
840
 
 
841
 
 
842
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        # Every test starts from a branch whose sole revision is 'initial',
        # which adds the root directory; further snapshots build on it.
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        # Order-insensitive check of the parent ids found for rev_set.
        self.assertEqual(sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))

    def test_simple(self):
        # The parent of the requested revision, outside the set, is returned.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        rev_set = ['revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_first_parent(self):
        # Parents already inside the set are not reported as external.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        rev_set = ['revid3', 'revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_null(self):
        # 'initial' has no parents, so nothing external is found.
        rev_set = ['initial']
        self.assertParentIds([], rev_set)

    def test_not_null_set(self):
        # NULL_REVISION itself never yields parent ids.
        self.builder.build_snapshot('revid1', None, [])
        rev_set = [_mod_revision.NULL_REVISION]
        self.assertParentIds([], rev_set)

    def test_ghost(self):
        # A ghost in the set is ignored; only real parents are reported.
        self.builder.build_snapshot('revid1', None, [])
        rev_set = ['ghost', 'revid1']
        self.assertParentIds(['initial'], rev_set)

    def test_ghost_parent(self):
        # A ghost *parent* of a set member is reported alongside real ones.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        rev_set = ['revid2', 'revid1']
        self.assertParentIds(['ghost', 'initial'], rev_set)

    def test_righthand_parent(self):
        # The right-hand (merge) parent outside the set is also reported.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        rev_set = ['revid3', 'revid2a']
        self.assertParentIds(['revid1', 'revid2b'], rev_set)
 
898
 
 
899
 
 
900
class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        """Build and return a repository with deliberately bad text parents.

        The write group is committed (via the cleanups list) even though the
        data is inconsistent, so the breakage is persisted.
        """
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon.  Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups = []
        try:
            repo.lock_write()
            cleanups.append(repo.unlock)
            repo.start_write_group()
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id='rev1a')
            inv.root.revision = 'rev1a'
            self.add_file(repo, inv, 'file1', 'rev1a', [])
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
            repo.add_inventory('rev1a', inv, [])
            revision = _mod_revision.Revision('rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision('rev1a', revision, inv)

            # make rev1b, which has no Revision, but has an Inventory, and
            # file1
            inv = inventory.Inventory(revision_id='rev1b')
            inv.root.revision = 'rev1b'
            self.add_file(repo, inv, 'file1', 'rev1b', [])
            repo.add_inventory('rev1b', inv, [])

            # make rev2, with file1 and file2
            # file2 is sane
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
            self.add_file(repo, inv, 'file2', 'rev2', [])
            self.add_revision(repo, 'rev2', inv, ['rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
            return repo
        finally:
            # Run cleanups in reverse registration order: commit the write
            # group before releasing the lock.
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        """Add inventory, root text and a Revision object for revision_id."""
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id, revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        """Add a one-line file text (with the given parents) to inv and repo."""
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        try:
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
            # error.
            return
        # If fetch did not error, the copied text must still be intact.
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        text = empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True).next()
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
993
 
 
994
 
 
995
class TestRepositoryPackCollection(TestCaseWithTransport):
 
996
 
 
997
    def get_format(self):
 
998
        return controldir.format_registry.make_bzrdir('pack-0.92')
 
999
 
 
1000
    def get_packs(self):
 
1001
        format = self.get_format()
 
1002
        repo = self.make_repository('.', format=format)
 
1003
        return repo._pack_collection
 
1004
 
 
1005
    def make_packs_and_alt_repo(self, write_lock=False):
        """Create a pack repo with 3 packs, and access it via a second repo.

        :param write_lock: if True the second repository is write-locked,
            otherwise read-locked; either lock is released via addCleanup.
        :return: (tree, second_repo, pack_collection, [rev1, rev2, rev3])
        """
        tree = self.make_branch_and_tree('.', format=self.get_format())
        tree.lock_write()
        self.addCleanup(tree.unlock)
        # Three commits -> three separate packs before any autopacking.
        rev1 = tree.commit('one')
        rev2 = tree.commit('two')
        rev3 = tree.commit('three')
        # Open the same repository independently of the tree's instance.
        r = repository.Repository.open('.')
        if write_lock:
            r.lock_write()
        else:
            r.lock_read()
        self.addCleanup(r.unlock)
        packs = r._pack_collection
        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]
 
1022
 
 
1023
    def test__clear_obsolete_packs(self):
 
1024
        packs = self.get_packs()
 
1025
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
1026
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1027
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1028
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1029
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1030
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1031
        res = packs._clear_obsolete_packs()
 
1032
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1033
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
 
1034
 
 
1035
    def test__clear_obsolete_packs_preserve(self):
 
1036
        packs = self.get_packs()
 
1037
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
1038
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1039
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1040
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1041
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1042
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1043
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
 
1044
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1045
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
 
1046
                         sorted(obsolete_pack_trans.list_dir('.')))
 
1047
 
 
1048
    def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
        # without it blowing up
        packs = self.get_packs()
        self.assertEqual(1, packs._max_pack_count(0))
        # after that the sum of the digits, - check the first 1-9
        self.assertEqual(1, packs._max_pack_count(1))
        self.assertEqual(2, packs._max_pack_count(2))
        self.assertEqual(3, packs._max_pack_count(3))
        self.assertEqual(4, packs._max_pack_count(4))
        self.assertEqual(5, packs._max_pack_count(5))
        self.assertEqual(6, packs._max_pack_count(6))
        self.assertEqual(7, packs._max_pack_count(7))
        self.assertEqual(8, packs._max_pack_count(8))
        self.assertEqual(9, packs._max_pack_count(9))
        # check the boundary cases with two digits for the next decade
        # (the expected values show _max_pack_count(n) is the sum of the
        # base-10 digits of n: 19 -> 1+9 = 10, 21 -> 2+1 = 3, ...)
        self.assertEqual(1, packs._max_pack_count(10))
        self.assertEqual(2, packs._max_pack_count(11))
        self.assertEqual(10, packs._max_pack_count(19))
        self.assertEqual(2, packs._max_pack_count(20))
        self.assertEqual(3, packs._max_pack_count(21))
        # check some arbitrary big numbers (1+1+2+8+9+4 = 25)
        self.assertEqual(25, packs._max_pack_count(112894))
 
1072
 
 
1073
    def test_repr(self):
        """repr(packs) mentions the collection and its repository."""
        packs = self.get_packs()
        # NOTE(review): the parentheses in this pattern are regex groups,
        # not escaped literals, so this only asserts that the two class-name
        # substrings appear in order -- confirm before tightening the match.
        self.assertContainsRe(repr(packs),
            'RepositoryPackCollection(.*Repository(.*))')
 
1077
 
 
1078
    def test__obsolete_packs(self):
        """_obsolete_packs moves remaining files even after a partial rename."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Simulate a concurrent update by renaming the .pack file and one of
        # the indices
        packs.transport.rename('packs/%s.pack' % (names[0],),
                               'obsolete_packs/%s.pack' % (names[0],))
        packs.transport.rename('indices/%s.iix' % (names[0],),
                               'obsolete_packs/%s.iix' % (names[0],))
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are still renamed
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))
 
1099
 
 
1100
    def test__obsolete_packs_missing_directory(self):
        """Obsoleting packs still works when obsolete_packs/ was deleted."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # Remove the destination directory before obsoleting.
        r.control_transport.rmdir('obsolete_packs')
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are still renamed
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))
 
1116
 
 
1117
    def test_pack_distribution_zero(self):
 
1118
        packs = self.get_packs()
 
1119
        self.assertEqual([0], packs.pack_distribution(0))
 
1120
 
 
1121
    def test_ensure_loaded_unlocked(self):
 
1122
        packs = self.get_packs()
 
1123
        self.assertRaises(errors.ObjectNotLocked,
 
1124
                          packs.ensure_loaded)
 
1125
 
 
1126
    def test_pack_distribution_one_to_nine(self):
 
1127
        packs = self.get_packs()
 
1128
        self.assertEqual([1],
 
1129
            packs.pack_distribution(1))
 
1130
        self.assertEqual([1, 1],
 
1131
            packs.pack_distribution(2))
 
1132
        self.assertEqual([1, 1, 1],
 
1133
            packs.pack_distribution(3))
 
1134
        self.assertEqual([1, 1, 1, 1],
 
1135
            packs.pack_distribution(4))
 
1136
        self.assertEqual([1, 1, 1, 1, 1],
 
1137
            packs.pack_distribution(5))
 
1138
        self.assertEqual([1, 1, 1, 1, 1, 1],
 
1139
            packs.pack_distribution(6))
 
1140
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
 
1141
            packs.pack_distribution(7))
 
1142
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
 
1143
            packs.pack_distribution(8))
 
1144
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
 
1145
            packs.pack_distribution(9))
 
1146
 
 
1147
    def test_pack_distribution_stable_at_boundaries(self):
        """When there are multi-rev packs the counts are stable.

        At each power-of-ten boundary the distribution uses the largest
        bucket size available, with remainders falling into smaller buckets.
        """
        packs = self.get_packs()
        # in 10s:
        self.assertEqual([10], packs.pack_distribution(10))
        self.assertEqual([10, 1], packs.pack_distribution(11))
        self.assertEqual([10, 10], packs.pack_distribution(20))
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
        # 100s
        self.assertEqual([100], packs.pack_distribution(100))
        self.assertEqual([100, 1], packs.pack_distribution(101))
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
        self.assertEqual([100, 100], packs.pack_distribution(200))
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
 
1162
 
 
1163
    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
        """No repack is planned when existing packs already fit the buckets."""
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium")]
        # rev count - 2009 -> 2x1000 + 9x1
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
        # Nothing needs combining, so the plan is empty.
        self.assertEqual([], pack_operations)
 
1170
 
 
1171
    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
        """Packs smaller than their bucket still need no repacking."""
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
        # rev count - 2010 -> 2x1000 + 1x10
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        # Nothing needs combining, so the plan is empty.
        self.assertEqual([], pack_operations)
 
1178
 
 
1179
    def test_plan_pack_operations_2010_combines_smallest_two(self):
        """With one pack too many, the two smallest packs are combined."""
        packs = self.get_packs()
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
            (1, "single1")]
        # rev count - 2010 -> 2x1000 + 1x10 (3)
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        # One operation: merge the two single-revision packs.
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)
 
1187
 
 
1188
    def test_plan_pack_operations_creates_a_single_op(self):
        """Packs that must be rewritten anyway are merged into one output."""
        packs = self.get_packs()
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
                          (10, 'e'), (6, 'f'), (4, 'g')]
        # rev count 150 -> 1x100 and 5x10
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
        # be combined into a single 120 size pack, and the 6 & 4 would
        # be combined into a size 10 pack. However, if we have to rewrite them,
        # we save a pack file with no increased I/O by putting them into the
        # same file.
        distribution = packs.pack_distribution(150)
        pack_operations = packs.plan_autopack_combinations(existing_packs,
                                                           distribution)
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)
 
1202
 
 
1203
    def test_all_packs_none(self):
 
1204
        format = self.get_format()
 
1205
        tree = self.make_branch_and_tree('.', format=format)
 
1206
        tree.lock_read()
 
1207
        self.addCleanup(tree.unlock)
 
1208
        packs = tree.branch.repository._pack_collection
 
1209
        packs.ensure_loaded()
 
1210
        self.assertEqual([], packs.all_packs())
 
1211
 
 
1212
    def test_all_packs_one(self):
 
1213
        format = self.get_format()
 
1214
        tree = self.make_branch_and_tree('.', format=format)
 
1215
        tree.commit('start')
 
1216
        tree.lock_read()
 
1217
        self.addCleanup(tree.unlock)
 
1218
        packs = tree.branch.repository._pack_collection
 
1219
        packs.ensure_loaded()
 
1220
        self.assertEqual([
 
1221
            packs.get_pack_by_name(packs.names()[0])],
 
1222
            packs.all_packs())
 
1223
 
 
1224
    def test_all_packs_two(self):
 
1225
        format = self.get_format()
 
1226
        tree = self.make_branch_and_tree('.', format=format)
 
1227
        tree.commit('start')
 
1228
        tree.commit('continue')
 
1229
        tree.lock_read()
 
1230
        self.addCleanup(tree.unlock)
 
1231
        packs = tree.branch.repository._pack_collection
 
1232
        packs.ensure_loaded()
 
1233
        self.assertEqual([
 
1234
            packs.get_pack_by_name(packs.names()[0]),
 
1235
            packs.get_pack_by_name(packs.names()[1]),
 
1236
            ], packs.all_packs())
 
1237
 
 
1238
    def test_get_pack_by_name(self):
        """get_pack_by_name builds a fully initialised pack and caches it."""
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.reset()
        packs.ensure_loaded()
        name = packs.names()[0]
        pack_1 = packs.get_pack_by_name(name)
        # the pack should be correctly initialised
        sizes = packs._names[name]
        # Build the four expected index objects from the recorded sizes:
        # revisions (.rix), inventories (.iix), texts (.tix), signatures (.six)
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
        # and the same instance should be returned on successive calls.
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
 
1259
 
 
1260
    def test_reload_pack_names_new_entry(self):
        """reload_pack_names picks up a pack added by another collection."""
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Add a new pack file into the repository
        rev4 = tree.commit('four')
        new_names = tree.branch.repository._pack_collection.names()
        new_name = set(new_names).difference(names)
        self.assertEqual(1, len(new_name))
        new_name = new_name.pop()
        # The old collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        # reload returns True because something changed.
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        # And the repository can access the new revision
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
        # A second reload with nothing new returns False.
        self.assertFalse(packs.reload_pack_names())
 
1276
 
 
1277
    def test_reload_pack_names_added_and_removed(self):
        """reload_pack_names handles packs both added and removed elsewhere."""
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Now repack the whole thing
        tree.branch.repository.pack()
        new_names = tree.branch.repository._pack_collection.names()
        # The other collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        # reload returns True because the pack set changed.
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        # A second reload with nothing new returns False.
        self.assertFalse(packs.reload_pack_names())
 
1289
 
 
1290
    def test_reload_pack_names_preserves_pending(self):
        """Reloading keeps locally pending pack additions and removals.

        A pack added in this collection (not yet saved) and a pack removed
        from memory must survive reload_pack_names unchanged.
        """
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        new_pack.finish()
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        # Before the reload: the diff shows our pending add and removal.
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        # After the reload: the same pending add and removal are still there.
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
 
1328
 
 
1329
    def test_autopack_obsoletes_new_pack(self):
        """An autopack triggered at commit time obsoletes superseded packs."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # Force autopack: at most one pack, everything in one bucket.
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        r.start_write_group()
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
            ('bogus-rev',), (), None, 'bogus-content\n')])
        # This should trigger an autopack, which will combine everything into a
        # single pack file.
        new_names = r.commit_write_group()
        names = packs.names()
        self.assertEqual(1, len(names))
        # Only the combined pack's file remains on disk.
        self.assertEqual([names[0] + '.pack'],
                         packs._pack_transport.list_dir('.'))
 
1343
 
 
1344
    def test_autopack_reloads_and_stops(self):
        """Autopack re-evaluates after a concurrent full pack and backs off."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything
        orig_execute = packs._execute_pack_operations
        def _munged_execute_pack_ops(*args, **kwargs):
            # Concurrent full pack happens just before we would execute.
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        # Force autopack: at most one pack, everything in one bucket.
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        # autopack finds nothing left to do after reloading.
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())
 
1360
 
 
1361
    def test__save_pack_names(self):
        """_save_pack_names moves obsolete packs into obsolete_packs."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        packs._save_pack_names(obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # obsolete_packs will also have stuff like .rix and .iix present.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))
 
1373
 
 
1374
    def test__save_pack_names_already_obsoleted(self):
        """_save_pack_names tolerates a pack already moved to obsolete_packs."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        # We are going to simulate a concurrent autopack by manually obsoleting
        # the pack directly.
        packs._obsolete_packs([pack])
        packs._save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # Note that while we set clear_obsolete_packs=True, it should not
        # delete a pack file that we have also scheduled for obsoletion.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))
 
1391
 
 
1392
    def test_pack_no_obsolete_packs_directory(self):
        """Bug #314314, don't fail if obsolete_packs directory does
        not exist."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        r.control_transport.rmdir('obsolete_packs')
        # Must not raise even though the directory is gone.
        packs._clear_obsolete_packs()
 
1398
 
 
1399
 
 
1400
class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        """Assert == and != both report left and right as equal, both ways."""
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        """Assert == and != both report left and right as unequal, both ways."""
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # change all attributes and ensure equality changes as we do:
        # differing on any one attribute makes the packs unequal, and
        # matching it again restores equality.
        for attribute in ('revision_index', 'inventory_index', 'text_index',
                          'signature_index', 'name', 'transport'):
            setattr(left, attribute, 'a')
            self.assertCurrentlyNotEqual(left, right)
            setattr(right, attribute, 'a')
            self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        """file_name() is the pack name plus the '.pack' suffix."""
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())
 
1448
 
 
1449
 
 
1450
class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        """A fresh NewPack exposes the expected transports, indices and state."""
        upload_transport = self.get_transport('upload')
        pack_transport = self.get_transport('pack')
        index_transport = self.get_transport('index')
        upload_transport.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_transport,
            upload_transport=upload_transport,
            pack_transport=pack_transport,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
        # Indices are builders of the class configured on the collection.
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        # The transports are taken directly from the collection.
        self.assertTrue(pack.upload_transport is upload_transport)
        self.assertTrue(pack.index_transport is index_transport)
        self.assertTrue(pack.pack_transport is pack_transport)
        # Sizes are unknown until the pack is finished.
        self.assertEqual(None, pack.index_sizes)
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)
 
1479
 
 
1480
 
 
1481
class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        """Packing selected revisions moves their packs to the front."""
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        # Four snapshots, each committed separately, give four pack files.
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files should
        # be moved to the front of the stack
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        new_pack = packer.pack()
        self.assertEqual(new_packs, packer.packs)
 
1514
 
 
1515
 
 
1516
class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        """Return the pack collection of a fresh repository."""
        repo = self.make_repository('.')
        return repo._pack_collection

    def test_open_pack_will_optimise(self):
        """open_pack() creates a NewPack with all indices set to optimise
        for size."""
        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
                                            [], '.test')
        new_pack = packer.open_pack()
        self.addCleanup(new_pack.abort) # ensure cleanup
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        # Every index on the new pack must be flagged for size optimisation.
        self.assertTrue(new_pack.revision_index._optimize_for_size)
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
        self.assertTrue(new_pack.text_index._optimize_for_size)
        self.assertTrue(new_pack.signature_index._optimize_for_size)
 
1533
 
 
1534
 
 
1535
class TestGCCHKPacker(TestCaseWithTransport):
    """Tests for GCCHKPacker when revision and inventory data are split
    across pack files (or missing entirely)."""

    def make_abc_branch(self):
        """Return a branch with three revisions A -> B -> C."""
        builder = self.make_branch_builder('source')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('file', 'file-id', 'file', 'content\n')),
            ])
        builder.build_snapshot('B', ['A'], [
            ('add', ('dir', 'dir-id', 'directory', None))])
        builder.build_snapshot('C', ['B'], [
            ('modify', ('file-id', 'new content\n'))])
        builder.finish_series()
        return builder.get_branch()

    def make_branch_with_disjoint_inventory_and_revision(self):
        """a repo with separate packs for a revisions Revision and Inventory.

        There will be one pack file that holds the Revision content, and one
        for the Inventory content.

        :return: (repository,
                  pack_name_with_rev_A_Revision,
                  pack_name_with_rev_A_Inventory,
                  pack_name_with_rev_C_content)
        """
        b_source = self.make_abc_branch()
        b_base = b_source.bzrdir.sprout('base', revision_id='A').open_branch()
        b_stacked = b_base.bzrdir.sprout('stacked', stacked=True).open_branch()
        b_stacked.lock_write()
        self.addCleanup(b_stacked.unlock)
        b_stacked.fetch(b_source, 'B')
        # Now re-open the stacked repo directly (no fallbacks) so that we can
        # fill in the A rev.
        repo_not_stacked = b_stacked.bzrdir.open_repository()
        repo_not_stacked.lock_write()
        self.addCleanup(repo_not_stacked.unlock)
        # Now we should have a pack file with A's inventory, but not its
        # Revision
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.inventories.keys()))
        self.assertEqual([('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        stacked_pack_names = repo_not_stacked._pack_collection.names()
        # We have a couple names here, figure out which has A's inventory
        for name in stacked_pack_names:
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
            if ('A',) in keys:
                inv_a_pack_name = name
                break
        else:
            self.fail('Could not find pack containing A\'s inventory')
        # Fetching A creates exactly one new pack holding A's Revision.
        repo_not_stacked.fetch(b_source.repository, 'A')
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        new_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
        self.assertEqual(1, len(rev_a_pack_names))
        rev_a_pack_name = list(rev_a_pack_names)[0]
        # Now fetch 'C', so we have a couple pack files to join
        repo_not_stacked.fetch(b_source.repository, 'C')
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
        self.assertEqual(1, len(rev_c_pack_names))
        rev_c_pack_name = list(rev_c_pack_names)[0]
        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
                rev_c_pack_name)

    def test_pack_with_distant_inventories(self):
        """Packing succeeds when a revision's inventory lives in another
        pack file that is not part of the pack operation."""
        # See https://bugs.launchpad.net/bzr/+bug/437003
        # When repacking, it is possible to have an inventory in a different
        # pack file than the associated revision. An autopack can then come
        # along, and miss that inventory, and complain.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                    [a_pack, c_pack], '.test-pack')
        # This would raise ValueError in bug #437003, but should not raise an
        # error once fixed.
        packer.pack()

    def test_pack_with_missing_inventory(self):
        """Packing raises a clear error when an inventory is truly missing."""
        # Similar to test_pack_with_missing_inventory, but this time, we force
        # the A inventory to actually be gone from the repository.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
            repo._pack_collection.all_packs(), '.test-pack')
        e = self.assertRaises(ValueError, packer.pack)
        packer.new_pack.abort()
        self.assertContainsRe(str(e),
            r"We are missing inventories for revisions: .*'A'")
 
1633
 
 
1634
 
 
1635
class TestCrossFormatPacks(TestCaseWithTransport):
    """Check when cross-format fetches trigger a pack() on the target.

    Fetching between differing repository formats can leave the target
    sub-optimally packed, so the fetch/sink code may call target.pack()
    with a hint.  These tests assert pack() is (or is not) called, and
    that a hint is supplied when one is expected.
    """

    def log_pack(self, hint=None):
        # Record the call, delegate to the real pack(), and check the hint
        # expectation set up by the run_* helpers.
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def _prepare_source_and_target(self, src_fmt, target_fmt,
                                   expect_pack_called):
        """Create a one-commit source tree and an instrumented target repo.

        Shared fixture for run_stream and run_fetch: both repositories are
        locked (with cleanups) and target.pack is replaced by log_pack.

        :return: (source_tree, tip_revision_id, target_repository)
        """
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        return source_tree, tip, target

    def _check_pack_called(self, expect_pack_called):
        # Exactly one pack() call when expected, none otherwise.
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        """Fetch via the stream (sink) interface and check pack() calls."""
        source_tree, tip, target = self._prepare_source_and_target(
            src_fmt, target_fmt, expect_pack_called)
        source = source_tree.branch.repository._get_source(target._format)
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, revision_ids=[tip])
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        self._check_pack_called(expect_pack_called)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        """Fetch via Repository.fetch and check pack() calls."""
        source_tree, tip, target = self._prepare_source_and_target(
            src_fmt, target_fmt, expect_pack_called)
        target.fetch(source_tree.branch.repository)
        self._check_pack_called(expect_pack_called)

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)
 
1713
 
 
1714
 
 
1715
class Test_LazyListJoin(tests.TestCase):
    """Tests for repository._LazyListJoin."""

    def test__repr__(self):
        """repr() names the class and shows the joined list arguments."""
        joined = repository._LazyListJoin(['a'], ['b'])
        expected = "bzrlib.repository._LazyListJoin((['a'], ['b']))"
        self.assertEqual(expected, repr(joined))
 
1721
 
 
1722
 
 
1723
class TestFeatures(tests.TestCaseWithTransport):
 
1724
 
 
1725
    def test_open_with_present_feature(self):
        """A required format feature passes check_support_status when the
        feature has been registered."""
        # Register the feature for this test only; the cleanup unregisters it.
        self.addCleanup(
            repository.RepositoryFormatMetaDir.unregister_feature,
            "makes-cheese-sandwich")
        repository.RepositoryFormatMetaDir.register_feature(
            "makes-cheese-sandwich")
        repo = self.make_repository('.')
        repo.lock_write()
        repo._format.features["makes-cheese-sandwich"] = "required"
        # Must not raise: the required feature is known.
        repo._format.check_support_status(False)
        repo.unlock()
 
1736
 
 
1737
    def test_open_with_missing_required_feature(self):
 
1738
        repo = self.make_repository('.')
 
1739
        repo.lock_write()
 
1740
        repo._format.features["makes-cheese-sandwich"] = "required"
 
1741
        self.assertRaises(errors.MissingFeature,
 
1742
            repo._format.check_support_status, False)