~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

Merge bzr.dev and tree-file-ids-as-tuples.

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006, 2007, 2008 Canonical Ltd
 
1
# Copyright (C) 2006-2011 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
12
12
#
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
 
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16
16
 
17
17
"""Tests for the Repository facility that are not interface tests.
18
18
 
23
23
"""
24
24
 
25
25
from stat import S_ISDIR
26
 
from StringIO import StringIO
27
26
 
28
27
import bzrlib
29
 
from bzrlib.errors import (NotBranchError,
30
 
                           NoSuchFile,
31
 
                           UnknownFormatError,
32
 
                           UnsupportedFormatError,
33
 
                           )
34
 
from bzrlib import graph
 
28
from bzrlib.errors import (
 
29
    UnknownFormatError,
 
30
    UnsupportedFormatError,
 
31
    )
 
32
from bzrlib import (
 
33
    btree_index,
 
34
    symbol_versioning,
 
35
    tests,
 
36
    transport,
 
37
    vf_search,
 
38
    )
35
39
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
36
 
from bzrlib.index import GraphIndex, InMemoryGraphIndex
 
40
from bzrlib.index import GraphIndex
37
41
from bzrlib.repository import RepositoryFormat
38
 
from bzrlib.smart import server
39
42
from bzrlib.tests import (
40
43
    TestCase,
41
44
    TestCaseWithTransport,
42
 
    TestSkipped,
43
 
    test_knit,
44
 
    )
45
 
from bzrlib.transport import (
46
 
    fakenfs,
47
 
    get_transport,
48
 
    )
49
 
from bzrlib.transport.memory import MemoryServer
50
 
from bzrlib.util import bencode
 
45
    )
51
46
from bzrlib import (
52
47
    bzrdir,
53
48
    errors,
54
49
    inventory,
55
50
    osutils,
56
 
    progress,
57
51
    repository,
58
52
    revision as _mod_revision,
59
 
    symbol_versioning,
60
53
    upgrade,
 
54
    versionedfile,
 
55
    vf_repository,
61
56
    workingtree,
62
57
    )
63
 
from bzrlib.repofmt import knitrepo, weaverepo, pack_repo
 
58
from bzrlib.repofmt import (
 
59
    groupcompress_repo,
 
60
    knitrepo,
 
61
    knitpack_repo,
 
62
    pack_repo,
 
63
    )
64
64
 
65
65
 
66
66
class TestDefaultFormat(TestCase):
68
68
    def test_get_set_default_format(self):
69
69
        old_default = bzrdir.format_registry.get('default')
70
70
        private_default = old_default().repository_format.__class__
71
 
        old_format = repository.RepositoryFormat.get_default_format()
 
71
        old_format = repository.format_registry.get_default()
72
72
        self.assertTrue(isinstance(old_format, private_default))
73
73
        def make_sample_bzrdir():
74
74
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
88
88
            bzrdir.format_registry.remove('default')
89
89
            bzrdir.format_registry.remove('sample')
90
90
            bzrdir.format_registry.register('default', old_default, '')
91
 
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
 
91
        self.assertIsInstance(repository.format_registry.get_default(),
92
92
                              old_format.__class__)
93
93
 
94
94
 
95
 
class SampleRepositoryFormat(repository.RepositoryFormat):
 
95
class SampleRepositoryFormat(repository.RepositoryFormatMetaDir):
96
96
    """A sample format
97
97
 
98
 
    this format is initializable, unsupported to aid in testing the 
 
98
    this format is initializable, unsupported to aid in testing the
99
99
    open and open(unsupported=True) routines.
100
100
    """
101
101
 
102
 
    def get_format_string(self):
 
102
    @classmethod
 
103
    def get_format_string(cls):
103
104
        """See RepositoryFormat.get_format_string()."""
104
105
        return "Sample .bzr repository format."
105
106
 
116
117
        return "opened repository."
117
118
 
118
119
 
 
120
class SampleExtraRepositoryFormat(repository.RepositoryFormat):
 
121
    """A sample format that can not be used in a metadir
 
122
 
 
123
    """
 
124
 
 
125
    def get_format_string(self):
 
126
        raise NotImplementedError
 
127
 
 
128
 
119
129
class TestRepositoryFormat(TestCaseWithTransport):
120
130
    """Tests for the Repository format detection used by the bzr meta dir facility.BzrBranchFormat facility."""
121
131
 
122
132
    def test_find_format(self):
123
133
        # is the right format object found for a repository?
124
134
        # create a branch with a few known format objects.
125
 
        # this is not quite the same as 
 
135
        # this is not quite the same as
126
136
        self.build_tree(["foo/", "bar/"])
127
137
        def check_format(format, url):
128
138
            dir = format._matchingbzrdir.initialize(url)
129
139
            format.initialize(dir)
130
 
            t = get_transport(url)
131
 
            found_format = repository.RepositoryFormat.find_format(dir)
132
 
            self.failUnless(isinstance(found_format, format.__class__))
133
 
        check_format(weaverepo.RepositoryFormat7(), "bar")
134
 
        
 
140
            t = transport.get_transport_from_path(url)
 
141
            found_format = repository.RepositoryFormatMetaDir.find_format(dir)
 
142
            self.assertIsInstance(found_format, format.__class__)
 
143
        check_format(repository.format_registry.get_default(), "bar")
 
144
 
135
145
    def test_find_format_no_repository(self):
136
146
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
137
147
        self.assertRaises(errors.NoRepositoryPresent,
138
 
                          repository.RepositoryFormat.find_format,
 
148
                          repository.RepositoryFormatMetaDir.find_format,
139
149
                          dir)
140
150
 
 
151
    def test_from_string(self):
 
152
        self.assertIsInstance(
 
153
            SampleRepositoryFormat.from_string(
 
154
                "Sample .bzr repository format."),
 
155
            SampleRepositoryFormat)
 
156
        self.assertRaises(AssertionError,
 
157
            SampleRepositoryFormat.from_string,
 
158
                "Different .bzr repository format.")
 
159
 
141
160
    def test_find_format_unknown_format(self):
142
161
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
143
162
        SampleRepositoryFormat().initialize(dir)
144
163
        self.assertRaises(UnknownFormatError,
145
 
                          repository.RepositoryFormat.find_format,
 
164
                          repository.RepositoryFormatMetaDir.find_format,
146
165
                          dir)
147
166
 
 
167
    def test_find_format_with_features(self):
 
168
        tree = self.make_branch_and_tree('.', format='2a')
 
169
        tree.branch.repository.update_feature_flags({"name": "necessity"})
 
170
        found_format = repository.RepositoryFormatMetaDir.find_format(tree.bzrdir)
 
171
        self.assertIsInstance(found_format, repository.RepositoryFormatMetaDir)
 
172
        self.assertEquals(found_format.features.get("name"), "necessity")
 
173
        self.assertRaises(errors.MissingFeature, found_format.check_support_status,
 
174
            True)
 
175
        self.addCleanup(repository.RepositoryFormatMetaDir.unregister_feature,
 
176
            "name")
 
177
        repository.RepositoryFormatMetaDir.register_feature("name")
 
178
        found_format.check_support_status(True)
 
179
 
148
180
    def test_register_unregister_format(self):
 
181
        # Test deprecated format registration functions
149
182
        format = SampleRepositoryFormat()
150
183
        # make a control dir
151
184
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
152
185
        # make a repo
153
186
        format.initialize(dir)
154
187
        # register a format for it.
155
 
        repository.RepositoryFormat.register_format(format)
 
188
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
 
189
            repository.RepositoryFormat.register_format, format)
156
190
        # which repository.Open will refuse (not supported)
157
 
        self.assertRaises(UnsupportedFormatError, repository.Repository.open, self.get_url())
 
191
        self.assertRaises(UnsupportedFormatError, repository.Repository.open,
 
192
            self.get_url())
158
193
        # but open(unsupported) will work
159
194
        self.assertEqual(format.open(dir), "opened repository.")
160
195
        # unregister the format
161
 
        repository.RepositoryFormat.unregister_format(format)
162
 
 
163
 
 
164
 
class TestFormat6(TestCaseWithTransport):
165
 
 
166
 
    def test_attribute__fetch_order(self):
167
 
        """Weaves need topological data insertion."""
168
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
169
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
170
 
        self.assertEqual('topological', repo._fetch_order)
171
 
 
172
 
    def test_attribute__fetch_uses_deltas(self):
173
 
        """Weaves do not reuse deltas."""
174
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
175
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
176
 
        self.assertEqual(False, repo._fetch_uses_deltas)
177
 
 
178
 
    def test_attribute__fetch_reconcile(self):
179
 
        """Weave repositories need a reconcile after fetch."""
180
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
181
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
182
 
        self.assertEqual(True, repo._fetch_reconcile)
183
 
 
184
 
    def test_no_ancestry_weave(self):
185
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
186
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
187
 
        # We no longer need to create the ancestry.weave file
188
 
        # since it is *never* used.
189
 
        self.assertRaises(NoSuchFile,
190
 
                          control.transport.get,
191
 
                          'ancestry.weave')
192
 
 
193
 
    def test_supports_external_lookups(self):
194
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
195
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
196
 
        self.assertFalse(repo._format.supports_external_lookups)
197
 
 
198
 
 
199
 
class TestFormat7(TestCaseWithTransport):
200
 
 
201
 
    def test_attribute__fetch_order(self):
202
 
        """Weaves need topological data insertion."""
203
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
204
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
205
 
        self.assertEqual('topological', repo._fetch_order)
206
 
 
207
 
    def test_attribute__fetch_uses_deltas(self):
208
 
        """Weaves do not reuse deltas."""
209
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
210
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
211
 
        self.assertEqual(False, repo._fetch_uses_deltas)
212
 
 
213
 
    def test_attribute__fetch_reconcile(self):
214
 
        """Weave repositories need a reconcile after fetch."""
215
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
216
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
217
 
        self.assertEqual(True, repo._fetch_reconcile)
218
 
 
219
 
    def test_disk_layout(self):
220
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
221
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
222
 
        # in case of side effects of locking.
223
 
        repo.lock_write()
224
 
        repo.unlock()
225
 
        # we want:
226
 
        # format 'Bazaar-NG Repository format 7'
227
 
        # lock ''
228
 
        # inventory.weave == empty_weave
229
 
        # empty revision-store directory
230
 
        # empty weaves directory
231
 
        t = control.get_repository_transport(None)
232
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
233
 
                             t.get('format').read())
234
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
235
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
236
 
        self.assertEqualDiff('# bzr weave file v5\n'
237
 
                             'w\n'
238
 
                             'W\n',
239
 
                             t.get('inventory.weave').read())
240
 
        # Creating a file with id Foo:Bar results in a non-escaped file name on
241
 
        # disk.
242
 
        control.create_branch()
243
 
        tree = control.create_workingtree()
244
 
        tree.add(['foo'], ['Foo:Bar'], ['file'])
245
 
        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
246
 
        tree.commit('first post', rev_id='first')
247
 
        self.assertEqualDiff(
248
 
            '# bzr weave file v5\n'
249
 
            'i\n'
250
 
            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
251
 
            'n first\n'
252
 
            '\n'
253
 
            'w\n'
254
 
            '{ 0\n'
255
 
            '. content\n'
256
 
            '}\n'
257
 
            'W\n',
258
 
            t.get('weaves/74/Foo%3ABar.weave').read())
259
 
 
260
 
    def test_shared_disk_layout(self):
261
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
262
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
263
 
        # we want:
264
 
        # format 'Bazaar-NG Repository format 7'
265
 
        # inventory.weave == empty_weave
266
 
        # empty revision-store directory
267
 
        # empty weaves directory
268
 
        # a 'shared-storage' marker file.
269
 
        # lock is not present when unlocked
270
 
        t = control.get_repository_transport(None)
271
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
272
 
                             t.get('format').read())
273
 
        self.assertEqualDiff('', t.get('shared-storage').read())
274
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
275
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
276
 
        self.assertEqualDiff('# bzr weave file v5\n'
277
 
                             'w\n'
278
 
                             'W\n',
279
 
                             t.get('inventory.weave').read())
280
 
        self.assertFalse(t.has('branch-lock'))
281
 
 
282
 
    def test_creates_lockdir(self):
283
 
        """Make sure it appears to be controlled by a LockDir existence"""
284
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
285
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
286
 
        t = control.get_repository_transport(None)
287
 
        # TODO: Should check there is a 'lock' toplevel directory, 
288
 
        # regardless of contents
289
 
        self.assertFalse(t.has('lock/held/info'))
290
 
        repo.lock_write()
291
 
        try:
292
 
            self.assertTrue(t.has('lock/held/info'))
293
 
        finally:
294
 
            # unlock so we don't get a warning about failing to do so
295
 
            repo.unlock()
296
 
 
297
 
    def test_uses_lockdir(self):
298
 
        """repo format 7 actually locks on lockdir"""
299
 
        base_url = self.get_url()
300
 
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
301
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
302
 
        t = control.get_repository_transport(None)
303
 
        repo.lock_write()
304
 
        repo.unlock()
305
 
        del repo
306
 
        # make sure the same lock is created by opening it
307
 
        repo = repository.Repository.open(base_url)
308
 
        repo.lock_write()
309
 
        self.assertTrue(t.has('lock/held/info'))
310
 
        repo.unlock()
311
 
        self.assertFalse(t.has('lock/held/info'))
312
 
 
313
 
    def test_shared_no_tree_disk_layout(self):
314
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
315
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
316
 
        repo.set_make_working_trees(False)
317
 
        # we want:
318
 
        # format 'Bazaar-NG Repository format 7'
319
 
        # lock ''
320
 
        # inventory.weave == empty_weave
321
 
        # empty revision-store directory
322
 
        # empty weaves directory
323
 
        # a 'shared-storage' marker file.
324
 
        t = control.get_repository_transport(None)
325
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
326
 
                             t.get('format').read())
327
 
        ## self.assertEqualDiff('', t.get('lock').read())
328
 
        self.assertEqualDiff('', t.get('shared-storage').read())
329
 
        self.assertEqualDiff('', t.get('no-working-trees').read())
330
 
        repo.set_make_working_trees(True)
331
 
        self.assertFalse(t.has('no-working-trees'))
332
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
333
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
334
 
        self.assertEqualDiff('# bzr weave file v5\n'
335
 
                             'w\n'
336
 
                             'W\n',
337
 
                             t.get('inventory.weave').read())
338
 
 
339
 
    def test_supports_external_lookups(self):
340
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
341
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
342
 
        self.assertFalse(repo._format.supports_external_lookups)
 
196
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
 
197
            repository.RepositoryFormat.unregister_format, format)
 
198
 
 
199
 
 
200
class TestRepositoryFormatRegistry(TestCase):
 
201
 
 
202
    def setUp(self):
 
203
        super(TestRepositoryFormatRegistry, self).setUp()
 
204
        self.registry = repository.RepositoryFormatRegistry()
 
205
 
 
206
    def test_register_unregister_format(self):
 
207
        format = SampleRepositoryFormat()
 
208
        self.registry.register(format)
 
209
        self.assertEquals(format, self.registry.get("Sample .bzr repository format."))
 
210
        self.registry.remove(format)
 
211
        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")
 
212
 
 
213
    def test_get_all(self):
 
214
        format = SampleRepositoryFormat()
 
215
        self.assertEquals([], self.registry._get_all())
 
216
        self.registry.register(format)
 
217
        self.assertEquals([format], self.registry._get_all())
 
218
 
 
219
    def test_register_extra(self):
 
220
        format = SampleExtraRepositoryFormat()
 
221
        self.assertEquals([], self.registry._get_all())
 
222
        self.registry.register_extra(format)
 
223
        self.assertEquals([format], self.registry._get_all())
 
224
 
 
225
    def test_register_extra_lazy(self):
 
226
        self.assertEquals([], self.registry._get_all())
 
227
        self.registry.register_extra_lazy("bzrlib.tests.test_repository",
 
228
            "SampleExtraRepositoryFormat")
 
229
        formats = self.registry._get_all()
 
230
        self.assertEquals(1, len(formats))
 
231
        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
343
232
 
344
233
 
345
234
class TestFormatKnit1(TestCaseWithTransport):
346
 
    
 
235
 
347
236
    def test_attribute__fetch_order(self):
348
237
        """Knits need topological data insertion."""
349
238
        repo = self.make_repository('.',
350
239
                format=bzrdir.format_registry.get('knit')())
351
 
        self.assertEqual('topological', repo._fetch_order)
 
240
        self.assertEqual('topological', repo._format._fetch_order)
352
241
 
353
242
    def test_attribute__fetch_uses_deltas(self):
354
243
        """Knits reuse deltas."""
355
244
        repo = self.make_repository('.',
356
245
                format=bzrdir.format_registry.get('knit')())
357
 
        self.assertEqual(True, repo._fetch_uses_deltas)
 
246
        self.assertEqual(True, repo._format._fetch_uses_deltas)
358
247
 
359
248
    def test_disk_layout(self):
360
249
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
447
336
        repo = self.make_repository('.',
448
337
                format=bzrdir.format_registry.get('knit')())
449
338
        inv_xml = '<inventory format="5">\n</inventory>\n'
450
 
        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
 
339
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
451
340
        self.assertEqual('test-rev-id', inv.root.revision)
452
341
 
453
342
    def test_deserialise_uses_global_revision_id(self):
459
348
        # Arguably, the deserialise_inventory should detect a mismatch, and
460
349
        # raise an error, rather than silently using one revision_id over the
461
350
        # other.
462
 
        self.assertRaises(AssertionError, repo.deserialise_inventory,
 
351
        self.assertRaises(AssertionError, repo._deserialise_inventory,
463
352
            'test-rev-id', inv_xml)
464
 
        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
 
353
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
465
354
        self.assertEqual('other-rev-id', inv.root.revision)
466
355
 
467
356
    def test_supports_external_lookups(self):
473
362
class DummyRepository(object):
474
363
    """A dummy repository for testing."""
475
364
 
 
365
    _format = None
476
366
    _serializer = None
477
367
 
478
368
    def supports_rich_root(self):
 
369
        if self._format is not None:
 
370
            return self._format.rich_root_data
479
371
        return False
480
372
 
 
373
    def get_graph(self):
 
374
        raise NotImplementedError
 
375
 
 
376
    def get_parent_map(self, revision_ids):
 
377
        raise NotImplementedError
 
378
 
481
379
 
482
380
class InterDummy(repository.InterRepository):
483
381
    """An inter-repository optimised code path for DummyRepository.
490
388
    @staticmethod
491
389
    def is_compatible(repo_source, repo_target):
492
390
        """InterDummy is compatible with DummyRepository."""
493
 
        return (isinstance(repo_source, DummyRepository) and 
 
391
        return (isinstance(repo_source, DummyRepository) and
494
392
            isinstance(repo_target, DummyRepository))
495
393
 
496
394
 
504
402
        # classes do not barf inappropriately when a surprising repository type
505
403
        # is handed to them.
506
404
        dummy_a = DummyRepository()
 
405
        dummy_a._format = RepositoryFormat()
 
406
        dummy_a._format.supports_full_versioned_files = True
507
407
        dummy_b = DummyRepository()
 
408
        dummy_b._format = RepositoryFormat()
 
409
        dummy_b._format.supports_full_versioned_files = True
508
410
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
509
411
 
510
412
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
511
413
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
512
 
        
 
414
 
513
415
        The effective default is now InterSameDataRepository because there is
514
416
        no actual sane default in the presence of incompatible data models.
515
417
        """
516
418
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
517
 
        self.assertEqual(repository.InterSameDataRepository,
 
419
        self.assertEqual(vf_repository.InterSameDataRepository,
518
420
                         inter_repo.__class__)
519
421
        self.assertEqual(repo_a, inter_repo.source)
520
422
        self.assertEqual(repo_b, inter_repo.target)
526
428
        # pair that it returns true on for the is_compatible static method
527
429
        # check
528
430
        dummy_a = DummyRepository()
 
431
        dummy_a._format = RepositoryFormat()
529
432
        dummy_b = DummyRepository()
 
433
        dummy_b._format = RepositoryFormat()
530
434
        repo = self.make_repository('.')
531
435
        # hack dummies to look like repo somewhat.
532
436
        dummy_a._serializer = repo._serializer
 
437
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
 
438
        dummy_a._format.rich_root_data = repo._format.rich_root_data
 
439
        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
533
440
        dummy_b._serializer = repo._serializer
 
441
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
 
442
        dummy_b._format.rich_root_data = repo._format.rich_root_data
 
443
        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
534
444
        repository.InterRepository.register_optimiser(InterDummy)
535
445
        try:
536
446
            # we should get the default for something InterDummy returns False
549
459
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
550
460
 
551
461
 
552
 
class TestInterWeaveRepo(TestCaseWithTransport):
553
 
 
554
 
    def test_is_compatible_and_registered(self):
555
 
        # InterWeaveRepo is compatible when either side
556
 
        # is a format 5/6/7 branch
557
 
        from bzrlib.repofmt import knitrepo, weaverepo
558
 
        formats = [weaverepo.RepositoryFormat5(),
559
 
                   weaverepo.RepositoryFormat6(),
560
 
                   weaverepo.RepositoryFormat7()]
561
 
        incompatible_formats = [weaverepo.RepositoryFormat4(),
562
 
                                knitrepo.RepositoryFormatKnit1(),
563
 
                                ]
564
 
        repo_a = self.make_repository('a')
565
 
        repo_b = self.make_repository('b')
566
 
        is_compatible = repository.InterWeaveRepo.is_compatible
567
 
        for source in incompatible_formats:
568
 
            # force incompatible left then right
569
 
            repo_a._format = source
570
 
            repo_b._format = formats[0]
571
 
            self.assertFalse(is_compatible(repo_a, repo_b))
572
 
            self.assertFalse(is_compatible(repo_b, repo_a))
573
 
        for source in formats:
574
 
            repo_a._format = source
575
 
            for target in formats:
576
 
                repo_b._format = target
577
 
                self.assertTrue(is_compatible(repo_a, repo_b))
578
 
        self.assertEqual(repository.InterWeaveRepo,
579
 
                         repository.InterRepository.get(repo_a,
580
 
                                                        repo_b).__class__)
 
462
class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):
 
463
 
 
464
    @classmethod
 
465
    def get_format_string(cls):
 
466
        return "Test Format 1"
 
467
 
 
468
 
 
469
class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):
 
470
 
 
471
    @classmethod
 
472
    def get_format_string(cls):
 
473
        return "Test Format 2"
581
474
 
582
475
 
583
476
class TestRepositoryConverter(TestCaseWithTransport):
584
477
 
585
478
    def test_convert_empty(self):
586
 
        t = get_transport(self.get_url('.'))
 
479
        source_format = TestRepositoryFormat1()
 
480
        target_format = TestRepositoryFormat2()
 
481
        repository.format_registry.register(source_format)
 
482
        self.addCleanup(repository.format_registry.remove,
 
483
            source_format)
 
484
        repository.format_registry.register(target_format)
 
485
        self.addCleanup(repository.format_registry.remove,
 
486
            target_format)
 
487
        t = self.get_transport()
587
488
        t.mkdir('repository')
588
489
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
589
 
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
590
 
        target_format = knitrepo.RepositoryFormatKnit1()
 
490
        repo = TestRepositoryFormat1().initialize(repo_dir)
591
491
        converter = repository.CopyConverter(target_format)
592
492
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
593
493
        try:
598
498
        self.assertTrue(isinstance(target_format, repo._format.__class__))
599
499
 
600
500
 
601
 
class TestMisc(TestCase):
602
 
    
603
 
    def test_unescape_xml(self):
604
 
        """We get some kind of error when malformed entities are passed"""
605
 
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;') 
606
 
 
607
 
 
608
501
class TestRepositoryFormatKnit3(TestCaseWithTransport):
609
502
 
610
503
    def test_attribute__fetch_order(self):
612
505
        format = bzrdir.BzrDirMetaFormat1()
613
506
        format.repository_format = knitrepo.RepositoryFormatKnit3()
614
507
        repo = self.make_repository('.', format=format)
615
 
        self.assertEqual('topological', repo._fetch_order)
 
508
        self.assertEqual('topological', repo._format._fetch_order)
616
509
 
617
510
    def test_attribute__fetch_uses_deltas(self):
618
511
        """Knits reuse deltas."""
619
512
        format = bzrdir.BzrDirMetaFormat1()
620
513
        format.repository_format = knitrepo.RepositoryFormatKnit3()
621
514
        repo = self.make_repository('.', format=format)
622
 
        self.assertEqual(True, repo._fetch_uses_deltas)
 
515
        self.assertEqual(True, repo._format._fetch_uses_deltas)
623
516
 
624
517
    def test_convert(self):
625
518
        """Ensure the upgrade adds weaves for roots"""
631
524
        revision_tree.lock_read()
632
525
        try:
633
526
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
634
 
                revision_tree.inventory.root.file_id)
 
527
                revision_tree.get_root_id())
635
528
        finally:
636
529
            revision_tree.unlock()
637
530
        format = bzrdir.BzrDirMetaFormat1()
641
534
        revision_tree = tree.branch.repository.revision_tree('dull')
642
535
        revision_tree.lock_read()
643
536
        try:
644
 
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
 
537
            revision_tree.get_file_lines(revision_tree.get_root_id())
645
538
        finally:
646
539
            revision_tree.unlock()
647
540
        tree.commit("Another dull commit", rev_id='dull2')
648
541
        revision_tree = tree.branch.repository.revision_tree('dull2')
649
542
        revision_tree.lock_read()
650
543
        self.addCleanup(revision_tree.unlock)
651
 
        self.assertEqual('dull', revision_tree.inventory.root.revision)
 
544
        self.assertEqual('dull',
 
545
                revision_tree.get_file_revision(revision_tree.get_root_id()))
652
546
 
653
547
    def test_supports_external_lookups(self):
654
548
        format = bzrdir.BzrDirMetaFormat1()
657
551
        self.assertFalse(repo._format.supports_external_lookups)
658
552
 
659
553
 
 
554
class Test2a(tests.TestCaseWithMemoryTransport):
    """Tests specific to the '2a' (groupcompress + CHK inventory) format."""

    def test_chk_bytes_uses_custom_btree_parser(self):
        """chk_bytes indices use the gc-specific btree leaf factory."""
        mt = self.make_branch_and_memory_tree('test', format='2a')
        mt.lock_write()
        self.addCleanup(mt.unlock)
        mt.add([''], ['root-id'])
        mt.commit('first')
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
        # It should also work if we re-open the repo
        repo = mt.branch.repository.bzrdir.open_repository()
        repo.lock_read()
        self.addCleanup(repo.unlock)
        index = repo.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)

    def test_fetch_combines_groups(self):
        """Fetching both versions of a file gives them one shared group.

        NOTE(review): this method was previously defined three times with
        byte-identical bodies.  A Python class body binds names sequentially,
        so the later definitions silently shadowed the earlier ones and only
        one copy ever ran; the duplicates are removed here.
        """
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])

    def test_format_pack_compresses_True(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.lock_write()
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        tree.unlock()
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
        inv.parent_id_basename_to_file_id._ensure_root()
        inv.id_to_entry._ensure_root()
        # Both CHK maps should be configured with 64KiB pages.
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
        self.assertEqual(65536,
            inv.parent_id_basename_to_file_id._root_node.maximum_size)

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        # NOTE(review): smoke test -- it only checks the autopack triggered
        # by the commit loop completes without error; nothing is asserted.
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        """pack(hint=...) combines only the named packs."""
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()
        # 2 to combine
        tree.commit('2')
        tree.commit('3')
        # 'all_names' rather than 'all', to avoid shadowing the builtin.
        all_names = tree.branch.repository._pack_collection.names()
        combine = list(set(all_names) - set(to_keep))
        self.assertLength(3, all_names)
        self.assertLength(2, combine)
        tree.branch.repository.pack(hint=combine)
        final = tree.branch.repository._pack_collection.names()
        self.assertLength(2, final)
        self.assertFalse(combine[0] in final)
        self.assertFalse(combine[1] in final)
        self.assertSubset(to_keep, final)

    def test_stream_source_to_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='2a')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
                            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = vf_search.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)

    def test_inconsistency_fatal(self):
        repo = self.make_repository('repo', format='2a')
        # Only the revisions index is marked inconsistency-fatal.
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
        self.assertFalse(repo.texts._index._inconsistency_fatal)
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
 
778
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
    """Which StreamSource class do pack repositories hand out?"""

    def _get_stream_source(self, source_format, target_format):
        # Build a 'source' and 'target' repository in the given formats and
        # return the stream source the source selects for the target.
        src = self.make_repository('source', format=source_format)
        tgt = self.make_repository('target', format=target_format)
        return src._get_source(tgt._format)

    def test_source_to_exact_pack_092(self):
        ss = self._get_stream_source('pack-0.92', 'pack-0.92')
        self.assertIsInstance(ss, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        ss = self._get_stream_source('rich-root-pack', 'rich-root-pack')
        self.assertIsInstance(ss, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        ss = self._get_stream_source('1.9', '1.9')
        self.assertIsInstance(ss, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        ss = self._get_stream_source('1.9-rich-root', '1.9-rich-root')
        self.assertIsInstance(ss, knitpack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        # A matching format seen through a smart server still gets the
        # optimised KnitPackStreamSource.
        trans = self.make_smart_server('target')
        trans.ensure_base()
        src = self.make_repository('source', format='1.9')
        self.make_repository('target', format='1.9')
        remote_target = repository.Repository.open(trans.base)
        ss = src._get_source(remote_target._format)
        self.assertIsInstance(ss, knitpack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        ss = self._get_stream_source('pack-0.92', '1.9')
        self.assertIs(type(ss), vf_repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        ss = self._get_stream_source('1.9', '1.9-rich-root')
        self.assertIs(type(ss), vf_repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        # Mismatched formats over a smart server fall back to the generic
        # StreamSource.
        trans = self.make_smart_server('target')
        trans.ensure_base()
        src = self.make_repository('source', format='1.9')
        self.make_repository('target', format='1.6')
        remote_target = repository.Repository.open(trans.base)
        ss = src._get_source(remote_target._format)
        self.assertIs(type(ss), vf_repository.StreamSource)

    def test_stream_source_to_knit(self):
        ss = self._get_stream_source('pack-0.92', 'dirstate')
        self.assertIs(type(ss), vf_repository.StreamSource)
 
841
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        # Every test starts from a branch whose sole revision, 'initial',
        # adds the tree root.
        self.builder = self.make_branch_builder('source')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        """Check the parent ids found for rev_set, ignoring ordering."""
        found = self.repo._find_parent_ids_of_revisions(rev_set)
        self.assertEqual(sorted(expected_result), sorted(found))

    def test_simple(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.assertParentIds(['revid1'], ['revid2'])

    def test_not_first_parent(self):
        for revid, parents in [('revid1', None),
                               ('revid2', ['revid1']),
                               ('revid3', ['revid2'])]:
            self.builder.build_snapshot(revid, parents, [])
        self.assertParentIds(['revid1'], ['revid3', 'revid2'])

    def test_not_null(self):
        self.assertParentIds([], ['initial'])

    def test_not_null_set(self):
        self.builder.build_snapshot('revid1', None, [])
        self.assertParentIds([], [_mod_revision.NULL_REVISION])

    def test_ghost(self):
        self.builder.build_snapshot('revid1', None, [])
        self.assertParentIds(['initial'], ['ghost', 'revid1'])

    def test_ghost_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        self.assertParentIds(['ghost', 'initial'], ['revid2', 'revid1'])

    def test_righthand_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        for revid in ('revid2a', 'revid2b'):
            self.builder.build_snapshot(revid, ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        self.assertParentIds(['revid1', 'revid2b'], ['revid3', 'revid2a'])
660
899
class TestWithBrokenRepo(TestCaseWithTransport):
661
900
    """These tests seem to be more appropriate as interface tests?"""
662
901
 
675
914
            inv = inventory.Inventory(revision_id='rev1a')
676
915
            inv.root.revision = 'rev1a'
677
916
            self.add_file(repo, inv, 'file1', 'rev1a', [])
 
917
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
678
918
            repo.add_inventory('rev1a', inv, [])
679
919
            revision = _mod_revision.Revision('rev1a',
680
920
                committer='jrandom@example.com', timestamp=0,
681
921
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
682
 
            repo.add_revision('rev1a',revision, inv)
 
922
            repo.add_revision('rev1a', revision, inv)
683
923
 
684
924
            # make rev1b, which has no Revision, but has an Inventory, and
685
925
            # file1
715
955
    def add_revision(self, repo, revision_id, inv, parent_ids):
716
956
        inv.revision_id = revision_id
717
957
        inv.root.revision = revision_id
 
958
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
718
959
        repo.add_inventory(revision_id, inv, parent_ids)
719
960
        revision = _mod_revision.Revision(revision_id,
720
961
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
721
962
            timezone=0, message='foo', parent_ids=parent_ids)
722
 
        repo.add_revision(revision_id,revision, inv)
 
963
        repo.add_revision(revision_id, revision, inv)
723
964
 
724
965
    def add_file(self, repo, inv, filename, revision, parents):
725
966
        file_id = filename + '-id'
737
978
        """
738
979
        broken_repo = self.make_broken_repository()
739
980
        empty_repo = self.make_repository('empty-repo')
740
 
        self.assertRaises(errors.RevisionNotPresent, empty_repo.fetch, broken_repo)
 
981
        try:
 
982
            empty_repo.fetch(broken_repo)
 
983
        except (errors.RevisionNotPresent, errors.BzrCheckError):
 
984
            # Test successful: compression parent not being copied leads to
 
985
            # error.
 
986
            return
 
987
        empty_repo.lock_read()
 
988
        self.addCleanup(empty_repo.unlock)
 
989
        text = empty_repo.texts.get_record_stream(
 
990
            [('file2-id', 'rev3')], 'topological', True).next()
 
991
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
741
992
 
742
993
 
743
994
class TestRepositoryPackCollection(TestCaseWithTransport):
750
1001
        repo = self.make_repository('.', format=format)
751
1002
        return repo._pack_collection
752
1003
 
 
1004
    def make_packs_and_alt_repo(self, write_lock=False):
        """Create a pack repo with 3 packs, and access it via a second repo."""
        tree = self.make_branch_and_tree('.', format=self.get_format())
        tree.lock_write()
        self.addCleanup(tree.unlock)
        # Three commits -- the tests that use this fixture rely on there
        # being three separate pack files afterwards.
        rev1 = tree.commit('one')
        rev2 = tree.commit('two')
        rev3 = tree.commit('three')
        # Open the same repository a second time so tests can observe how one
        # pack collection reacts to changes made through the other.
        r = repository.Repository.open('.')
        if write_lock:
            r.lock_write()
        else:
            r.lock_read()
        self.addCleanup(r.unlock)
        packs = r._pack_collection
        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]
 
1022
    def test__clear_obsolete_packs(self):
 
1023
        packs = self.get_packs()
 
1024
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
1025
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1026
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1027
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1028
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1029
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1030
        res = packs._clear_obsolete_packs()
 
1031
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1032
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
 
1033
 
 
1034
    def test__clear_obsolete_packs_preserve(self):
 
1035
        packs = self.get_packs()
 
1036
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
1037
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1038
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1039
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1040
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1041
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1042
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
 
1043
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1044
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
 
1045
                         sorted(obsolete_pack_trans.list_dir('.')))
 
1046
 
753
1047
    def test__max_pack_count(self):
754
1048
        """The maximum pack count is a function of the number of revisions."""
755
1049
        # no revisions - one pack, so that we can have a revision free repo
775
1069
        # check some arbitrary big numbers
776
1070
        self.assertEqual(25, packs._max_pack_count(112894))
777
1071
 
 
1072
    def test_repr(self):
        """repr() of a pack collection names the collection and its repo."""
        packs = self.get_packs()
        # NOTE(review): the second argument is a regex, so the parentheses
        # below form groups rather than matching literal '(' and ')'; the
        # check is therefore looser than it reads.  Confirm before
        # tightening.
        self.assertContainsRe(repr(packs),
            'RepositoryPackCollection(.*Repository(.*))')
 
1077
    def test__obsolete_packs(self):
        """_obsolete_packs copes with files a concurrent process already moved."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Simulate a concurrent update by renaming the .pack file and one of
        # the indices
        packs.transport.rename('packs/%s.pack' % (names[0],),
                               'obsolete_packs/%s.pack' % (names[0],))
        packs.transport.rename('indices/%s.iix' % (names[0],),
                               'obsolete_packs/%s.iix' % (names[0],))
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are still renamed
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))
 
1099
    def test__obsolete_packs_missing_directory(self):
        """_obsolete_packs still works if obsolete_packs/ has been removed."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        r.control_transport.rmdir('obsolete_packs')
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are still renamed
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))
778
1116
    def test_pack_distribution_zero(self):
779
1117
        packs = self.get_packs()
780
1118
        self.assertEqual([0], packs.pack_distribution(0))
903
1241
        tree.lock_read()
904
1242
        self.addCleanup(tree.unlock)
905
1243
        packs = tree.branch.repository._pack_collection
 
1244
        packs.reset()
906
1245
        packs.ensure_loaded()
907
1246
        name = packs.names()[0]
908
1247
        pack_1 = packs.get_pack_by_name(name)
917
1256
        # and the same instance should be returned on successive calls.
918
1257
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
919
1258
 
 
1259
    def test_reload_pack_names_new_entry(self):
        """reload_pack_names picks up a pack added through another instance."""
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Add a new pack file into the repository
        rev4 = tree.commit('four')
        new_names = tree.branch.repository._pack_collection.names()
        new_name = set(new_names).difference(names)
        self.assertEqual(1, len(new_name))
        new_name = new_name.pop()
        # The old collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        # reload returns True when the on-disk name list changed...
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        # And the repository can access the new revision
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
        # ...and False when nothing further has changed.
        self.assertFalse(packs.reload_pack_names())
 
1276
    def test_reload_pack_names_added_and_removed(self):
        """reload_pack_names handles a full repack done by another instance."""
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Now repack the whole thing
        tree.branch.repository.pack()
        new_names = tree.branch.repository._pack_collection.names()
        # The other collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        # First reload reports a change and adopts the repacked names...
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        # ...a second reload is a no-op.
        self.assertFalse(packs.reload_pack_names())
 
1289
    def test_reload_pack_names_preserves_pending(self):
        """Pending in-memory additions/removals survive reload_pack_names."""
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        new_pack.finish()
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        # The diff against what was on disk at load time should show exactly
        # one added and one deleted pack.
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        # After the reload the same pending diff must still be reported.
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
 
1328
    def test_autopack_obsoletes_new_pack(self):
        """An autopack during commit_write_group leaves a single pack file."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # Force autopack to always fire and combine into one pack.
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        r.start_write_group()
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
            ('bogus-rev',), (), None, 'bogus-content\n')])
        # This should trigger an autopack, which will combine everything into a
        # single pack file.
        new_names = r.commit_write_group()
        names = packs.names()
        self.assertEqual(1, len(names))
        self.assertEqual([names[0] + '.pack'],
                         packs._pack_transport.list_dir('.'))
 
1343
    def test_autopack_reloads_and_stops(self):
        """autopack re-reads pack names mid-flight and bails out if stale."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything
        orig_execute = packs._execute_pack_operations
        def _munged_execute_pack_ops(*args, **kwargs):
            # Full-pack through the other handle just before we would execute.
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        # Force the autopack path to consider combining everything.
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        # autopack reports it did nothing, and both views agree on the names.
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())
 
1360
    def test__save_pack_names(self):
        """_save_pack_names moves the given obsolete packs aside."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        packs._save_pack_names(obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # obsolete_packs will also have stuff like .rix and .iix present.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))
 
1373
    def test__save_pack_names_already_obsoleted(self):
 
1374
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1375
        names = packs.names()
 
1376
        pack = packs.get_pack_by_name(names[0])
 
1377
        packs._remove_pack_from_memory(pack)
 
1378
        # We are going to simulate a concurrent autopack by manually obsoleting
 
1379
        # the pack directly.
 
1380
        packs._obsolete_packs([pack])
 
1381
        packs._save_pack_names(clear_obsolete_packs=True,
 
1382
                               obsolete_packs=[pack])
 
1383
        cur_packs = packs._pack_transport.list_dir('.')
 
1384
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
 
1385
        # Note that while we set clear_obsolete_packs=True, it should not
 
1386
        # delete a pack file that we have also scheduled for obsoletion.
 
1387
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
 
1388
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
 
1389
        self.assertEqual([pack.name], sorted(obsolete_names))
 
1390
 
 
1391
    def test_pack_no_obsolete_packs_directory(self):
 
1392
        """Bug #314314, don't fail if obsolete_packs directory does
 
1393
        not exist."""
 
1394
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1395
        r.control_transport.rmdir('obsolete_packs')
 
1396
        packs._clear_obsolete_packs()
 
1397
 
920
1398
 
921
1399
class TestPack(TestCaseWithTransport):
922
1400
    """Tests for the Pack object."""
976
1454
        pack_transport = self.get_transport('pack')
977
1455
        index_transport = self.get_transport('index')
978
1456
        upload_transport.mkdir('.')
979
 
        pack = pack_repo.NewPack(upload_transport, index_transport,
980
 
            pack_transport, index_builder_class=BTreeBuilder,
981
 
            index_class=BTreeGraphIndex)
 
1457
        collection = pack_repo.RepositoryPackCollection(
 
1458
            repo=None,
 
1459
            transport=self.get_transport('.'),
 
1460
            index_transport=index_transport,
 
1461
            upload_transport=upload_transport,
 
1462
            pack_transport=pack_transport,
 
1463
            index_builder_class=BTreeBuilder,
 
1464
            index_class=BTreeGraphIndex,
 
1465
            use_chk_index=False)
 
1466
        pack = pack_repo.NewPack(collection)
 
1467
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
982
1468
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
983
1469
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
984
1470
        self.assertIsInstance(pack._hash, type(osutils.md5()))
994
1480
class TestPacker(TestCaseWithTransport):
995
1481
    """Tests for the packs repository Packer class."""
996
1482
 
997
 
    # To date, this class has been factored out and nothing new added to it;
998
 
    # thus there are not yet any tests.
999
 
 
1000
 
 
1001
 
class TestInterDifferingSerializer(TestCaseWithTransport):
1002
 
 
1003
 
    def test_progress_bar(self):
1004
 
        tree = self.make_branch_and_tree('tree')
1005
 
        tree.commit('rev1', rev_id='rev-1')
1006
 
        tree.commit('rev2', rev_id='rev-2')
1007
 
        tree.commit('rev3', rev_id='rev-3')
1008
 
        repo = self.make_repository('repo')
1009
 
        inter_repo = repository.InterDifferingSerializer(
1010
 
            tree.branch.repository, repo)
1011
 
        pb = progress.InstrumentedProgress(to_file=StringIO())
1012
 
        pb.never_throttle = True
1013
 
        inter_repo.fetch('rev-1', pb)
1014
 
        self.assertEqual('Transferring revisions', pb.last_msg)
1015
 
        self.assertEqual(1, pb.last_cnt)
1016
 
        self.assertEqual(1, pb.last_total)
1017
 
        inter_repo.fetch('rev-3', pb)
1018
 
        self.assertEqual(2, pb.last_cnt)
1019
 
        self.assertEqual(2, pb.last_total)
 
1483
    def test_pack_optimizes_pack_order(self):
 
1484
        builder = self.make_branch_builder('.', format="1.9")
 
1485
        builder.start_series()
 
1486
        builder.build_snapshot('A', None, [
 
1487
            ('add', ('', 'root-id', 'directory', None)),
 
1488
            ('add', ('f', 'f-id', 'file', 'content\n'))])
 
1489
        builder.build_snapshot('B', ['A'],
 
1490
            [('modify', ('f-id', 'new-content\n'))])
 
1491
        builder.build_snapshot('C', ['B'],
 
1492
            [('modify', ('f-id', 'third-content\n'))])
 
1493
        builder.build_snapshot('D', ['C'],
 
1494
            [('modify', ('f-id', 'fourth-content\n'))])
 
1495
        b = builder.get_branch()
 
1496
        b.lock_read()
 
1497
        builder.finish_series()
 
1498
        self.addCleanup(b.unlock)
 
1499
        # At this point, we should have 4 pack files available
 
1500
        # Because of how they were built, they correspond to
 
1501
        # ['D', 'C', 'B', 'A']
 
1502
        packs = b.repository._pack_collection.packs
 
1503
        packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
 
1504
                                  packs, 'testing',
 
1505
                                  revision_ids=['B', 'C'])
 
1506
        # Now, when we are copying the B & C revisions, their pack files should
 
1507
        # be moved to the front of the stack
 
1508
        # The new ordering moves B & C to the front of the .packs attribute,
 
1509
        # and leaves the others in the original order.
 
1510
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
 
1511
        new_pack = packer.pack()
 
1512
        self.assertEqual(new_packs, packer.packs)
 
1513
 
 
1514
 
 
1515
class TestOptimisingPacker(TestCaseWithTransport):
 
1516
    """Tests for the OptimisingPacker class."""
 
1517
 
 
1518
    def get_pack_collection(self):
 
1519
        repo = self.make_repository('.')
 
1520
        return repo._pack_collection
 
1521
 
 
1522
    def test_open_pack_will_optimise(self):
 
1523
        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
 
1524
                                            [], '.test')
 
1525
        new_pack = packer.open_pack()
 
1526
        self.addCleanup(new_pack.abort) # ensure cleanup
 
1527
        self.assertIsInstance(new_pack, pack_repo.NewPack)
 
1528
        self.assertTrue(new_pack.revision_index._optimize_for_size)
 
1529
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
 
1530
        self.assertTrue(new_pack.text_index._optimize_for_size)
 
1531
        self.assertTrue(new_pack.signature_index._optimize_for_size)
 
1532
 
 
1533
 
 
1534
class TestGCCHKPacker(TestCaseWithTransport):
 
1535
 
 
1536
    def make_abc_branch(self):
 
1537
        builder = self.make_branch_builder('source')
 
1538
        builder.start_series()
 
1539
        builder.build_snapshot('A', None, [
 
1540
            ('add', ('', 'root-id', 'directory', None)),
 
1541
            ('add', ('file', 'file-id', 'file', 'content\n')),
 
1542
            ])
 
1543
        builder.build_snapshot('B', ['A'], [
 
1544
            ('add', ('dir', 'dir-id', 'directory', None))])
 
1545
        builder.build_snapshot('C', ['B'], [
 
1546
            ('modify', ('file-id', 'new content\n'))])
 
1547
        builder.finish_series()
 
1548
        return builder.get_branch()
 
1549
 
 
1550
    def make_branch_with_disjoint_inventory_and_revision(self):
 
1551
        """a repo with separate packs for a revisions Revision and Inventory.
 
1552
 
 
1553
        There will be one pack file that holds the Revision content, and one
 
1554
        for the Inventory content.
 
1555
 
 
1556
        :return: (repository,
 
1557
                  pack_name_with_rev_A_Revision,
 
1558
                  pack_name_with_rev_A_Inventory,
 
1559
                  pack_name_with_rev_C_content)
 
1560
        """
 
1561
        b_source = self.make_abc_branch()
 
1562
        b_base = b_source.bzrdir.sprout('base', revision_id='A').open_branch()
 
1563
        b_stacked = b_base.bzrdir.sprout('stacked', stacked=True).open_branch()
 
1564
        b_stacked.lock_write()
 
1565
        self.addCleanup(b_stacked.unlock)
 
1566
        b_stacked.fetch(b_source, 'B')
 
1567
        # Now re-open the stacked repo directly (no fallbacks) so that we can
 
1568
        # fill in the A rev.
 
1569
        repo_not_stacked = b_stacked.bzrdir.open_repository()
 
1570
        repo_not_stacked.lock_write()
 
1571
        self.addCleanup(repo_not_stacked.unlock)
 
1572
        # Now we should have a pack file with A's inventory, but not its
 
1573
        # Revision
 
1574
        self.assertEqual([('A',), ('B',)],
 
1575
                         sorted(repo_not_stacked.inventories.keys()))
 
1576
        self.assertEqual([('B',)],
 
1577
                         sorted(repo_not_stacked.revisions.keys()))
 
1578
        stacked_pack_names = repo_not_stacked._pack_collection.names()
 
1579
        # We have a couple names here, figure out which has A's inventory
 
1580
        for name in stacked_pack_names:
 
1581
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
 
1582
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
 
1583
            if ('A',) in keys:
 
1584
                inv_a_pack_name = name
 
1585
                break
 
1586
        else:
 
1587
            self.fail('Could not find pack containing A\'s inventory')
 
1588
        repo_not_stacked.fetch(b_source.repository, 'A')
 
1589
        self.assertEqual([('A',), ('B',)],
 
1590
                         sorted(repo_not_stacked.revisions.keys()))
 
1591
        new_pack_names = set(repo_not_stacked._pack_collection.names())
 
1592
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
 
1593
        self.assertEqual(1, len(rev_a_pack_names))
 
1594
        rev_a_pack_name = list(rev_a_pack_names)[0]
 
1595
        # Now fetch 'C', so we have a couple pack files to join
 
1596
        repo_not_stacked.fetch(b_source.repository, 'C')
 
1597
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
 
1598
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
 
1599
        self.assertEqual(1, len(rev_c_pack_names))
 
1600
        rev_c_pack_name = list(rev_c_pack_names)[0]
 
1601
        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
 
1602
                rev_c_pack_name)
 
1603
 
 
1604
    def test_pack_with_distant_inventories(self):
 
1605
        # See https://bugs.launchpad.net/bzr/+bug/437003
 
1606
        # When repacking, it is possible to have an inventory in a different
 
1607
        # pack file than the associated revision. An autopack can then come
 
1608
        # along, and miss that inventory, and complain.
 
1609
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
 
1610
         ) = self.make_branch_with_disjoint_inventory_and_revision()
 
1611
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
 
1612
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
 
1613
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
 
1614
                    [a_pack, c_pack], '.test-pack')
 
1615
        # This would raise ValueError in bug #437003, but should not raise an
 
1616
        # error once fixed.
 
1617
        packer.pack()
 
1618
 
 
1619
    def test_pack_with_missing_inventory(self):
 
1620
        # Similar to test_pack_with_missing_inventory, but this time, we force
 
1621
        # the A inventory to actually be gone from the repository.
 
1622
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
 
1623
         ) = self.make_branch_with_disjoint_inventory_and_revision()
 
1624
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
 
1625
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
 
1626
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
 
1627
            repo._pack_collection.all_packs(), '.test-pack')
 
1628
        e = self.assertRaises(ValueError, packer.pack)
 
1629
        packer.new_pack.abort()
 
1630
        self.assertContainsRe(str(e),
 
1631
            r"We are missing inventories for revisions: .*'A'")
 
1632
 
 
1633
 
 
1634
class TestCrossFormatPacks(TestCaseWithTransport):
 
1635
 
 
1636
    def log_pack(self, hint=None):
 
1637
        self.calls.append(('pack', hint))
 
1638
        self.orig_pack(hint=hint)
 
1639
        if self.expect_hint:
 
1640
            self.assertTrue(hint)
 
1641
 
 
1642
    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
 
1643
        self.expect_hint = expect_pack_called
 
1644
        self.calls = []
 
1645
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
 
1646
        source_tree.lock_write()
 
1647
        self.addCleanup(source_tree.unlock)
 
1648
        tip = source_tree.commit('foo')
 
1649
        target = self.make_repository('target', format=target_fmt)
 
1650
        target.lock_write()
 
1651
        self.addCleanup(target.unlock)
 
1652
        source = source_tree.branch.repository._get_source(target._format)
 
1653
        self.orig_pack = target.pack
 
1654
        self.overrideAttr(target, "pack", self.log_pack)
 
1655
        search = target.search_missing_revision_ids(
 
1656
            source_tree.branch.repository, revision_ids=[tip])
 
1657
        stream = source.get_stream(search)
 
1658
        from_format = source_tree.branch.repository._format
 
1659
        sink = target._get_sink()
 
1660
        sink.insert_stream(stream, from_format, [])
 
1661
        if expect_pack_called:
 
1662
            self.assertLength(1, self.calls)
 
1663
        else:
 
1664
            self.assertLength(0, self.calls)
 
1665
 
 
1666
    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
 
1667
        self.expect_hint = expect_pack_called
 
1668
        self.calls = []
 
1669
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
 
1670
        source_tree.lock_write()
 
1671
        self.addCleanup(source_tree.unlock)
 
1672
        tip = source_tree.commit('foo')
 
1673
        target = self.make_repository('target', format=target_fmt)
 
1674
        target.lock_write()
 
1675
        self.addCleanup(target.unlock)
 
1676
        source = source_tree.branch.repository
 
1677
        self.orig_pack = target.pack
 
1678
        self.overrideAttr(target, "pack", self.log_pack)
 
1679
        target.fetch(source)
 
1680
        if expect_pack_called:
 
1681
            self.assertLength(1, self.calls)
 
1682
        else:
 
1683
            self.assertLength(0, self.calls)
 
1684
 
 
1685
    def test_sink_format_hint_no(self):
 
1686
        # When the target format says packing makes no difference, pack is not
 
1687
        # called.
 
1688
        self.run_stream('1.9', 'rich-root-pack', False)
 
1689
 
 
1690
    def test_sink_format_hint_yes(self):
 
1691
        # When the target format says packing makes a difference, pack is
 
1692
        # called.
 
1693
        self.run_stream('1.9', '2a', True)
 
1694
 
 
1695
    def test_sink_format_same_no(self):
 
1696
        # When the formats are the same, pack is not called.
 
1697
        self.run_stream('2a', '2a', False)
 
1698
 
 
1699
    def test_IDS_format_hint_no(self):
 
1700
        # When the target format says packing makes no difference, pack is not
 
1701
        # called.
 
1702
        self.run_fetch('1.9', 'rich-root-pack', False)
 
1703
 
 
1704
    def test_IDS_format_hint_yes(self):
 
1705
        # When the target format says packing makes a difference, pack is
 
1706
        # called.
 
1707
        self.run_fetch('1.9', '2a', True)
 
1708
 
 
1709
    def test_IDS_format_same_no(self):
 
1710
        # When the formats are the same, pack is not called.
 
1711
        self.run_fetch('2a', '2a', False)
 
1712
 
 
1713
 
 
1714
class Test_LazyListJoin(tests.TestCase):
 
1715
 
 
1716
    def test__repr__(self):
 
1717
        lazy = repository._LazyListJoin(['a'], ['b'])
 
1718
        self.assertEqual("bzrlib.repository._LazyListJoin((['a'], ['b']))",
 
1719
                         repr(lazy))
 
1720
 
 
1721
 
 
1722
class TestFeatures(tests.TestCaseWithTransport):
 
1723
 
 
1724
    def test_open_with_present_feature(self):
 
1725
        self.addCleanup(
 
1726
            repository.RepositoryFormatMetaDir.unregister_feature,
 
1727
            "makes-cheese-sandwich")
 
1728
        repository.RepositoryFormatMetaDir.register_feature(
 
1729
            "makes-cheese-sandwich")
 
1730
        repo = self.make_repository('.')
 
1731
        repo.lock_write()
 
1732
        repo._format.features["makes-cheese-sandwich"] = "required"
 
1733
        repo._format.check_support_status(False)
 
1734
        repo.unlock()
 
1735
 
 
1736
    def test_open_with_missing_required_feature(self):
 
1737
        repo = self.make_repository('.')
 
1738
        repo.lock_write()
 
1739
        repo._format.features["makes-cheese-sandwich"] = "required"
 
1740
        self.assertRaises(errors.MissingFeature,
 
1741
            repo._format.check_support_status, False)