~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: John Arbash Meinel
  • Date: 2009-10-02 20:32:50 UTC
  • mto: (4679.6.1 2.1-export-c-api)
  • mto: This revision was merged to the branch mainline in revision 4735.
  • Revision ID: john@arbash-meinel.com-20091002203250-q6iv6o2mwjqp4g53
Add __iter__ support.

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006 Canonical Ltd
 
1
# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
12
12
#
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
 
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16
16
 
17
17
"""Tests for the Repository facility that are not interface tests.
18
18
 
19
 
For interface tests see tests/repository_implementations/*.py.
 
19
For interface tests see tests/per_repository/*.py.
20
20
 
21
21
For concrete class tests see this file, and for storage formats tests
22
22
also see this file.
26
26
from StringIO import StringIO
27
27
 
28
28
import bzrlib
29
 
import bzrlib.bzrdir as bzrdir
30
 
import bzrlib.errors as errors
31
29
from bzrlib.errors import (NotBranchError,
32
30
                           NoSuchFile,
33
31
                           UnknownFormatError,
34
32
                           UnsupportedFormatError,
35
33
                           )
36
 
import bzrlib.repository as repository
37
 
from bzrlib.tests import TestCase, TestCaseWithTransport
38
 
from bzrlib.transport import get_transport
39
 
from bzrlib.transport.http import HttpServer
 
34
from bzrlib import (
 
35
    graph,
 
36
    tests,
 
37
    )
 
38
from bzrlib.branchbuilder import BranchBuilder
 
39
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
 
40
from bzrlib.index import GraphIndex, InMemoryGraphIndex
 
41
from bzrlib.repository import RepositoryFormat
 
42
from bzrlib.smart import server
 
43
from bzrlib.tests import (
 
44
    TestCase,
 
45
    TestCaseWithTransport,
 
46
    TestSkipped,
 
47
    test_knit,
 
48
    )
 
49
from bzrlib.transport import (
 
50
    fakenfs,
 
51
    get_transport,
 
52
    )
40
53
from bzrlib.transport.memory import MemoryServer
 
54
from bzrlib import (
 
55
    bencode,
 
56
    bzrdir,
 
57
    errors,
 
58
    inventory,
 
59
    osutils,
 
60
    progress,
 
61
    repository,
 
62
    revision as _mod_revision,
 
63
    symbol_versioning,
 
64
    upgrade,
 
65
    workingtree,
 
66
    )
 
67
from bzrlib.repofmt import (
 
68
    groupcompress_repo,
 
69
    knitrepo,
 
70
    pack_repo,
 
71
    weaverepo,
 
72
    )
41
73
 
42
74
 
43
75
class TestDefaultFormat(TestCase):
44
76
 
45
77
    def test_get_set_default_format(self):
 
78
        old_default = bzrdir.format_registry.get('default')
 
79
        private_default = old_default().repository_format.__class__
46
80
        old_format = repository.RepositoryFormat.get_default_format()
47
 
        self.assertTrue(isinstance(old_format, repository.RepositoryFormatKnit1))
48
 
        repository.RepositoryFormat.set_default_format(SampleRepositoryFormat())
 
81
        self.assertTrue(isinstance(old_format, private_default))
 
82
        def make_sample_bzrdir():
 
83
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
 
84
            my_bzrdir.repository_format = SampleRepositoryFormat()
 
85
            return my_bzrdir
 
86
        bzrdir.format_registry.remove('default')
 
87
        bzrdir.format_registry.register('sample', make_sample_bzrdir, '')
 
88
        bzrdir.format_registry.set_default('sample')
49
89
        # creating a repository should now create an instrumented dir.
50
90
        try:
51
91
            # the default branch format is used by the meta dir format
54
94
            result = dir.create_repository()
55
95
            self.assertEqual(result, 'A bzr repository dir')
56
96
        finally:
57
 
            repository.RepositoryFormat.set_default_format(old_format)
58
 
        self.assertEqual(old_format, repository.RepositoryFormat.get_default_format())
 
97
            bzrdir.format_registry.remove('default')
 
98
            bzrdir.format_registry.remove('sample')
 
99
            bzrdir.format_registry.register('default', old_default, '')
 
100
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
 
101
                              old_format.__class__)
59
102
 
60
103
 
61
104
class SampleRepositoryFormat(repository.RepositoryFormat):
62
105
    """A sample format
63
106
 
64
 
    this format is initializable, unsupported to aid in testing the 
 
107
    this format is initializable, unsupported to aid in testing the
65
108
    open and open(unsupported=True) routines.
66
109
    """
67
110
 
72
115
    def initialize(self, a_bzrdir, shared=False):
73
116
        """Initialize a repository in a BzrDir"""
74
117
        t = a_bzrdir.get_repository_transport(self)
75
 
        t.put('format', StringIO(self.get_format_string()))
 
118
        t.put_bytes('format', self.get_format_string())
76
119
        return 'A bzr repository dir'
77
120
 
78
121
    def is_supported(self):
88
131
    def test_find_format(self):
89
132
        # is the right format object found for a repository?
90
133
        # create a branch with a few known format objects.
91
 
        # this is not quite the same as 
 
134
        # this is not quite the same as
92
135
        self.build_tree(["foo/", "bar/"])
93
136
        def check_format(format, url):
94
137
            dir = format._matchingbzrdir.initialize(url)
96
139
            t = get_transport(url)
97
140
            found_format = repository.RepositoryFormat.find_format(dir)
98
141
            self.failUnless(isinstance(found_format, format.__class__))
99
 
        check_format(repository.RepositoryFormat7(), "bar")
100
 
        
 
142
        check_format(weaverepo.RepositoryFormat7(), "bar")
 
143
 
101
144
    def test_find_format_no_repository(self):
102
145
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
103
146
        self.assertRaises(errors.NoRepositoryPresent,
129
172
 
130
173
class TestFormat6(TestCaseWithTransport):
131
174
 
 
175
    def test_attribute__fetch_order(self):
 
176
        """Weaves need topological data insertion."""
 
177
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
178
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
179
        self.assertEqual('topological', repo._format._fetch_order)
 
180
 
 
181
    def test_attribute__fetch_uses_deltas(self):
 
182
        """Weaves do not reuse deltas."""
 
183
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
184
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
185
        self.assertEqual(False, repo._format._fetch_uses_deltas)
 
186
 
 
187
    def test_attribute__fetch_reconcile(self):
 
188
        """Weave repositories need a reconcile after fetch."""
 
189
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
190
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
191
        self.assertEqual(True, repo._format._fetch_reconcile)
 
192
 
132
193
    def test_no_ancestry_weave(self):
133
194
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
134
 
        repo = repository.RepositoryFormat6().initialize(control)
 
195
        repo = weaverepo.RepositoryFormat6().initialize(control)
135
196
        # We no longer need to create the ancestry.weave file
136
197
        # since it is *never* used.
137
198
        self.assertRaises(NoSuchFile,
138
199
                          control.transport.get,
139
200
                          'ancestry.weave')
140
201
 
 
202
    def test_supports_external_lookups(self):
 
203
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
204
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
205
        self.assertFalse(repo._format.supports_external_lookups)
 
206
 
141
207
 
142
208
class TestFormat7(TestCaseWithTransport):
143
 
    
 
209
 
 
210
    def test_attribute__fetch_order(self):
 
211
        """Weaves need topological data insertion."""
 
212
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
213
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
214
        self.assertEqual('topological', repo._format._fetch_order)
 
215
 
 
216
    def test_attribute__fetch_uses_deltas(self):
 
217
        """Weaves do not reuse deltas."""
 
218
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
219
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
220
        self.assertEqual(False, repo._format._fetch_uses_deltas)
 
221
 
 
222
    def test_attribute__fetch_reconcile(self):
 
223
        """Weave repositories need a reconcile after fetch."""
 
224
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
225
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
226
        self.assertEqual(True, repo._format._fetch_reconcile)
 
227
 
144
228
    def test_disk_layout(self):
145
229
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
146
 
        repo = repository.RepositoryFormat7().initialize(control)
 
230
        repo = weaverepo.RepositoryFormat7().initialize(control)
147
231
        # in case of side effects of locking.
148
232
        repo.lock_write()
149
233
        repo.unlock()
162
246
                             'w\n'
163
247
                             'W\n',
164
248
                             t.get('inventory.weave').read())
 
249
        # Creating a file with id Foo:Bar results in a non-escaped file name on
 
250
        # disk.
 
251
        control.create_branch()
 
252
        tree = control.create_workingtree()
 
253
        tree.add(['foo'], ['Foo:Bar'], ['file'])
 
254
        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
 
255
        tree.commit('first post', rev_id='first')
 
256
        self.assertEqualDiff(
 
257
            '# bzr weave file v5\n'
 
258
            'i\n'
 
259
            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
 
260
            'n first\n'
 
261
            '\n'
 
262
            'w\n'
 
263
            '{ 0\n'
 
264
            '. content\n'
 
265
            '}\n'
 
266
            'W\n',
 
267
            t.get('weaves/74/Foo%3ABar.weave').read())
165
268
 
166
269
    def test_shared_disk_layout(self):
167
270
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
168
 
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
 
271
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
169
272
        # we want:
170
273
        # format 'Bazaar-NG Repository format 7'
171
274
        # inventory.weave == empty_weave
188
291
    def test_creates_lockdir(self):
189
292
        """Make sure it appears to be controlled by a LockDir existence"""
190
293
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
191
 
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
 
294
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
192
295
        t = control.get_repository_transport(None)
193
 
        # TODO: Should check there is a 'lock' toplevel directory, 
 
296
        # TODO: Should check there is a 'lock' toplevel directory,
194
297
        # regardless of contents
195
298
        self.assertFalse(t.has('lock/held/info'))
196
299
        repo.lock_write()
204
307
        """repo format 7 actually locks on lockdir"""
205
308
        base_url = self.get_url()
206
309
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
207
 
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
 
310
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
208
311
        t = control.get_repository_transport(None)
209
312
        repo.lock_write()
210
313
        repo.unlock()
218
321
 
219
322
    def test_shared_no_tree_disk_layout(self):
220
323
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
221
 
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
 
324
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
222
325
        repo.set_make_working_trees(False)
223
326
        # we want:
224
327
        # format 'Bazaar-NG Repository format 7'
242
345
                             'W\n',
243
346
                             t.get('inventory.weave').read())
244
347
 
 
348
    def test_supports_external_lookups(self):
 
349
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
350
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
351
        self.assertFalse(repo._format.supports_external_lookups)
 
352
 
245
353
 
246
354
class TestFormatKnit1(TestCaseWithTransport):
247
 
    
 
355
 
 
356
    def test_attribute__fetch_order(self):
 
357
        """Knits need topological data insertion."""
 
358
        repo = self.make_repository('.',
 
359
                format=bzrdir.format_registry.get('knit')())
 
360
        self.assertEqual('topological', repo._format._fetch_order)
 
361
 
 
362
    def test_attribute__fetch_uses_deltas(self):
 
363
        """Knits reuse deltas."""
 
364
        repo = self.make_repository('.',
 
365
                format=bzrdir.format_registry.get('knit')())
 
366
        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
367
 
248
368
    def test_disk_layout(self):
249
369
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
250
 
        repo = repository.RepositoryFormatKnit1().initialize(control)
 
370
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
251
371
        # in case of side effects of locking.
252
372
        repo.lock_write()
253
373
        repo.unlock()
264
384
        # self.assertEqualDiff('', t.get('lock').read())
265
385
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
266
386
        self.check_knits(t)
 
387
        # Check per-file knits.
 
388
        branch = control.create_branch()
 
389
        tree = control.create_workingtree()
 
390
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
 
391
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
 
392
        tree.commit('1st post', rev_id='foo')
 
393
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
 
394
            '\nfoo fulltext 0 81  :')
267
395
 
268
 
    def assertHasKnit(self, t, knit_name):
 
396
    def assertHasKnit(self, t, knit_name, extra_content=''):
269
397
        """Assert that knit_name exists on t."""
270
 
        self.assertEqualDiff('# bzr knit index 8\n',
 
398
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
271
399
                             t.get(knit_name + '.kndx').read())
272
 
        # no default content
273
 
        self.assertTrue(t.has(knit_name + '.knit'))
274
400
 
275
401
    def check_knits(self, t):
276
402
        """check knit content for a repository."""
280
406
 
281
407
    def test_shared_disk_layout(self):
282
408
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
283
 
        repo = repository.RepositoryFormatKnit1().initialize(control, shared=True)
 
409
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
284
410
        # we want:
285
411
        # format 'Bazaar-NG Knit Repository Format 1'
286
412
        # lock: is a directory
299
425
 
300
426
    def test_shared_no_tree_disk_layout(self):
301
427
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
302
 
        repo = repository.RepositoryFormatKnit1().initialize(control, shared=True)
 
428
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
303
429
        repo.set_make_working_trees(False)
304
430
        # we want:
305
431
        # format 'Bazaar-NG Knit Repository Format 1'
320
446
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
321
447
        self.check_knits(t)
322
448
 
323
 
 
324
 
class InterString(repository.InterRepository):
325
 
    """An inter-repository optimised code path for strings.
326
 
 
327
 
    This is for use during testing where we use strings as repositories
 
449
    def test_deserialise_sets_root_revision(self):
 
450
        """We must have a inventory.root.revision
 
451
 
 
452
        Old versions of the XML5 serializer did not set the revision_id for
 
453
        the whole inventory. So we grab the one from the expected text. Which
 
454
        is valid when the api is not being abused.
 
455
        """
 
456
        repo = self.make_repository('.',
 
457
                format=bzrdir.format_registry.get('knit')())
 
458
        inv_xml = '<inventory format="5">\n</inventory>\n'
 
459
        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
 
460
        self.assertEqual('test-rev-id', inv.root.revision)
 
461
 
 
462
    def test_deserialise_uses_global_revision_id(self):
 
463
        """If it is set, then we re-use the global revision id"""
 
464
        repo = self.make_repository('.',
 
465
                format=bzrdir.format_registry.get('knit')())
 
466
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
 
467
                   '</inventory>\n')
 
468
        # Arguably, the deserialise_inventory should detect a mismatch, and
 
469
        # raise an error, rather than silently using one revision_id over the
 
470
        # other.
 
471
        self.assertRaises(AssertionError, repo.deserialise_inventory,
 
472
            'test-rev-id', inv_xml)
 
473
        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
 
474
        self.assertEqual('other-rev-id', inv.root.revision)
 
475
 
 
476
    def test_supports_external_lookups(self):
 
477
        repo = self.make_repository('.',
 
478
                format=bzrdir.format_registry.get('knit')())
 
479
        self.assertFalse(repo._format.supports_external_lookups)
 
480
 
 
481
 
 
482
class DummyRepository(object):
 
483
    """A dummy repository for testing."""
 
484
 
 
485
    _format = None
 
486
    _serializer = None
 
487
 
 
488
    def supports_rich_root(self):
 
489
        if self._format is not None:
 
490
            return self._format.rich_root_data
 
491
        return False
 
492
 
 
493
    def get_graph(self):
 
494
        raise NotImplementedError
 
495
 
 
496
    def get_parent_map(self, revision_ids):
 
497
        raise NotImplementedError
 
498
 
 
499
 
 
500
class InterDummy(repository.InterRepository):
 
501
    """An inter-repository optimised code path for DummyRepository.
 
502
 
 
503
    This is for use during testing where we use DummyRepository as repositories
328
504
    so that none of the default regsitered inter-repository classes will
329
 
    match.
 
505
    MATCH.
330
506
    """
331
507
 
332
508
    @staticmethod
333
509
    def is_compatible(repo_source, repo_target):
334
 
        """InterString is compatible with strings-as-repos."""
335
 
        return isinstance(repo_source, str) and isinstance(repo_target, str)
 
510
        """InterDummy is compatible with DummyRepository."""
 
511
        return (isinstance(repo_source, DummyRepository) and
 
512
            isinstance(repo_target, DummyRepository))
336
513
 
337
514
 
338
515
class TestInterRepository(TestCaseWithTransport):
344
521
        # This also tests that the default registered optimised interrepository
345
522
        # classes do not barf inappropriately when a surprising repository type
346
523
        # is handed to them.
347
 
        dummy_a = "Repository 1."
348
 
        dummy_b = "Repository 2."
 
524
        dummy_a = DummyRepository()
 
525
        dummy_b = DummyRepository()
349
526
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
350
527
 
351
528
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
352
 
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default."""
 
529
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
 
530
 
 
531
        The effective default is now InterSameDataRepository because there is
 
532
        no actual sane default in the presence of incompatible data models.
 
533
        """
353
534
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
354
 
        self.assertEqual(repository.InterRepository,
 
535
        self.assertEqual(repository.InterSameDataRepository,
355
536
                         inter_repo.__class__)
356
537
        self.assertEqual(repo_a, inter_repo.source)
357
538
        self.assertEqual(repo_b, inter_repo.target)
362
543
        # and that it is correctly selected when given a repository
363
544
        # pair that it returns true on for the is_compatible static method
364
545
        # check
365
 
        dummy_a = "Repository 1."
366
 
        dummy_b = "Repository 2."
367
 
        repository.InterRepository.register_optimiser(InterString)
 
546
        dummy_a = DummyRepository()
 
547
        dummy_a._format = RepositoryFormat()
 
548
        dummy_b = DummyRepository()
 
549
        dummy_b._format = RepositoryFormat()
 
550
        repo = self.make_repository('.')
 
551
        # hack dummies to look like repo somewhat.
 
552
        dummy_a._serializer = repo._serializer
 
553
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
 
554
        dummy_a._format.rich_root_data = repo._format.rich_root_data
 
555
        dummy_b._serializer = repo._serializer
 
556
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
 
557
        dummy_b._format.rich_root_data = repo._format.rich_root_data
 
558
        repository.InterRepository.register_optimiser(InterDummy)
368
559
        try:
369
 
            # we should get the default for something InterString returns False
 
560
            # we should get the default for something InterDummy returns False
370
561
            # to
371
 
            self.assertFalse(InterString.is_compatible(dummy_a, None))
372
 
            self.assertGetsDefaultInterRepository(dummy_a, None)
373
 
            # and we should get an InterString for a pair it 'likes'
374
 
            self.assertTrue(InterString.is_compatible(dummy_a, dummy_b))
 
562
            self.assertFalse(InterDummy.is_compatible(dummy_a, repo))
 
563
            self.assertGetsDefaultInterRepository(dummy_a, repo)
 
564
            # and we should get an InterDummy for a pair it 'likes'
 
565
            self.assertTrue(InterDummy.is_compatible(dummy_a, dummy_b))
375
566
            inter_repo = repository.InterRepository.get(dummy_a, dummy_b)
376
 
            self.assertEqual(InterString, inter_repo.__class__)
 
567
            self.assertEqual(InterDummy, inter_repo.__class__)
377
568
            self.assertEqual(dummy_a, inter_repo.source)
378
569
            self.assertEqual(dummy_b, inter_repo.target)
379
570
        finally:
380
 
            repository.InterRepository.unregister_optimiser(InterString)
 
571
            repository.InterRepository.unregister_optimiser(InterDummy)
381
572
        # now we should get the default InterRepository object again.
382
573
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
383
574
 
387
578
    def test_is_compatible_and_registered(self):
388
579
        # InterWeaveRepo is compatible when either side
389
580
        # is a format 5/6/7 branch
390
 
        formats = [repository.RepositoryFormat5(),
391
 
                   repository.RepositoryFormat6(),
392
 
                   repository.RepositoryFormat7()]
393
 
        incompatible_formats = [repository.RepositoryFormat4(),
394
 
                                repository.RepositoryFormatKnit1(),
 
581
        from bzrlib.repofmt import knitrepo, weaverepo
 
582
        formats = [weaverepo.RepositoryFormat5(),
 
583
                   weaverepo.RepositoryFormat6(),
 
584
                   weaverepo.RepositoryFormat7()]
 
585
        incompatible_formats = [weaverepo.RepositoryFormat4(),
 
586
                                knitrepo.RepositoryFormatKnit1(),
395
587
                                ]
396
588
        repo_a = self.make_repository('a')
397
589
        repo_b = self.make_repository('b')
418
610
        t = get_transport(self.get_url('.'))
419
611
        t.mkdir('repository')
420
612
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
421
 
        repo = repository.RepositoryFormat7().initialize(repo_dir)
422
 
        target_format = repository.RepositoryFormatKnit1()
 
613
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
 
614
        target_format = knitrepo.RepositoryFormatKnit1()
423
615
        converter = repository.CopyConverter(target_format)
424
616
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
425
617
        try:
431
623
 
432
624
 
433
625
class TestMisc(TestCase):
434
 
    
 
626
 
435
627
    def test_unescape_xml(self):
436
628
        """We get some kind of error when malformed entities are passed"""
437
 
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;') 
 
629
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
 
630
 
 
631
 
 
632
class TestRepositoryFormatKnit3(TestCaseWithTransport):
 
633
 
 
634
    def test_attribute__fetch_order(self):
 
635
        """Knits need topological data insertion."""
 
636
        format = bzrdir.BzrDirMetaFormat1()
 
637
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
638
        repo = self.make_repository('.', format=format)
 
639
        self.assertEqual('topological', repo._format._fetch_order)
 
640
 
 
641
    def test_attribute__fetch_uses_deltas(self):
 
642
        """Knits reuse deltas."""
 
643
        format = bzrdir.BzrDirMetaFormat1()
 
644
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
645
        repo = self.make_repository('.', format=format)
 
646
        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
647
 
 
648
    def test_convert(self):
 
649
        """Ensure the upgrade adds weaves for roots"""
 
650
        format = bzrdir.BzrDirMetaFormat1()
 
651
        format.repository_format = knitrepo.RepositoryFormatKnit1()
 
652
        tree = self.make_branch_and_tree('.', format)
 
653
        tree.commit("Dull commit", rev_id="dull")
 
654
        revision_tree = tree.branch.repository.revision_tree('dull')
 
655
        revision_tree.lock_read()
 
656
        try:
 
657
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
 
658
                revision_tree.inventory.root.file_id)
 
659
        finally:
 
660
            revision_tree.unlock()
 
661
        format = bzrdir.BzrDirMetaFormat1()
 
662
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
663
        upgrade.Convert('.', format)
 
664
        tree = workingtree.WorkingTree.open('.')
 
665
        revision_tree = tree.branch.repository.revision_tree('dull')
 
666
        revision_tree.lock_read()
 
667
        try:
 
668
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
 
669
        finally:
 
670
            revision_tree.unlock()
 
671
        tree.commit("Another dull commit", rev_id='dull2')
 
672
        revision_tree = tree.branch.repository.revision_tree('dull2')
 
673
        revision_tree.lock_read()
 
674
        self.addCleanup(revision_tree.unlock)
 
675
        self.assertEqual('dull', revision_tree.inventory.root.revision)
 
676
 
 
677
    def test_supports_external_lookups(self):
 
678
        format = bzrdir.BzrDirMetaFormat1()
 
679
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
680
        repo = self.make_repository('.', format=format)
 
681
        self.assertFalse(repo._format.supports_external_lookups)
 
682
 
 
683
 
 
684
class Test2a(tests.TestCaseWithMemoryTransport):
 
685
 
 
686
    def test_fetch_combines_groups(self):
 
687
        builder = self.make_branch_builder('source', format='2a')
 
688
        builder.start_series()
 
689
        builder.build_snapshot('1', None, [
 
690
            ('add', ('', 'root-id', 'directory', '')),
 
691
            ('add', ('file', 'file-id', 'file', 'content\n'))])
 
692
        builder.build_snapshot('2', ['1'], [
 
693
            ('modify', ('file-id', 'content-2\n'))])
 
694
        builder.finish_series()
 
695
        source = builder.get_branch()
 
696
        target = self.make_repository('target', format='2a')
 
697
        target.fetch(source.repository)
 
698
        target.lock_read()
 
699
        self.addCleanup(target.unlock)
 
700
        details = target.texts._index.get_build_details(
 
701
            [('file-id', '1',), ('file-id', '2',)])
 
702
        file_1_details = details[('file-id', '1')]
 
703
        file_2_details = details[('file-id', '2')]
 
704
        # The index, and what to read off disk, should be the same for both
 
705
        # versions of the file.
 
706
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
707
 
 
708
    def test_fetch_combines_groups(self):
 
709
        builder = self.make_branch_builder('source', format='2a')
 
710
        builder.start_series()
 
711
        builder.build_snapshot('1', None, [
 
712
            ('add', ('', 'root-id', 'directory', '')),
 
713
            ('add', ('file', 'file-id', 'file', 'content\n'))])
 
714
        builder.build_snapshot('2', ['1'], [
 
715
            ('modify', ('file-id', 'content-2\n'))])
 
716
        builder.finish_series()
 
717
        source = builder.get_branch()
 
718
        target = self.make_repository('target', format='2a')
 
719
        target.fetch(source.repository)
 
720
        target.lock_read()
 
721
        self.addCleanup(target.unlock)
 
722
        details = target.texts._index.get_build_details(
 
723
            [('file-id', '1',), ('file-id', '2',)])
 
724
        file_1_details = details[('file-id', '1')]
 
725
        file_2_details = details[('file-id', '2')]
 
726
        # The index, and what to read off disk, should be the same for both
 
727
        # versions of the file.
 
728
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
729
 
 
730
    def test_format_pack_compresses_True(self):
 
731
        repo = self.make_repository('repo', format='2a')
 
732
        self.assertTrue(repo._format.pack_compresses)
 
733
 
 
734
    def test_inventories_use_chk_map_with_parent_base_dict(self):
 
735
        tree = self.make_branch_and_memory_tree('repo', format="2a")
 
736
        tree.lock_write()
 
737
        tree.add([''], ['TREE_ROOT'])
 
738
        revid = tree.commit("foo")
 
739
        tree.unlock()
 
740
        tree.lock_read()
 
741
        self.addCleanup(tree.unlock)
 
742
        inv = tree.branch.repository.get_inventory(revid)
 
743
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
 
744
        inv.parent_id_basename_to_file_id._ensure_root()
 
745
        inv.id_to_entry._ensure_root()
 
746
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
 
747
        self.assertEqual(65536,
 
748
            inv.parent_id_basename_to_file_id._root_node.maximum_size)
 
749
 
 
750
    def test_autopack_unchanged_chk_nodes(self):
 
751
        # at 20 unchanged commits, chk pages are packed that are split into
 
752
        # two groups such that the new pack being made doesn't have all its
 
753
        # pages in the source packs (though they are in the repository).
 
754
        # Use a memory backed repository, we don't need to hit disk for this
 
755
        tree = self.make_branch_and_memory_tree('tree', format='2a')
 
756
        tree.lock_write()
 
757
        self.addCleanup(tree.unlock)
 
758
        tree.add([''], ['TREE_ROOT'])
 
759
        for pos in range(20):
 
760
            tree.commit(str(pos))
 
761
 
 
762
    def test_pack_with_hint(self):
 
763
        tree = self.make_branch_and_memory_tree('tree', format='2a')
 
764
        tree.lock_write()
 
765
        self.addCleanup(tree.unlock)
 
766
        tree.add([''], ['TREE_ROOT'])
 
767
        # 1 commit to leave untouched
 
768
        tree.commit('1')
 
769
        to_keep = tree.branch.repository._pack_collection.names()
 
770
        # 2 to combine
 
771
        tree.commit('2')
 
772
        tree.commit('3')
 
773
        all = tree.branch.repository._pack_collection.names()
 
774
        combine = list(set(all) - set(to_keep))
 
775
        self.assertLength(3, all)
 
776
        self.assertLength(2, combine)
 
777
        tree.branch.repository.pack(hint=combine)
 
778
        final = tree.branch.repository._pack_collection.names()
 
779
        self.assertLength(2, final)
 
780
        self.assertFalse(combine[0] in final)
 
781
        self.assertFalse(combine[1] in final)
 
782
        self.assertSubset(to_keep, final)
 
783
 
 
784
    def test_stream_source_to_gc(self):
 
785
        source = self.make_repository('source', format='2a')
 
786
        target = self.make_repository('target', format='2a')
 
787
        stream = source._get_source(target._format)
 
788
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
 
789
 
 
790
    def test_stream_source_to_non_gc(self):
 
791
        source = self.make_repository('source', format='2a')
 
792
        target = self.make_repository('target', format='rich-root-pack')
 
793
        stream = source._get_source(target._format)
 
794
        # We don't want the child GroupCHKStreamSource
 
795
        self.assertIs(type(stream), repository.StreamSource)
 
796
 
 
797
    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        """get_stream_for_missing_keys must stream every chk page an
        inventory references, not only the pages newly created in that
        revision (contrast with a plain get_stream fetch).
        """
        source_builder = self.make_branch_builder('source',
                            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                # Substreams must be drained even when their records are
                # not of interest here.
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)
 
861
 
 
862
    def test_inconsistency_fatal(self):
 
863
        repo = self.make_repository('repo', format='2a')
 
864
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
 
865
        self.assertFalse(repo.texts._index._inconsistency_fatal)
 
866
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
 
867
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
 
868
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
 
869
 
 
870
 
 
871
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
    """Which StreamSource is chosen between knit-pack repositories.

    Fetching between two repositories of the same pack format can use the
    optimised KnitPackStreamSource; any format mismatch must fall back to
    the generic repository.StreamSource.
    """

    def test_source_to_exact_pack_092(self):
        src = self.make_repository('source', format='pack-0.92')
        dest = self.make_repository('target', format='pack-0.92')
        self.assertIsInstance(src._get_source(dest._format),
                              pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        src = self.make_repository('source', format='rich-root-pack')
        dest = self.make_repository('target', format='rich-root-pack')
        self.assertIsInstance(src._get_source(dest._format),
                              pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        src = self.make_repository('source', format='1.9')
        dest = self.make_repository('target', format='1.9')
        self.assertIsInstance(src._get_source(dest._format),
                              pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        src = self.make_repository('source', format='1.9-rich-root')
        dest = self.make_repository('target', format='1.9-rich-root')
        self.assertIsInstance(src._get_source(dest._format),
                              pack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        src = self.make_repository('source', format='1.9')
        self.make_repository('target', format='1.9')
        # Re-open the target through the smart server; an exact-format
        # remote target still gets the optimised source.
        dest = repository.Repository.open(trans.base)
        self.assertIsInstance(src._get_source(dest._format),
                              pack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        src = self.make_repository('source', format='pack-0.92')
        dest = self.make_repository('target', format='1.9')
        self.assertIs(type(src._get_source(dest._format)),
                      repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        src = self.make_repository('source', format='1.9')
        dest = self.make_repository('target', format='1.9-rich-root')
        self.assertIs(type(src._get_source(dest._format)),
                      repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        src = self.make_repository('source', format='1.9')
        self.make_repository('target', format='1.6')
        # Mismatched formats over the smart server fall back to the
        # generic source.
        dest = repository.Repository.open(trans.base)
        self.assertIs(type(src._get_source(dest._format)),
                      repository.StreamSource)

    def test_stream_source_to_knit(self):
        src = self.make_repository('source', format='pack-0.92')
        dest = self.make_repository('target', format='dirstate')
        self.assertIs(type(src._get_source(dest._format)),
                      repository.StreamSource)
 
932
 
 
933
 
 
934
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source',
            format='development6-rich-root')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        """Check _find_parent_ids_of_revisions(rev_set), ignoring order."""
        actual = self.repo._find_parent_ids_of_revisions(rev_set)
        self.assertEqual(sorted(expected_result), sorted(actual))

    def test_simple(self):
        """The parent of the single requested revision is returned."""
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.assertParentIds(['revid1'], ['revid2'])

    def test_not_first_parent(self):
        """Parents inside the requested set are not reported."""
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        self.assertParentIds(['revid1'], ['revid3', 'revid2'])

    def test_not_null(self):
        """A rootmost revision has no parent ids."""
        self.assertParentIds([], ['initial'])

    def test_not_null_set(self):
        """NULL_REVISION in the set yields no parents."""
        self.builder.build_snapshot('revid1', None, [])
        self.assertParentIds([], [_mod_revision.NULL_REVISION])

    def test_ghost(self):
        """A ghost in the requested set does not break parent lookup."""
        self.builder.build_snapshot('revid1', None, [])
        self.assertParentIds(['initial'], ['ghost', 'revid1'])

    def test_ghost_parent(self):
        """Ghost parents of requested revisions are reported."""
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        self.assertParentIds(['ghost', 'initial'], ['revid2', 'revid1'])

    def test_righthand_parent(self):
        """Right-hand merge parents outside the set are reported."""
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        self.assertParentIds(['revid1', 'revid2b'], ['revid3', 'revid2a'])
 
991
 
 
992
 
 
993
class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        """Return a repository with deliberately broken text parentage.

        The text graph references an unreferenced ancestor ('rev1b') and a
        ghost revision ('rev1c'), so fetching from this repository should
        trip validation in the target.
        """
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon.  Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups = []
        try:
            repo.lock_write()
            cleanups.append(repo.unlock)
            repo.start_write_group()
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id='rev1a')
            inv.root.revision = 'rev1a'
            self.add_file(repo, inv, 'file1', 'rev1a', [])
            repo.add_inventory('rev1a', inv, [])
            revision = _mod_revision.Revision('rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision('rev1a',revision, inv)

            # make rev1b, which has no Revision, but has an Inventory, and
            # file1
            inv = inventory.Inventory(revision_id='rev1b')
            inv.root.revision = 'rev1b'
            self.add_file(repo, inv, 'file1', 'rev1b', [])
            repo.add_inventory('rev1b', inv, [])

            # make rev2, with file1 and file2
            # file2 is sane
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
            self.add_file(repo, inv, 'file2', 'rev2', [])
            self.add_revision(repo, 'rev2', inv, ['rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
            return repo
        finally:
            # Reversed so the write group is committed before unlocking.
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        """Add inv and a matching stub Revision object to repo."""
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id,revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        """Add a one-line text for filename to inv and repo's text store."""
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        try:
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
            # error.
            return
        # If the fetch succeeded, the copied text must at least be intact.
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        text = empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True).next()
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
1084
 
 
1085
 
 
1086
class TestRepositoryPackCollection(TestCaseWithTransport):
    """Tests for RepositoryPackCollection: pack counting, pack
    distribution, autopack planning and reloading of pack names.
    """

    def get_format(self):
        """Return a pack-0.92 bzrdir format for creating test repos."""
        return bzrdir.format_registry.make_bzrdir('pack-0.92')

    def get_packs(self):
        """Create a fresh pack repository and return its pack collection."""
        format = self.get_format()
        repo = self.make_repository('.', format=format)
        return repo._pack_collection

    def make_packs_and_alt_repo(self, write_lock=False):
        """Create a pack repo with 3 packs, and access it via a second repo."""
        tree = self.make_branch_and_tree('.', format=self.get_format())
        tree.lock_write()
        self.addCleanup(tree.unlock)
        rev1 = tree.commit('one')
        rev2 = tree.commit('two')
        rev3 = tree.commit('three')
        # Open a second, independent repository object on the same location
        # so changes made through 'tree' are not immediately visible to it.
        r = repository.Repository.open('.')
        if write_lock:
            r.lock_write()
        else:
            r.lock_read()
        self.addCleanup(r.unlock)
        packs = r._pack_collection
        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]

    def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
        # without it blowing up
        packs = self.get_packs()
        self.assertEqual(1, packs._max_pack_count(0))
        # after that the sum of the digits, - check the first 1-9
        self.assertEqual(1, packs._max_pack_count(1))
        self.assertEqual(2, packs._max_pack_count(2))
        self.assertEqual(3, packs._max_pack_count(3))
        self.assertEqual(4, packs._max_pack_count(4))
        self.assertEqual(5, packs._max_pack_count(5))
        self.assertEqual(6, packs._max_pack_count(6))
        self.assertEqual(7, packs._max_pack_count(7))
        self.assertEqual(8, packs._max_pack_count(8))
        self.assertEqual(9, packs._max_pack_count(9))
        # check the boundary cases with two digits for the next decade
        self.assertEqual(1, packs._max_pack_count(10))
        self.assertEqual(2, packs._max_pack_count(11))
        self.assertEqual(10, packs._max_pack_count(19))
        self.assertEqual(2, packs._max_pack_count(20))
        self.assertEqual(3, packs._max_pack_count(21))
        # check some arbitrary big numbers
        self.assertEqual(25, packs._max_pack_count(112894))

    def test_pack_distribution_zero(self):
        """Zero revisions distribute as a single zero-size bucket."""
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))

    def test_ensure_loaded_unlocked(self):
        """ensure_loaded on an unlocked collection raises ObjectNotLocked."""
        packs = self.get_packs()
        self.assertRaises(errors.ObjectNotLocked,
                          packs.ensure_loaded)

    def test_pack_distribution_one_to_nine(self):
        """1-9 revisions distribute as that many single-revision packs."""
        packs = self.get_packs()
        self.assertEqual([1],
            packs.pack_distribution(1))
        self.assertEqual([1, 1],
            packs.pack_distribution(2))
        self.assertEqual([1, 1, 1],
            packs.pack_distribution(3))
        self.assertEqual([1, 1, 1, 1],
            packs.pack_distribution(4))
        self.assertEqual([1, 1, 1, 1, 1],
            packs.pack_distribution(5))
        self.assertEqual([1, 1, 1, 1, 1, 1],
            packs.pack_distribution(6))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(7))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(8))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(9))

    def test_pack_distribution_stable_at_boundaries(self):
        """When there are multi-rev packs the counts are stable."""
        packs = self.get_packs()
        # in 10s:
        self.assertEqual([10], packs.pack_distribution(10))
        self.assertEqual([10, 1], packs.pack_distribution(11))
        self.assertEqual([10, 10], packs.pack_distribution(20))
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
        # 100s
        self.assertEqual([100], packs.pack_distribution(100))
        self.assertEqual([100, 1], packs.pack_distribution(101))
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
        self.assertEqual([100, 100], packs.pack_distribution(200))
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))

    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
        """Packs already matching the distribution are left alone."""
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium")]
        # rev count - 2009 -> 2x1000 + 9x1
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
        """Packs smaller than the buckets but not combinable stay as-is."""
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
        # rev count - 2010 -> 2x1000 + 1x10
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_combines_smallest_two(self):
        """The two smallest packs are merged when the plan requires it."""
        packs = self.get_packs()
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
            (1, "single1")]
        # rev count - 2010 -> 2x1000 + 1x10 (3)
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)

    def test_plan_pack_operations_creates_a_single_op(self):
        """Packs that must be rewritten are merged into one operation."""
        packs = self.get_packs()
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
                          (10, 'e'), (6, 'f'), (4, 'g')]
        # rev count 150 -> 1x100 and 5x10
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
        # be combined into a single 120 size pack, and the 6 & 4 would
        # becombined into a size 10 pack. However, if we have to rewrite them,
        # we save a pack file with no increased I/O by putting them into the
        # same file.
        distribution = packs.pack_distribution(150)
        pack_operations = packs.plan_autopack_combinations(existing_packs,
                                                           distribution)
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)

    def test_all_packs_none(self):
        """An empty repository has no packs."""
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([], packs.all_packs())

    def test_all_packs_one(self):
        """One commit produces exactly one pack."""
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0])],
            packs.all_packs())

    def test_all_packs_two(self):
        """Two commits produce two packs (no autopack at this count)."""
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.commit('continue')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0]),
            packs.get_pack_by_name(packs.names()[1]),
            ], packs.all_packs())

    def test_get_pack_by_name(self):
        """get_pack_by_name returns a fully initialised, cached pack."""
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.reset()
        packs.ensure_loaded()
        name = packs.names()[0]
        pack_1 = packs.get_pack_by_name(name)
        # the pack should be correctly initialised
        sizes = packs._names[name]
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
        # and the same instance should be returned on successive calls.
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))

    def test_reload_pack_names_new_entry(self):
        """reload_pack_names picks up a pack added via another repo object."""
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Add a new pack file into the repository
        rev4 = tree.commit('four')
        new_names = tree.branch.repository._pack_collection.names()
        new_name = set(new_names).difference(names)
        self.assertEqual(1, len(new_name))
        new_name = new_name.pop()
        # The old collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        # And the repository can access the new revision
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_added_and_removed(self):
        """reload_pack_names copes with packs replaced by a full repack."""
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Now repack the whole thing
        tree.branch.repository.pack()
        new_names = tree.branch.repository._pack_collection.names()
        # The other collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

    def test_autopack_reloads_and_stops(self):
        """Autopack re-reads pack names mid-flight and aborts if stale."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything
        orig_execute = packs._execute_pack_operations
        def _munged_execute_pack_ops(*args, **kwargs):
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())
 
1328
 
 
1329
 
 
1330
class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        """Assert left and right are equal via both == and != (both ways)."""
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        """Assert left and right are unequal via both == and != (both ways)."""
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        """ExistingPack equality depends on every attribute in turn."""
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # change all attributes and ensure equality changes as we do.
        left.revision_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.revision_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.inventory_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.inventory_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.text_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.text_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.signature_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.signature_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.name = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.name = 'a'
        self.assertCurrentlyEqual(left, right)
        left.transport = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.transport = 'a'
        self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        """file_name() is the pack's name with a '.pack' suffix."""
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())
 
1378
 
 
1379
 
 
1380
class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        # Wire the pack collection to three distinct transports so we
        # can check that a fresh NewPack picks each one up unchanged.
        t_upload = self.get_transport('upload')
        t_pack = self.get_transport('pack')
        t_index = self.get_transport('index')
        t_upload.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=t_index,
            upload_transport=t_upload,
            pack_transport=t_pack,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        # Indices are freshly created builders of the configured class.
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        # The transports must be the very objects the collection holds.
        self.assertTrue(pack.upload_transport is t_upload)
        self.assertTrue(pack.index_transport is t_index)
        self.assertTrue(pack.pack_transport is t_pack)
        # Sizes are unknown until the pack is finished being written.
        self.assertEqual(None, pack.index_sizes)
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)
1410
class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        # Build four revisions, committed one at a time, so each one
        # lands in its own pack file.
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = pack_repo.Packer(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files
        # should be moved to the front of the .packs attribute, leaving
        # the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        # The new pack returned by pack() is not needed here; we only
        # care about the reordering side effect on packer.packs.
        packer.pack()
        self.assertEqual(new_packs, packer.packs)
1445
class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        """Return the pack collection of a freshly created repository."""
        return self.make_repository('.')._pack_collection

    def test_open_pack_will_optimise(self):
        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                            [], '.test')
        new_pack = packer.open_pack()
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        # Every index of the opened pack must have been switched into
        # the size-optimised mode.
        for index in (new_pack.revision_index,
                      new_pack.inventory_index,
                      new_pack.text_index,
                      new_pack.signature_index):
            self.assertTrue(index._optimize_for_size)
1463
class TestCrossFormatPacks(TestCaseWithTransport):
    """Check when a cross-format fetch triggers pack() on the target."""

    def log_pack(self, hint=None):
        """Stand-in for Repository.pack that records every invocation."""
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        """Insert a stream from a src_fmt repo into a target_fmt repo.

        Asserts pack() was called exactly once on the target when
        expect_pack_called is true, and never otherwise.
        """
        self.expect_hint = expect_pack_called
        self.calls = []
        src_tree = self.make_branch_and_tree('src', format=src_fmt)
        src_tree.lock_write()
        self.addCleanup(src_tree.unlock)
        tip = src_tree.commit('foo')
        src_repo = src_tree.branch.repository
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = src_repo._get_source(target._format)
        # Intercept pack() so log_pack records whether inserting the
        # stream caused a repack on the target.
        self.orig_pack = target.pack
        target.pack = self.log_pack
        search = target.search_missing_revision_ids(src_repo, tip)
        stream = source.get_stream(search)
        sink = target._get_sink()
        sink.insert_stream(stream, src_repo._format, [])
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        """Fetch from a src_fmt repo into a target_fmt repo.

        Asserts pack() was called exactly once on the target when
        expect_pack_called is true, and never otherwise.
        """
        self.expect_hint = expect_pack_called
        self.calls = []
        src_tree = self.make_branch_and_tree('src', format=src_fmt)
        src_tree.lock_write()
        self.addCleanup(src_tree.unlock)
        src_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        # Intercept pack() so log_pack records whether the fetch caused
        # a repack on the target.
        self.orig_pack = target.pack
        target.pack = self.log_pack
        target.fetch(src_tree.branch.repository)
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def test_sink_format_hint_no(self):
        # Packing makes no difference to this target format, so pack is
        # not called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # Packing makes a difference to this target format, so pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # Identical source and target formats: pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # Packing makes no difference to this target format, so pack is
        # not called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # Packing makes a difference to this target format, so pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # Identical source and target formats: pack is not called.
        self.run_fetch('2a', '2a', False)