~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_versionedfile.py

  • Committer: Johan Walles
  • Date: 2009-05-07 05:08:46 UTC
  • mfrom: (4342 +trunk)
  • mto: This revision was merged to the branch mainline in revision 4343.
  • Revision ID: johan.walles@gmail.com-20090507050846-nkwvcyauf1eh653q
Merge from upstream.

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006-2011 Canonical Ltd
 
1
# Copyright (C) 2005, 2009 Canonical Ltd
2
2
#
3
3
# Authors:
4
4
#   Johan Rydberg <jrydberg@gnu.org>
21
21
# TODO: might be nice to create a versionedfile with some type of corruption
22
22
# considered typical and check that it can be detected/corrected.
23
23
 
24
 
from gzip import GzipFile
25
24
from itertools import chain, izip
26
25
from StringIO import StringIO
27
26
 
28
27
from bzrlib import (
29
28
    errors,
30
 
    graph as _mod_graph,
31
29
    groupcompress,
32
30
    knit as _mod_knit,
33
31
    osutils,
34
32
    progress,
35
 
    transport,
36
 
    ui,
37
33
    )
38
34
from bzrlib.errors import (
39
35
                           RevisionNotPresent,
40
36
                           RevisionAlreadyPresent,
 
37
                           WeaveParentMismatch
41
38
                           )
42
39
from bzrlib.knit import (
43
40
    cleanup_pack_knit,
44
41
    make_file_factory,
45
42
    make_pack_factory,
 
43
    KnitAnnotateFactory,
 
44
    KnitPlainFactory,
46
45
    )
47
46
from bzrlib.tests import (
48
47
    TestCase,
49
48
    TestCaseWithMemoryTransport,
50
49
    TestNotApplicable,
51
50
    TestSkipped,
 
51
    condition_isinstance,
 
52
    split_suite_by_condition,
 
53
    multiply_tests,
52
54
    )
53
55
from bzrlib.tests.http_utils import TestCaseWithWebserver
 
56
from bzrlib.trace import mutter
 
57
from bzrlib.transport import get_transport
54
58
from bzrlib.transport.memory import MemoryTransport
 
59
from bzrlib.tsort import topo_sort
 
60
from bzrlib.tuned_gzip import GzipFile
55
61
import bzrlib.versionedfile as versionedfile
56
62
from bzrlib.versionedfile import (
57
63
    ConstantMapper,
61
67
    make_versioned_files_factory,
62
68
    )
63
69
from bzrlib.weave import WeaveFile
64
 
from bzrlib.weavefile import write_weave
65
 
from bzrlib.tests.scenarios import load_tests_apply_scenarios
66
 
 
67
 
 
68
 
load_tests = load_tests_apply_scenarios
 
70
from bzrlib.weavefile import read_weave, write_weave
 
71
 
 
72
 
 
73
def load_tests(standard_tests, module, loader):
 
74
    """Parameterize VersionedFiles tests for different implementations."""
 
75
    to_adapt, result = split_suite_by_condition(
 
76
        standard_tests, condition_isinstance(TestVersionedFiles))
 
77
    # We want to be sure of behaviour for:
 
78
    # weaves prefix layout (weave texts)
 
79
    # individually named weaves (weave inventories)
 
80
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
 
81
    #                   as it is the most complex mapper.
 
82
    # individually named knits
 
83
    # individual no-graph knits in packs (signatures)
 
84
    # individual graph knits in packs (inventories)
 
85
    # individual graph nocompression knits in packs (revisions)
 
86
    # plain text knits in packs (texts)
 
87
    len_one_scenarios = [
 
88
        ('weave-named', {
 
89
            'cleanup':None,
 
90
            'factory':make_versioned_files_factory(WeaveFile,
 
91
                ConstantMapper('inventory')),
 
92
            'graph':True,
 
93
            'key_length':1,
 
94
            'support_partial_insertion': False,
 
95
            }),
 
96
        ('named-knit', {
 
97
            'cleanup':None,
 
98
            'factory':make_file_factory(False, ConstantMapper('revisions')),
 
99
            'graph':True,
 
100
            'key_length':1,
 
101
            'support_partial_insertion': False,
 
102
            }),
 
103
        ('named-nograph-nodelta-knit-pack', {
 
104
            'cleanup':cleanup_pack_knit,
 
105
            'factory':make_pack_factory(False, False, 1),
 
106
            'graph':False,
 
107
            'key_length':1,
 
108
            'support_partial_insertion': False,
 
109
            }),
 
110
        ('named-graph-knit-pack', {
 
111
            'cleanup':cleanup_pack_knit,
 
112
            'factory':make_pack_factory(True, True, 1),
 
113
            'graph':True,
 
114
            'key_length':1,
 
115
            'support_partial_insertion': True,
 
116
            }),
 
117
        ('named-graph-nodelta-knit-pack', {
 
118
            'cleanup':cleanup_pack_knit,
 
119
            'factory':make_pack_factory(True, False, 1),
 
120
            'graph':True,
 
121
            'key_length':1,
 
122
            'support_partial_insertion': False,
 
123
            }),
 
124
        ('groupcompress-nograph', {
 
125
            'cleanup':groupcompress.cleanup_pack_group,
 
126
            'factory':groupcompress.make_pack_factory(False, False, 1),
 
127
            'graph': False,
 
128
            'key_length':1,
 
129
            'support_partial_insertion':False,
 
130
            }),
 
131
        ]
 
132
    len_two_scenarios = [
 
133
        ('weave-prefix', {
 
134
            'cleanup':None,
 
135
            'factory':make_versioned_files_factory(WeaveFile,
 
136
                PrefixMapper()),
 
137
            'graph':True,
 
138
            'key_length':2,
 
139
            'support_partial_insertion': False,
 
140
            }),
 
141
        ('annotated-knit-escape', {
 
142
            'cleanup':None,
 
143
            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
 
144
            'graph':True,
 
145
            'key_length':2,
 
146
            'support_partial_insertion': False,
 
147
            }),
 
148
        ('plain-knit-pack', {
 
149
            'cleanup':cleanup_pack_knit,
 
150
            'factory':make_pack_factory(True, True, 2),
 
151
            'graph':True,
 
152
            'key_length':2,
 
153
            'support_partial_insertion': True,
 
154
            }),
 
155
        ('groupcompress', {
 
156
            'cleanup':groupcompress.cleanup_pack_group,
 
157
            'factory':groupcompress.make_pack_factory(True, False, 1),
 
158
            'graph': True,
 
159
            'key_length':1,
 
160
            'support_partial_insertion':False,
 
161
            }),
 
162
        ]
 
163
    scenarios = len_one_scenarios + len_two_scenarios
 
164
    return multiply_tests(to_adapt, scenarios, result)
69
165
 
70
166
 
71
167
def get_diamond_vf(f, trailing_eol=True, left_only=False):
636
732
        # the ordering here is to make a tree so that dumb searches have
637
733
        # more changes to muck up.
638
734
 
639
 
        class InstrumentedProgress(progress.ProgressTask):
 
735
        class InstrumentedProgress(progress.DummyProgress):
640
736
 
641
737
            def __init__(self):
642
 
                progress.ProgressTask.__init__(self)
 
738
 
 
739
                progress.DummyProgress.__init__(self)
643
740
                self.updates = []
644
741
 
645
742
            def update(self, msg=None, current=None, total=None):
751
848
        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
752
849
 
753
850
    def test_readonly_mode(self):
754
 
        t = self.get_transport()
 
851
        transport = get_transport(self.get_url('.'))
755
852
        factory = self.get_factory()
756
 
        vf = factory('id', t, 0777, create=True, access_mode='w')
757
 
        vf = factory('id', t, access_mode='r')
 
853
        vf = factory('id', transport, 0777, create=True, access_mode='w')
 
854
        vf = factory('id', transport, access_mode='r')
758
855
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
759
856
        self.assertRaises(errors.ReadOnlyError,
760
857
                          vf.add_lines_with_ghosts,
782
879
class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
783
880
 
784
881
    def get_file(self, name='foo'):
785
 
        return WeaveFile(name, self.get_transport(),
786
 
                         create=True,
787
 
                         get_scope=self.get_transaction)
 
882
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
 
883
            get_scope=self.get_transaction)
788
884
 
789
885
    def get_file_corrupted_text(self):
790
 
        w = WeaveFile('foo', self.get_transport(),
791
 
                      create=True,
792
 
                      get_scope=self.get_transaction)
 
886
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
 
887
            get_scope=self.get_transaction)
793
888
        w.add_lines('v1', [], ['hello\n'])
794
889
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
795
890
 
823
918
        return w
824
919
 
825
920
    def reopen_file(self, name='foo', create=False):
826
 
        return WeaveFile(name, self.get_transport(),
827
 
                         create=create,
828
 
                         get_scope=self.get_transaction)
 
921
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
 
922
            get_scope=self.get_transaction)
829
923
 
830
924
    def test_no_implicit_create(self):
831
925
        self.assertRaises(errors.NoSuchFile,
832
926
                          WeaveFile,
833
927
                          'foo',
834
 
                          self.get_transport(),
 
928
                          get_transport(self.get_url('.')),
835
929
                          get_scope=self.get_transaction)
836
930
 
837
931
    def get_factory(self):
904
998
        # we should be able to read from http with a versioned file.
905
999
        vf = self.get_file()
906
1000
        # try an empty file access
907
 
        readonly_vf = self.get_factory()('foo', transport.get_transport(
908
 
                self.get_readonly_url('.')))
 
1001
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
909
1002
        self.assertEqual([], readonly_vf.versions())
910
 
 
911
 
    def test_readonly_http_works_with_feeling(self):
912
 
        # we should be able to read from http with a versioned file.
913
 
        vf = self.get_file()
914
1003
        # now with feeling.
915
1004
        vf.add_lines('1', [], ['a\n'])
916
1005
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
917
 
        readonly_vf = self.get_factory()('foo', transport.get_transport(
918
 
                self.get_readonly_url('.')))
 
1006
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
919
1007
        self.assertEqual(['1', '2'], vf.versions())
920
 
        self.assertEqual(['1', '2'], readonly_vf.versions())
921
1008
        for version in readonly_vf.versions():
922
1009
            readonly_vf.get_lines(version)
923
1010
 
925
1012
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
926
1013
 
927
1014
    def get_file(self):
928
 
        return WeaveFile('foo', self.get_transport(),
929
 
                         create=True,
930
 
                         get_scope=self.get_transaction)
 
1015
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
 
1016
            get_scope=self.get_transaction)
931
1017
 
932
1018
    def get_factory(self):
933
1019
        return WeaveFile
1177
1263
class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
1178
1264
 
1179
1265
    def get_file(self, name='foo'):
1180
 
        return WeaveFile(name, self.get_transport(),
1181
 
                         create=True)
 
1266
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)
1182
1267
 
1183
1268
    def log_contents(self, w):
1184
1269
        self.log('weave is:')
1377
1462
class TestVersionedFiles(TestCaseWithMemoryTransport):
1378
1463
    """Tests for the multiple-file variant of VersionedFile."""
1379
1464
 
1380
 
    # We want to be sure of behaviour for:
1381
 
    # weaves prefix layout (weave texts)
1382
 
    # individually named weaves (weave inventories)
1383
 
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
1384
 
    #                   as it is the most complex mapper.
1385
 
    # individually named knits
1386
 
    # individual no-graph knits in packs (signatures)
1387
 
    # individual graph knits in packs (inventories)
1388
 
    # individual graph nocompression knits in packs (revisions)
1389
 
    # plain text knits in packs (texts)
1390
 
    len_one_scenarios = [
1391
 
        ('weave-named', {
1392
 
            'cleanup':None,
1393
 
            'factory':make_versioned_files_factory(WeaveFile,
1394
 
                ConstantMapper('inventory')),
1395
 
            'graph':True,
1396
 
            'key_length':1,
1397
 
            'support_partial_insertion': False,
1398
 
            }),
1399
 
        ('named-knit', {
1400
 
            'cleanup':None,
1401
 
            'factory':make_file_factory(False, ConstantMapper('revisions')),
1402
 
            'graph':True,
1403
 
            'key_length':1,
1404
 
            'support_partial_insertion': False,
1405
 
            }),
1406
 
        ('named-nograph-nodelta-knit-pack', {
1407
 
            'cleanup':cleanup_pack_knit,
1408
 
            'factory':make_pack_factory(False, False, 1),
1409
 
            'graph':False,
1410
 
            'key_length':1,
1411
 
            'support_partial_insertion': False,
1412
 
            }),
1413
 
        ('named-graph-knit-pack', {
1414
 
            'cleanup':cleanup_pack_knit,
1415
 
            'factory':make_pack_factory(True, True, 1),
1416
 
            'graph':True,
1417
 
            'key_length':1,
1418
 
            'support_partial_insertion': True,
1419
 
            }),
1420
 
        ('named-graph-nodelta-knit-pack', {
1421
 
            'cleanup':cleanup_pack_knit,
1422
 
            'factory':make_pack_factory(True, False, 1),
1423
 
            'graph':True,
1424
 
            'key_length':1,
1425
 
            'support_partial_insertion': False,
1426
 
            }),
1427
 
        ('groupcompress-nograph', {
1428
 
            'cleanup':groupcompress.cleanup_pack_group,
1429
 
            'factory':groupcompress.make_pack_factory(False, False, 1),
1430
 
            'graph': False,
1431
 
            'key_length':1,
1432
 
            'support_partial_insertion':False,
1433
 
            }),
1434
 
        ]
1435
 
    len_two_scenarios = [
1436
 
        ('weave-prefix', {
1437
 
            'cleanup':None,
1438
 
            'factory':make_versioned_files_factory(WeaveFile,
1439
 
                PrefixMapper()),
1440
 
            'graph':True,
1441
 
            'key_length':2,
1442
 
            'support_partial_insertion': False,
1443
 
            }),
1444
 
        ('annotated-knit-escape', {
1445
 
            'cleanup':None,
1446
 
            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
1447
 
            'graph':True,
1448
 
            'key_length':2,
1449
 
            'support_partial_insertion': False,
1450
 
            }),
1451
 
        ('plain-knit-pack', {
1452
 
            'cleanup':cleanup_pack_knit,
1453
 
            'factory':make_pack_factory(True, True, 2),
1454
 
            'graph':True,
1455
 
            'key_length':2,
1456
 
            'support_partial_insertion': True,
1457
 
            }),
1458
 
        ('groupcompress', {
1459
 
            'cleanup':groupcompress.cleanup_pack_group,
1460
 
            'factory':groupcompress.make_pack_factory(True, False, 1),
1461
 
            'graph': True,
1462
 
            'key_length':1,
1463
 
            'support_partial_insertion':False,
1464
 
            }),
1465
 
        ]
1466
 
 
1467
 
    scenarios = len_one_scenarios + len_two_scenarios
1468
 
 
1469
1465
    def get_versionedfiles(self, relpath='files'):
1470
1466
        transport = self.get_transport(relpath)
1471
1467
        if relpath != '.':
1472
1468
            transport.mkdir('.')
1473
1469
        files = self.factory(transport)
1474
1470
        if self.cleanup is not None:
1475
 
            self.addCleanup(self.cleanup, files)
 
1471
            self.addCleanup(lambda:self.cleanup(files))
1476
1472
        return files
1477
1473
 
1478
 
    def get_simple_key(self, suffix):
1479
 
        """Return a key for the object under test."""
1480
 
        if self.key_length == 1:
1481
 
            return (suffix,)
1482
 
        else:
1483
 
            return ('FileA',) + (suffix,)
1484
 
 
1485
 
    def test_add_fallback_implies_without_fallbacks(self):
1486
 
        f = self.get_versionedfiles('files')
1487
 
        if getattr(f, 'add_fallback_versioned_files', None) is None:
1488
 
            raise TestNotApplicable("%s doesn't support fallbacks"
1489
 
                                    % (f.__class__.__name__,))
1490
 
        g = self.get_versionedfiles('fallback')
1491
 
        key_a = self.get_simple_key('a')
1492
 
        g.add_lines(key_a, [], ['\n'])
1493
 
        f.add_fallback_versioned_files(g)
1494
 
        self.assertTrue(key_a in f.get_parent_map([key_a]))
1495
 
        self.assertFalse(key_a in f.without_fallbacks().get_parent_map([key_a]))
1496
 
 
1497
 
    def test_add_lines(self):
1498
 
        f = self.get_versionedfiles()
1499
 
        key0 = self.get_simple_key('r0')
1500
 
        key1 = self.get_simple_key('r1')
1501
 
        key2 = self.get_simple_key('r2')
1502
 
        keyf = self.get_simple_key('foo')
1503
 
        f.add_lines(key0, [], ['a\n', 'b\n'])
1504
 
        if self.graph:
1505
 
            f.add_lines(key1, [key0], ['b\n', 'c\n'])
1506
 
        else:
1507
 
            f.add_lines(key1, [], ['b\n', 'c\n'])
1508
 
        keys = f.keys()
1509
 
        self.assertTrue(key0 in keys)
1510
 
        self.assertTrue(key1 in keys)
1511
 
        records = []
1512
 
        for record in f.get_record_stream([key0, key1], 'unordered', True):
1513
 
            records.append((record.key, record.get_bytes_as('fulltext')))
1514
 
        records.sort()
1515
 
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
1516
 
 
1517
 
    def test__add_text(self):
1518
 
        f = self.get_versionedfiles()
1519
 
        key0 = self.get_simple_key('r0')
1520
 
        key1 = self.get_simple_key('r1')
1521
 
        key2 = self.get_simple_key('r2')
1522
 
        keyf = self.get_simple_key('foo')
1523
 
        f._add_text(key0, [], 'a\nb\n')
1524
 
        if self.graph:
1525
 
            f._add_text(key1, [key0], 'b\nc\n')
1526
 
        else:
1527
 
            f._add_text(key1, [], 'b\nc\n')
1528
 
        keys = f.keys()
1529
 
        self.assertTrue(key0 in keys)
1530
 
        self.assertTrue(key1 in keys)
1531
 
        records = []
1532
 
        for record in f.get_record_stream([key0, key1], 'unordered', True):
1533
 
            records.append((record.key, record.get_bytes_as('fulltext')))
1534
 
        records.sort()
1535
 
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
1536
 
 
1537
1474
    def test_annotate(self):
1538
1475
        files = self.get_versionedfiles()
1539
1476
        self.get_diamond_files(files)
1573
1510
        self.assertRaises(RevisionNotPresent,
1574
1511
            files.annotate, prefix + ('missing-key',))
1575
1512
 
1576
 
    def test_check_no_parameters(self):
1577
 
        files = self.get_versionedfiles()
1578
 
 
1579
 
    def test_check_progressbar_parameter(self):
1580
 
        """A progress bar can be supplied because check can be a generator."""
1581
 
        pb = ui.ui_factory.nested_progress_bar()
1582
 
        self.addCleanup(pb.finished)
1583
 
        files = self.get_versionedfiles()
1584
 
        files.check(progress_bar=pb)
1585
 
 
1586
 
    def test_check_with_keys_becomes_generator(self):
1587
 
        files = self.get_versionedfiles()
1588
 
        self.get_diamond_files(files)
1589
 
        keys = files.keys()
1590
 
        entries = files.check(keys=keys)
1591
 
        seen = set()
1592
 
        # Texts output should be fulltexts.
1593
 
        self.capture_stream(files, entries, seen.add,
1594
 
            files.get_parent_map(keys), require_fulltext=True)
1595
 
        # All texts should be output.
1596
 
        self.assertEqual(set(keys), seen)
1597
 
 
1598
 
    def test_clear_cache(self):
1599
 
        files = self.get_versionedfiles()
1600
 
        files.clear_cache()
1601
 
 
1602
1513
    def test_construct(self):
1603
1514
        """Each parameterised test can be constructed on a transport."""
1604
1515
        files = self.get_versionedfiles()
1609
1520
            trailing_eol=trailing_eol, nograph=not self.graph,
1610
1521
            left_only=left_only, nokeys=nokeys)
1611
1522
 
1612
 
    def _add_content_nostoresha(self, add_lines):
 
1523
    def test_add_lines_nostoresha(self):
1613
1524
        """When nostore_sha is supplied using old content raises."""
1614
1525
        vf = self.get_versionedfiles()
1615
1526
        empty_text = ('a', [])
1617
1528
        sample_text_no_nl = ('c', ["foo\n", "bar"])
1618
1529
        shas = []
1619
1530
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
1620
 
            if add_lines:
1621
 
                sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
1622
 
                                         lines)
1623
 
            else:
1624
 
                sha, _, _ = vf._add_text(self.get_simple_key(version), [],
1625
 
                                         ''.join(lines))
 
1531
            sha, _, _ = vf.add_lines(self.get_simple_key(version), [], lines)
1626
1532
            shas.append(sha)
1627
1533
        # we now have a copy of all the lines in the vf.
1628
1534
        for sha, (version, lines) in zip(
1631
1537
            self.assertRaises(errors.ExistingContent,
1632
1538
                vf.add_lines, new_key, [], lines,
1633
1539
                nostore_sha=sha)
1634
 
            self.assertRaises(errors.ExistingContent,
1635
 
                vf._add_text, new_key, [], ''.join(lines),
1636
 
                nostore_sha=sha)
1637
1540
            # and no new version should have been added.
1638
1541
            record = vf.get_record_stream([new_key], 'unordered', True).next()
1639
1542
            self.assertEqual('absent', record.storage_kind)
1640
1543
 
1641
 
    def test_add_lines_nostoresha(self):
1642
 
        self._add_content_nostoresha(add_lines=True)
1643
 
 
1644
 
    def test__add_text_nostoresha(self):
1645
 
        self._add_content_nostoresha(add_lines=False)
1646
 
 
1647
1544
    def test_add_lines_return(self):
1648
1545
        files = self.get_versionedfiles()
1649
1546
        # save code by using the stock data insertion helper.
1756
1653
            f.get_record_stream([key_b], 'unordered', True
1757
1654
                ).next().get_bytes_as('fulltext'))
1758
1655
 
1759
 
    def test_get_known_graph_ancestry(self):
1760
 
        f = self.get_versionedfiles()
1761
 
        if not self.graph:
1762
 
            raise TestNotApplicable('ancestry info only relevant with graph.')
1763
 
        key_a = self.get_simple_key('a')
1764
 
        key_b = self.get_simple_key('b')
1765
 
        key_c = self.get_simple_key('c')
1766
 
        # A
1767
 
        # |\
1768
 
        # | B
1769
 
        # |/
1770
 
        # C
1771
 
        f.add_lines(key_a, [], ['\n'])
1772
 
        f.add_lines(key_b, [key_a], ['\n'])
1773
 
        f.add_lines(key_c, [key_a, key_b], ['\n'])
1774
 
        kg = f.get_known_graph_ancestry([key_c])
1775
 
        self.assertIsInstance(kg, _mod_graph.KnownGraph)
1776
 
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
1777
 
 
1778
 
    def test_known_graph_with_fallbacks(self):
1779
 
        f = self.get_versionedfiles('files')
1780
 
        if not self.graph:
1781
 
            raise TestNotApplicable('ancestry info only relevant with graph.')
1782
 
        if getattr(f, 'add_fallback_versioned_files', None) is None:
1783
 
            raise TestNotApplicable("%s doesn't support fallbacks"
1784
 
                                    % (f.__class__.__name__,))
1785
 
        key_a = self.get_simple_key('a')
1786
 
        key_b = self.get_simple_key('b')
1787
 
        key_c = self.get_simple_key('c')
1788
 
        # A     only in fallback
1789
 
        # |\
1790
 
        # | B
1791
 
        # |/
1792
 
        # C
1793
 
        g = self.get_versionedfiles('fallback')
1794
 
        g.add_lines(key_a, [], ['\n'])
1795
 
        f.add_fallback_versioned_files(g)
1796
 
        f.add_lines(key_b, [key_a], ['\n'])
1797
 
        f.add_lines(key_c, [key_a, key_b], ['\n'])
1798
 
        kg = f.get_known_graph_ancestry([key_c])
1799
 
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
1800
 
 
1801
1656
    def test_get_record_stream_empty(self):
1802
1657
        """An empty stream can be requested without error."""
1803
1658
        f = self.get_versionedfiles()
1814
1669
             'knit-delta-closure', 'knit-delta-closure-ref',
1815
1670
             'groupcompress-block', 'groupcompress-block-ref'])
1816
1671
 
1817
 
    def capture_stream(self, f, entries, on_seen, parents,
1818
 
        require_fulltext=False):
 
1672
    def capture_stream(self, f, entries, on_seen, parents):
1819
1673
        """Capture a stream for testing."""
1820
1674
        for factory in entries:
1821
1675
            on_seen(factory.key)
1826
1680
            self.assertEqual(parents[factory.key], factory.parents)
1827
1681
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1828
1682
                str)
1829
 
            if require_fulltext:
1830
 
                factory.get_bytes_as('fulltext')
1831
1683
 
1832
1684
    def test_get_record_stream_interface(self):
1833
1685
        """each item in a stream has to provide a regular interface."""
1840
1692
        self.capture_stream(files, entries, seen.add, parent_map)
1841
1693
        self.assertEqual(set(keys), seen)
1842
1694
 
 
1695
    def get_simple_key(self, suffix):
 
1696
        """Return a key for the object under test."""
 
1697
        if self.key_length == 1:
 
1698
            return (suffix,)
 
1699
        else:
 
1700
            return ('FileA',) + (suffix,)
 
1701
 
1843
1702
    def get_keys_and_sort_order(self):
1844
1703
        """Get diamond test keys list, and their sort ordering."""
1845
1704
        if self.key_length == 1:
2208
2067
        else:
2209
2068
            return None
2210
2069
 
2211
 
    def test_get_annotator(self):
2212
 
        files = self.get_versionedfiles()
2213
 
        self.get_diamond_files(files)
2214
 
        origin_key = self.get_simple_key('origin')
2215
 
        base_key = self.get_simple_key('base')
2216
 
        left_key = self.get_simple_key('left')
2217
 
        right_key = self.get_simple_key('right')
2218
 
        merged_key = self.get_simple_key('merged')
2219
 
        # annotator = files.get_annotator()
2220
 
        # introduced full text
2221
 
        origins, lines = files.get_annotator().annotate(origin_key)
2222
 
        self.assertEqual([(origin_key,)], origins)
2223
 
        self.assertEqual(['origin\n'], lines)
2224
 
        # a delta
2225
 
        origins, lines = files.get_annotator().annotate(base_key)
2226
 
        self.assertEqual([(base_key,)], origins)
2227
 
        # a merge
2228
 
        origins, lines = files.get_annotator().annotate(merged_key)
2229
 
        if self.graph:
2230
 
            self.assertEqual([
2231
 
                (base_key,),
2232
 
                (left_key,),
2233
 
                (right_key,),
2234
 
                (merged_key,),
2235
 
                ], origins)
2236
 
        else:
2237
 
            # Without a graph everything is new.
2238
 
            self.assertEqual([
2239
 
                (merged_key,),
2240
 
                (merged_key,),
2241
 
                (merged_key,),
2242
 
                (merged_key,),
2243
 
                ], origins)
2244
 
        self.assertRaises(RevisionNotPresent,
2245
 
            files.get_annotator().annotate, self.get_simple_key('missing-key'))
2246
 
 
2247
2070
    def test_get_parent_map(self):
2248
2071
        files = self.get_versionedfiles()
2249
2072
        if self.key_length == 1:
2452
2275
        else:
2453
2276
            self.assertIdenticalVersionedFile(source, files)
2454
2277
 
2455
 
    def test_insert_record_stream_long_parent_chain_out_of_order(self):
2456
 
        """An out of order stream can either error or work."""
2457
 
        if not self.graph:
2458
 
            raise TestNotApplicable('ancestry info only relevant with graph.')
2459
 
        # Create a reasonably long chain of records based on each other, where
2460
 
        # most will be deltas.
2461
 
        source = self.get_versionedfiles('source')
2462
 
        parents = ()
2463
 
        keys = []
2464
 
        content = [('same same %d\n' % n) for n in range(500)]
2465
 
        for letter in 'abcdefghijklmnopqrstuvwxyz':
2466
 
            key = ('key-' + letter,)
2467
 
            if self.key_length == 2:
2468
 
                key = ('prefix',) + key
2469
 
            content.append('content for ' + letter + '\n')
2470
 
            source.add_lines(key, parents, content)
2471
 
            keys.append(key)
2472
 
            parents = (key,)
2473
 
        # Create a stream of these records, excluding the first record that the
2474
 
        # rest ultimately depend upon, and insert it into a new vf.
2475
 
        streams = []
2476
 
        for key in reversed(keys):
2477
 
            streams.append(source.get_record_stream([key], 'unordered', False))
2478
 
        deltas = chain(*streams[:-1])
2479
 
        files = self.get_versionedfiles()
2480
 
        try:
2481
 
            files.insert_record_stream(deltas)
2482
 
        except RevisionNotPresent:
2483
 
            # Must not have corrupted the file.
2484
 
            files.check()
2485
 
        else:
2486
 
            # Must only report either just the first key as a missing parent,
2487
 
            # no key as missing (for nodelta scenarios).
2488
 
            missing = set(files.get_missing_compression_parent_keys())
2489
 
            missing.discard(keys[0])
2490
 
            self.assertEqual(set(), missing)
2491
 
 
2492
2278
    def get_knit_delta_source(self):
2493
2279
        """Get a source that can produce a stream with knit delta records,
2494
2280
        regardless of this test's scenario.
2562
2348
        # the ordering here is to make a tree so that dumb searches have
2563
2349
        # more changes to muck up.
2564
2350
 
2565
 
        class InstrumentedProgress(progress.ProgressTask):
 
2351
        class InstrumentedProgress(progress.DummyProgress):
2566
2352
 
2567
2353
            def __init__(self):
2568
 
                progress.ProgressTask.__init__(self)
 
2354
 
 
2355
                progress.DummyProgress.__init__(self)
2569
2356
                self.updates = []
2570
2357
 
2571
2358
            def update(self, msg=None, current=None, total=None):
2760
2547
        self.assertRaises(NotImplementedError,
2761
2548
                self.texts.add_mpdiffs, [])
2762
2549
 
2763
 
    def test_check_noerrors(self):
2764
 
        self.texts.check()
 
2550
    def test_check(self):
 
2551
        self.assertTrue(self.texts.check())
2765
2552
 
2766
2553
    def test_insert_record_stream(self):
2767
2554
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,