~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_versionedfile.py

  • Committer: Canonical.com Patch Queue Manager
  • Date: 2009-03-17 07:05:37 UTC
  • mfrom: (4152.1.2 branch.stacked.streams)
  • Revision ID: pqm@pqm.ubuntu.com-20090317070537-zaud24vjs2szna87
(robertc) Add client-side streaming from stacked branches (over
        bzr:// protocols) when the sort order is compatible with doing
        that. (Robert Collins, Andrew Bennetts)

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006-2010 Canonical Ltd
 
1
# Copyright (C) 2005, 2009 Canonical Ltd
2
2
#
3
3
# Authors:
4
4
#   Johan Rydberg <jrydberg@gnu.org>
15
15
#
16
16
# You should have received a copy of the GNU General Public License
17
17
# along with this program; if not, write to the Free Software
18
 
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
18
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
19
 
20
20
 
21
21
# TODO: might be nice to create a versionedfile with some type of corruption
24
24
from itertools import chain, izip
25
25
from StringIO import StringIO
26
26
 
 
27
import bzrlib
27
28
from bzrlib import (
28
29
    errors,
29
 
    graph as _mod_graph,
30
 
    groupcompress,
31
 
    knit as _mod_knit,
32
30
    osutils,
33
31
    progress,
34
 
    transport,
35
 
    ui,
36
32
    )
37
33
from bzrlib.errors import (
38
34
                           RevisionNotPresent,
39
35
                           RevisionAlreadyPresent,
40
36
                           WeaveParentMismatch
41
37
                           )
 
38
from bzrlib import knit as _mod_knit
42
39
from bzrlib.knit import (
43
40
    cleanup_pack_knit,
44
41
    make_file_factory,
46
43
    KnitAnnotateFactory,
47
44
    KnitPlainFactory,
48
45
    )
 
46
from bzrlib.symbol_versioning import one_four, one_five
49
47
from bzrlib.tests import (
50
48
    TestCase,
51
49
    TestCaseWithMemoryTransport,
57
55
    )
58
56
from bzrlib.tests.http_utils import TestCaseWithWebserver
59
57
from bzrlib.trace import mutter
 
58
from bzrlib.transport import get_transport
60
59
from bzrlib.transport.memory import MemoryTransport
61
60
from bzrlib.tsort import topo_sort
62
61
from bzrlib.tuned_gzip import GzipFile
123
122
            'key_length':1,
124
123
            'support_partial_insertion': False,
125
124
            }),
126
 
        ('groupcompress-nograph', {
127
 
            'cleanup':groupcompress.cleanup_pack_group,
128
 
            'factory':groupcompress.make_pack_factory(False, False, 1),
129
 
            'graph': False,
130
 
            'key_length':1,
131
 
            'support_partial_insertion':False,
132
 
            }),
133
125
        ]
134
126
    len_two_scenarios = [
135
127
        ('weave-prefix', {
154
146
            'key_length':2,
155
147
            'support_partial_insertion': True,
156
148
            }),
157
 
        ('groupcompress', {
158
 
            'cleanup':groupcompress.cleanup_pack_group,
159
 
            'factory':groupcompress.make_pack_factory(True, False, 1),
160
 
            'graph': True,
161
 
            'key_length':1,
162
 
            'support_partial_insertion':False,
163
 
            }),
164
149
        ]
165
150
    scenarios = len_one_scenarios + len_two_scenarios
166
151
    return multiply_tests(to_adapt, scenarios, result)
195
180
 
196
181
 
197
182
def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
198
 
    nograph=False, nokeys=False):
 
183
    nograph=False):
199
184
    """Get a diamond graph to exercise deltas and merges.
200
185
 
201
186
    This creates a 5-node graph in files. If files supports 2-length keys two
208
193
    :param nograph: If True, do not provide parents to the add_lines calls;
209
194
        this is useful for tests that need inserted data but have graphless
210
195
        stores.
211
 
    :param nokeys: If True, pass None as the key for all insertions.
212
 
        Currently implies nograph.
213
196
    :return: The results of the add_lines calls.
214
197
    """
215
 
    if nokeys:
216
 
        nograph = True
217
198
    if key_length == 1:
218
199
        prefixes = [()]
219
200
    else:
230
211
        else:
231
212
            result = [prefix + suffix for suffix in suffix_list]
232
213
            return result
233
 
    def get_key(suffix):
234
 
        if nokeys:
235
 
            return (None, )
236
 
        else:
237
 
            return (suffix,)
238
214
    # we loop over each key because that spreads the inserts across prefixes,
239
215
    # which is how commit operates.
240
216
    for prefix in prefixes:
241
 
        result.append(files.add_lines(prefix + get_key('origin'), (),
 
217
        result.append(files.add_lines(prefix + ('origin',), (),
242
218
            ['origin' + last_char]))
243
219
    for prefix in prefixes:
244
 
        result.append(files.add_lines(prefix + get_key('base'),
 
220
        result.append(files.add_lines(prefix + ('base',),
245
221
            get_parents([('origin',)]), ['base' + last_char]))
246
222
    for prefix in prefixes:
247
 
        result.append(files.add_lines(prefix + get_key('left'),
 
223
        result.append(files.add_lines(prefix + ('left',),
248
224
            get_parents([('base',)]),
249
225
            ['base\n', 'left' + last_char]))
250
226
    if not left_only:
251
227
        for prefix in prefixes:
252
 
            result.append(files.add_lines(prefix + get_key('right'),
 
228
            result.append(files.add_lines(prefix + ('right',),
253
229
                get_parents([('base',)]),
254
230
                ['base\n', 'right' + last_char]))
255
231
        for prefix in prefixes:
256
 
            result.append(files.add_lines(prefix + get_key('merged'),
 
232
            result.append(files.add_lines(prefix + ('merged',),
257
233
                get_parents([('left',), ('right',)]),
258
234
                ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
259
235
    return result
734
710
        # the ordering here is to make a tree so that dumb searches have
735
711
        # more changes to muck up.
736
712
 
737
 
        class InstrumentedProgress(progress.ProgressTask):
 
713
        class InstrumentedProgress(progress.DummyProgress):
738
714
 
739
715
            def __init__(self):
740
 
                progress.ProgressTask.__init__(self)
 
716
 
 
717
                progress.DummyProgress.__init__(self)
741
718
                self.updates = []
742
719
 
743
720
            def update(self, msg=None, current=None, total=None):
849
826
        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
850
827
 
851
828
    def test_readonly_mode(self):
852
 
        t = transport.get_transport(self.get_url('.'))
 
829
        transport = get_transport(self.get_url('.'))
853
830
        factory = self.get_factory()
854
 
        vf = factory('id', t, 0777, create=True, access_mode='w')
855
 
        vf = factory('id', t, access_mode='r')
 
831
        vf = factory('id', transport, 0777, create=True, access_mode='w')
 
832
        vf = factory('id', transport, access_mode='r')
856
833
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
857
834
        self.assertRaises(errors.ReadOnlyError,
858
835
                          vf.add_lines_with_ghosts,
880
857
class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
881
858
 
882
859
    def get_file(self, name='foo'):
883
 
        return WeaveFile(name, transport.get_transport(self.get_url('.')),
884
 
                         create=True,
885
 
                         get_scope=self.get_transaction)
 
860
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
 
861
            get_scope=self.get_transaction)
886
862
 
887
863
    def get_file_corrupted_text(self):
888
 
        w = WeaveFile('foo', transport.get_transport(self.get_url('.')),
889
 
                      create=True,
890
 
                      get_scope=self.get_transaction)
 
864
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
 
865
            get_scope=self.get_transaction)
891
866
        w.add_lines('v1', [], ['hello\n'])
892
867
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
893
868
 
921
896
        return w
922
897
 
923
898
    def reopen_file(self, name='foo', create=False):
924
 
        return WeaveFile(name, transport.get_transport(self.get_url('.')),
925
 
                         create=create,
926
 
                         get_scope=self.get_transaction)
 
899
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
 
900
            get_scope=self.get_transaction)
927
901
 
928
902
    def test_no_implicit_create(self):
929
903
        self.assertRaises(errors.NoSuchFile,
930
904
                          WeaveFile,
931
905
                          'foo',
932
 
                          transport.get_transport(self.get_url('.')),
 
906
                          get_transport(self.get_url('.')),
933
907
                          get_scope=self.get_transaction)
934
908
 
935
909
    def get_factory(self):
1002
976
        # we should be able to read from http with a versioned file.
1003
977
        vf = self.get_file()
1004
978
        # try an empty file access
1005
 
        readonly_vf = self.get_factory()('foo', transport.get_transport(
1006
 
                self.get_readonly_url('.')))
 
979
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
1007
980
        self.assertEqual([], readonly_vf.versions())
1008
 
 
1009
 
    def test_readonly_http_works_with_feeling(self):
1010
 
        # we should be able to read from http with a versioned file.
1011
 
        vf = self.get_file()
1012
981
        # now with feeling.
1013
982
        vf.add_lines('1', [], ['a\n'])
1014
983
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
1015
 
        readonly_vf = self.get_factory()('foo', transport.get_transport(
1016
 
                self.get_readonly_url('.')))
 
984
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
1017
985
        self.assertEqual(['1', '2'], vf.versions())
1018
 
        self.assertEqual(['1', '2'], readonly_vf.versions())
1019
986
        for version in readonly_vf.versions():
1020
987
            readonly_vf.get_lines(version)
1021
988
 
1023
990
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
1024
991
 
1025
992
    def get_file(self):
1026
 
        return WeaveFile('foo', transport.get_transport(self.get_url('.')),
1027
 
                         create=True,
1028
 
                         get_scope=self.get_transaction)
 
993
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
 
994
            get_scope=self.get_transaction)
1029
995
 
1030
996
    def get_factory(self):
1031
997
        return WeaveFile
1164
1130
            """
1165
1131
        result = """\
1166
1132
            line 1
1167
 
<<<<<<<\x20
1168
 
            line 2
1169
 
=======
1170
 
>>>>>>>\x20
1171
1133
            """
1172
1134
        self._test_merge_from_strings(base, a, b, result)
1173
1135
 
1275
1237
class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
1276
1238
 
1277
1239
    def get_file(self, name='foo'):
1278
 
        return WeaveFile(name, transport.get_transport(self.get_url('.')),
1279
 
                         create=True)
 
1240
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)
1280
1241
 
1281
1242
    def log_contents(self, w):
1282
1243
        self.log('weave is:')
1481
1442
            transport.mkdir('.')
1482
1443
        files = self.factory(transport)
1483
1444
        if self.cleanup is not None:
1484
 
            self.addCleanup(self.cleanup, files)
 
1445
            self.addCleanup(lambda:self.cleanup(files))
1485
1446
        return files
1486
1447
 
1487
 
    def get_simple_key(self, suffix):
1488
 
        """Return a key for the object under test."""
1489
 
        if self.key_length == 1:
1490
 
            return (suffix,)
1491
 
        else:
1492
 
            return ('FileA',) + (suffix,)
1493
 
 
1494
 
    def test_add_lines(self):
1495
 
        f = self.get_versionedfiles()
1496
 
        key0 = self.get_simple_key('r0')
1497
 
        key1 = self.get_simple_key('r1')
1498
 
        key2 = self.get_simple_key('r2')
1499
 
        keyf = self.get_simple_key('foo')
1500
 
        f.add_lines(key0, [], ['a\n', 'b\n'])
1501
 
        if self.graph:
1502
 
            f.add_lines(key1, [key0], ['b\n', 'c\n'])
1503
 
        else:
1504
 
            f.add_lines(key1, [], ['b\n', 'c\n'])
1505
 
        keys = f.keys()
1506
 
        self.assertTrue(key0 in keys)
1507
 
        self.assertTrue(key1 in keys)
1508
 
        records = []
1509
 
        for record in f.get_record_stream([key0, key1], 'unordered', True):
1510
 
            records.append((record.key, record.get_bytes_as('fulltext')))
1511
 
        records.sort()
1512
 
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
1513
 
 
1514
 
    def test__add_text(self):
1515
 
        f = self.get_versionedfiles()
1516
 
        key0 = self.get_simple_key('r0')
1517
 
        key1 = self.get_simple_key('r1')
1518
 
        key2 = self.get_simple_key('r2')
1519
 
        keyf = self.get_simple_key('foo')
1520
 
        f._add_text(key0, [], 'a\nb\n')
1521
 
        if self.graph:
1522
 
            f._add_text(key1, [key0], 'b\nc\n')
1523
 
        else:
1524
 
            f._add_text(key1, [], 'b\nc\n')
1525
 
        keys = f.keys()
1526
 
        self.assertTrue(key0 in keys)
1527
 
        self.assertTrue(key1 in keys)
1528
 
        records = []
1529
 
        for record in f.get_record_stream([key0, key1], 'unordered', True):
1530
 
            records.append((record.key, record.get_bytes_as('fulltext')))
1531
 
        records.sort()
1532
 
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
1533
 
 
1534
1448
    def test_annotate(self):
1535
1449
        files = self.get_versionedfiles()
1536
1450
        self.get_diamond_files(files)
1570
1484
        self.assertRaises(RevisionNotPresent,
1571
1485
            files.annotate, prefix + ('missing-key',))
1572
1486
 
1573
 
    def test_check_no_parameters(self):
1574
 
        files = self.get_versionedfiles()
1575
 
 
1576
 
    def test_check_progressbar_parameter(self):
1577
 
        """A progress bar can be supplied because check can be a generator."""
1578
 
        pb = ui.ui_factory.nested_progress_bar()
1579
 
        self.addCleanup(pb.finished)
1580
 
        files = self.get_versionedfiles()
1581
 
        files.check(progress_bar=pb)
1582
 
 
1583
 
    def test_check_with_keys_becomes_generator(self):
1584
 
        files = self.get_versionedfiles()
1585
 
        self.get_diamond_files(files)
1586
 
        keys = files.keys()
1587
 
        entries = files.check(keys=keys)
1588
 
        seen = set()
1589
 
        # Texts output should be fulltexts.
1590
 
        self.capture_stream(files, entries, seen.add,
1591
 
            files.get_parent_map(keys), require_fulltext=True)
1592
 
        # All texts should be output.
1593
 
        self.assertEqual(set(keys), seen)
1594
 
 
1595
 
    def test_clear_cache(self):
1596
 
        files = self.get_versionedfiles()
1597
 
        files.clear_cache()
1598
 
 
1599
1487
    def test_construct(self):
1600
1488
        """Each parameterised test can be constructed on a transport."""
1601
1489
        files = self.get_versionedfiles()
1602
1490
 
1603
 
    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
1604
 
        nokeys=False):
 
1491
    def get_diamond_files(self, files, trailing_eol=True, left_only=False):
1605
1492
        return get_diamond_files(files, self.key_length,
1606
1493
            trailing_eol=trailing_eol, nograph=not self.graph,
1607
 
            left_only=left_only, nokeys=nokeys)
 
1494
            left_only=left_only)
1608
1495
 
1609
 
    def _add_content_nostoresha(self, add_lines):
 
1496
    def test_add_lines_nostoresha(self):
1610
1497
        """When nostore_sha is supplied using old content raises."""
1611
1498
        vf = self.get_versionedfiles()
1612
1499
        empty_text = ('a', [])
1614
1501
        sample_text_no_nl = ('c', ["foo\n", "bar"])
1615
1502
        shas = []
1616
1503
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
1617
 
            if add_lines:
1618
 
                sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
1619
 
                                         lines)
1620
 
            else:
1621
 
                sha, _, _ = vf._add_text(self.get_simple_key(version), [],
1622
 
                                         ''.join(lines))
 
1504
            sha, _, _ = vf.add_lines(self.get_simple_key(version), [], lines)
1623
1505
            shas.append(sha)
1624
1506
        # we now have a copy of all the lines in the vf.
1625
1507
        for sha, (version, lines) in zip(
1628
1510
            self.assertRaises(errors.ExistingContent,
1629
1511
                vf.add_lines, new_key, [], lines,
1630
1512
                nostore_sha=sha)
1631
 
            self.assertRaises(errors.ExistingContent,
1632
 
                vf._add_text, new_key, [], ''.join(lines),
1633
 
                nostore_sha=sha)
1634
1513
            # and no new version should have been added.
1635
1514
            record = vf.get_record_stream([new_key], 'unordered', True).next()
1636
1515
            self.assertEqual('absent', record.storage_kind)
1637
1516
 
1638
 
    def test_add_lines_nostoresha(self):
1639
 
        self._add_content_nostoresha(add_lines=True)
1640
 
 
1641
 
    def test__add_text_nostoresha(self):
1642
 
        self._add_content_nostoresha(add_lines=False)
1643
 
 
1644
1517
    def test_add_lines_return(self):
1645
1518
        files = self.get_versionedfiles()
1646
1519
        # save code by using the stock data insertion helper.
1672
1545
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1673
1546
                results)
1674
1547
 
1675
 
    def test_add_lines_no_key_generates_chk_key(self):
1676
 
        files = self.get_versionedfiles()
1677
 
        # save code by using the stock data insertion helper.
1678
 
        adds = self.get_diamond_files(files, nokeys=True)
1679
 
        results = []
1680
 
        # We can only validate the first 2 elements returned from add_lines.
1681
 
        for add in adds:
1682
 
            self.assertEqual(3, len(add))
1683
 
            results.append(add[:2])
1684
 
        if self.key_length == 1:
1685
 
            self.assertEqual([
1686
 
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1687
 
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1688
 
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1689
 
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1690
 
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1691
 
                results)
1692
 
            # Check the added items got CHK keys.
1693
 
            self.assertEqual(set([
1694
 
                ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
1695
 
                ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
1696
 
                ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
1697
 
                ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
1698
 
                ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
1699
 
                ]),
1700
 
                files.keys())
1701
 
        elif self.key_length == 2:
1702
 
            self.assertEqual([
1703
 
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1704
 
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1705
 
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1706
 
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1707
 
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1708
 
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1709
 
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1710
 
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1711
 
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
1712
 
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1713
 
                results)
1714
 
            # Check the added items got CHK keys.
1715
 
            self.assertEqual(set([
1716
 
                ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
1717
 
                ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
1718
 
                ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
1719
 
                ('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
1720
 
                ('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
1721
 
                ('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
1722
 
                ('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
1723
 
                ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
1724
 
                ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
1725
 
                ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
1726
 
                ]),
1727
 
                files.keys())
1728
 
 
1729
1548
    def test_empty_lines(self):
1730
1549
        """Empty files can be stored."""
1731
1550
        f = self.get_versionedfiles()
1753
1572
            f.get_record_stream([key_b], 'unordered', True
1754
1573
                ).next().get_bytes_as('fulltext'))
1755
1574
 
1756
 
    def test_get_known_graph_ancestry(self):
1757
 
        f = self.get_versionedfiles()
1758
 
        if not self.graph:
1759
 
            raise TestNotApplicable('ancestry info only relevant with graph.')
1760
 
        key_a = self.get_simple_key('a')
1761
 
        key_b = self.get_simple_key('b')
1762
 
        key_c = self.get_simple_key('c')
1763
 
        # A
1764
 
        # |\
1765
 
        # | B
1766
 
        # |/
1767
 
        # C
1768
 
        f.add_lines(key_a, [], ['\n'])
1769
 
        f.add_lines(key_b, [key_a], ['\n'])
1770
 
        f.add_lines(key_c, [key_a, key_b], ['\n'])
1771
 
        kg = f.get_known_graph_ancestry([key_c])
1772
 
        self.assertIsInstance(kg, _mod_graph.KnownGraph)
1773
 
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
1774
 
 
1775
 
    def test_known_graph_with_fallbacks(self):
1776
 
        f = self.get_versionedfiles('files')
1777
 
        if not self.graph:
1778
 
            raise TestNotApplicable('ancestry info only relevant with graph.')
1779
 
        if getattr(f, 'add_fallback_versioned_files', None) is None:
1780
 
            raise TestNotApplicable("%s doesn't support fallbacks"
1781
 
                                    % (f.__class__.__name__,))
1782
 
        key_a = self.get_simple_key('a')
1783
 
        key_b = self.get_simple_key('b')
1784
 
        key_c = self.get_simple_key('c')
1785
 
        # A     only in fallback
1786
 
        # |\
1787
 
        # | B
1788
 
        # |/
1789
 
        # C
1790
 
        g = self.get_versionedfiles('fallback')
1791
 
        g.add_lines(key_a, [], ['\n'])
1792
 
        f.add_fallback_versioned_files(g)
1793
 
        f.add_lines(key_b, [key_a], ['\n'])
1794
 
        f.add_lines(key_c, [key_a, key_b], ['\n'])
1795
 
        kg = f.get_known_graph_ancestry([key_c])
1796
 
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
1797
 
 
1798
1575
    def test_get_record_stream_empty(self):
1799
1576
        """An empty stream can be requested without error."""
1800
1577
        f = self.get_versionedfiles()
1808
1585
             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
1809
1586
             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
1810
1587
             'knit-delta-gz',
1811
 
             'knit-delta-closure', 'knit-delta-closure-ref',
1812
 
             'groupcompress-block', 'groupcompress-block-ref'])
 
1588
             'knit-delta-closure', 'knit-delta-closure-ref'])
1813
1589
 
1814
 
    def capture_stream(self, f, entries, on_seen, parents,
1815
 
        require_fulltext=False):
 
1590
    def capture_stream(self, f, entries, on_seen, parents):
1816
1591
        """Capture a stream for testing."""
1817
1592
        for factory in entries:
1818
1593
            on_seen(factory.key)
1819
1594
            self.assertValidStorageKind(factory.storage_kind)
1820
 
            if factory.sha1 is not None:
1821
 
                self.assertEqual(f.get_sha1s([factory.key])[factory.key],
1822
 
                    factory.sha1)
 
1595
            self.assertEqual(f.get_sha1s([factory.key])[factory.key],
 
1596
                factory.sha1)
1823
1597
            self.assertEqual(parents[factory.key], factory.parents)
1824
1598
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1825
1599
                str)
1826
 
            if require_fulltext:
1827
 
                factory.get_bytes_as('fulltext')
1828
1600
 
1829
1601
    def test_get_record_stream_interface(self):
1830
1602
        """each item in a stream has to provide a regular interface."""
1837
1609
        self.capture_stream(files, entries, seen.add, parent_map)
1838
1610
        self.assertEqual(set(keys), seen)
1839
1611
 
 
1612
    def get_simple_key(self, suffix):
 
1613
        """Return a key for the object under test."""
 
1614
        if self.key_length == 1:
 
1615
            return (suffix,)
 
1616
        else:
 
1617
            return ('FileA',) + (suffix,)
 
1618
 
1840
1619
    def get_keys_and_sort_order(self):
1841
1620
        """Get diamond test keys list, and their sort ordering."""
1842
1621
        if self.key_length == 1:
1957
1736
        for factory in entries:
1958
1737
            seen.add(factory.key)
1959
1738
            self.assertValidStorageKind(factory.storage_kind)
1960
 
            if factory.sha1 is not None:
1961
 
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
1962
 
                                 factory.sha1)
 
1739
            self.assertEqual(files.get_sha1s([factory.key])[factory.key],
 
1740
                factory.sha1)
1963
1741
            self.assertEqual(parent_map[factory.key], factory.parents)
1964
1742
            # currently no stream emits mpdiff
1965
1743
            self.assertRaises(errors.UnavailableRepresentation,
2163
1941
                self.assertEqual(None, factory.parents)
2164
1942
            else:
2165
1943
                self.assertValidStorageKind(factory.storage_kind)
2166
 
                if factory.sha1 is not None:
2167
 
                    sha1 = files.get_sha1s([factory.key])[factory.key]
2168
 
                    self.assertEqual(sha1, factory.sha1)
 
1944
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
 
1945
                    factory.sha1)
2169
1946
                self.assertEqual(parents[factory.key], factory.parents)
2170
1947
                self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
2171
1948
                    str)
2205
1982
        else:
2206
1983
            return None
2207
1984
 
2208
 
    def test_get_annotator(self):
2209
 
        files = self.get_versionedfiles()
2210
 
        self.get_diamond_files(files)
2211
 
        origin_key = self.get_simple_key('origin')
2212
 
        base_key = self.get_simple_key('base')
2213
 
        left_key = self.get_simple_key('left')
2214
 
        right_key = self.get_simple_key('right')
2215
 
        merged_key = self.get_simple_key('merged')
2216
 
        # annotator = files.get_annotator()
2217
 
        # introduced full text
2218
 
        origins, lines = files.get_annotator().annotate(origin_key)
2219
 
        self.assertEqual([(origin_key,)], origins)
2220
 
        self.assertEqual(['origin\n'], lines)
2221
 
        # a delta
2222
 
        origins, lines = files.get_annotator().annotate(base_key)
2223
 
        self.assertEqual([(base_key,)], origins)
2224
 
        # a merge
2225
 
        origins, lines = files.get_annotator().annotate(merged_key)
2226
 
        if self.graph:
2227
 
            self.assertEqual([
2228
 
                (base_key,),
2229
 
                (left_key,),
2230
 
                (right_key,),
2231
 
                (merged_key,),
2232
 
                ], origins)
2233
 
        else:
2234
 
            # Without a graph everything is new.
2235
 
            self.assertEqual([
2236
 
                (merged_key,),
2237
 
                (merged_key,),
2238
 
                (merged_key,),
2239
 
                (merged_key,),
2240
 
                ], origins)
2241
 
        self.assertRaises(RevisionNotPresent,
2242
 
            files.get_annotator().annotate, self.get_simple_key('missing-key'))
2243
 
 
2244
1985
    def test_get_parent_map(self):
2245
1986
        files = self.get_versionedfiles()
2246
1987
        if self.key_length == 1:
2449
2190
        else:
2450
2191
            self.assertIdenticalVersionedFile(source, files)
2451
2192
 
2452
 
    def test_insert_record_stream_long_parent_chain_out_of_order(self):
2453
 
        """An out of order stream can either error or work."""
2454
 
        if not self.graph:
2455
 
            raise TestNotApplicable('ancestry info only relevant with graph.')
2456
 
        # Create a reasonably long chain of records based on each other, where
2457
 
        # most will be deltas.
2458
 
        source = self.get_versionedfiles('source')
2459
 
        parents = ()
2460
 
        keys = []
2461
 
        content = [('same same %d\n' % n) for n in range(500)]
2462
 
        for letter in 'abcdefghijklmnopqrstuvwxyz':
2463
 
            key = ('key-' + letter,)
2464
 
            if self.key_length == 2:
2465
 
                key = ('prefix',) + key
2466
 
            content.append('content for ' + letter + '\n')
2467
 
            source.add_lines(key, parents, content)
2468
 
            keys.append(key)
2469
 
            parents = (key,)
2470
 
        # Create a stream of these records, excluding the first record that the
2471
 
        # rest ultimately depend upon, and insert it into a new vf.
2472
 
        streams = []
2473
 
        for key in reversed(keys):
2474
 
            streams.append(source.get_record_stream([key], 'unordered', False))
2475
 
        deltas = chain(*streams[:-1])
2476
 
        files = self.get_versionedfiles()
2477
 
        try:
2478
 
            files.insert_record_stream(deltas)
2479
 
        except RevisionNotPresent:
2480
 
            # Must not have corrupted the file.
2481
 
            files.check()
2482
 
        else:
2483
 
            # Must only report either just the first key as a missing parent,
2484
 
            # or no key as missing (for nodelta scenarios).
2485
 
            missing = set(files.get_missing_compression_parent_keys())
2486
 
            missing.discard(keys[0])
2487
 
            self.assertEqual(set(), missing)
2488
 
 
2489
2193
    def get_knit_delta_source(self):
2490
2194
        """Get a source that can produce a stream with knit delta records,
2491
2195
        regardless of this test's scenario.
2559
2263
        # the ordering here is to make a tree so that dumb searches have
2560
2264
        # more changes to muck up.
2561
2265
 
2562
 
        class InstrumentedProgress(progress.ProgressTask):
 
2266
        class InstrumentedProgress(progress.DummyProgress):
2563
2267
 
2564
2268
            def __init__(self):
2565
 
                progress.ProgressTask.__init__(self)
 
2269
 
 
2270
                progress.DummyProgress.__init__(self)
2566
2271
                self.updates = []
2567
2272
 
2568
2273
            def update(self, msg=None, current=None, total=None):
2757
2462
        self.assertRaises(NotImplementedError,
2758
2463
                self.texts.add_mpdiffs, [])
2759
2464
 
2760
 
    def test_check_noerrors(self):
2761
 
        self.texts.check()
 
2465
    def test_check(self):
 
2466
        self.assertTrue(self.texts.check())
2762
2467
 
2763
2468
    def test_insert_record_stream(self):
2764
2469
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,