~bzr-pqm/bzr/bzr.dev


Viewing changes to bzrlib/tests/test_versionedfile.py

  • Committer: Robert Collins
  • Date: 2009-03-16 07:44:05 UTC
  • Merged: this revision was merged to the branch mainline in revision 4149.
  • Revision ID: robertc@robertcollins.net-20090316074405-t9guvf13rj4mlhuk
Commit message: More test fallout, but all caught now.

--- bzrlib/tests/test_versionedfile.py
+++ bzrlib/tests/test_versionedfile.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2005, 2009 Canonical Ltd
+# Copyright (C) 2006-2010 Canonical Ltd
 #
 # Authors:
 #   Johan Rydberg <jrydberg@gnu.org>
@@ -15,7 +15,7 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
 
 # TODO: might be nice to create a versionedfile with some type of corruption
@@ -24,18 +24,20 @@
 from itertools import chain, izip
 from StringIO import StringIO
 
-import bzrlib
 from bzrlib import (
     errors,
+    graph as _mod_graph,
+    groupcompress,
+    knit as _mod_knit,
     osutils,
     progress,
+    ui,
     )
 from bzrlib.errors import (
                            RevisionNotPresent,
                            RevisionAlreadyPresent,
                            WeaveParentMismatch
                            )
-from bzrlib import knit as _mod_knit
 from bzrlib.knit import (
     cleanup_pack_knit,
     make_file_factory,
@@ -43,7 +45,6 @@
     KnitAnnotateFactory,
     KnitPlainFactory,
     )
-from bzrlib.symbol_versioning import one_four, one_five
 from bzrlib.tests import (
     TestCase,
     TestCaseWithMemoryTransport,
@@ -122,6 +123,13 @@
             'key_length':1,
             'support_partial_insertion': False,
             }),
+        ('groupcompress-nograph', {
+            'cleanup':groupcompress.cleanup_pack_group,
+            'factory':groupcompress.make_pack_factory(False, False, 1),
+            'graph': False,
+            'key_length':1,
+            'support_partial_insertion':False,
+            }),
         ]
     len_two_scenarios = [
         ('weave-prefix', {
@@ -146,6 +154,13 @@
             'key_length':2,
             'support_partial_insertion': True,
             }),
+        ('groupcompress', {
+            'cleanup':groupcompress.cleanup_pack_group,
+            'factory':groupcompress.make_pack_factory(True, False, 1),
+            'graph': True,
+            'key_length':1,
+            'support_partial_insertion':False,
+            }),
         ]
     scenarios = len_one_scenarios + len_two_scenarios
     return multiply_tests(to_adapt, scenarios, result)
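
The two scenario lists feed multiply_tests, which clones every test once per scenario and applies the scenario's dict entries as attributes, so one test body runs against weave, knit, groupcompress, and the prefixed stores alike. A minimal sketch of that pattern in plain unittest (apply_scenarios and DemoTest are illustrative names, not bzrlib's exact implementation):

import copy
import unittest

def apply_scenarios(test, scenarios):
    """Yield one copy of `test` per scenario, with the scenario's
    parameters set as attributes on the copy."""
    for name, params in scenarios:
        clone = copy.copy(test)
        for attr, value in params.items():
            setattr(clone, attr, value)    # e.g. clone.key_length = 2
        clone.scenario_name = name
        yield clone

class DemoTest(unittest.TestCase):
    def test_key_length_is_set(self):
        self.assertTrue(self.key_length >= 1)

scenarios = [('short', {'key_length': 1}), ('prefixed', {'key_length': 2})]
suite = unittest.TestSuite()
for t in apply_scenarios(DemoTest('test_key_length_is_set'), scenarios):
    suite.addTest(t)
unittest.TextTestRunner(verbosity=0).run(suite)
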
@@ -180,7 +195,7 @@
 
 
 def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
-    nograph=False):
+    nograph=False, nokeys=False):
     """Get a diamond graph to exercise deltas and merges.
 
     This creates a 5-node graph in files. If files supports 2-length keys two
@@ -193,8 +208,12 @@
     :param nograph: If True, do not provide parents to the add_lines calls;
         this is useful for tests that need inserted data but have graphless
         stores.
+    :param nokeys: If True, pass None is as the key for all insertions.
+        Currently implies nograph.
     :return: The results of the add_lines calls.
     """
+    if nokeys:
+        nograph = True
     if key_length == 1:
         prefixes = [()]
     else:
@@ -211,25 +230,30 @@
         else:
             result = [prefix + suffix for suffix in suffix_list]
             return result
+    def get_key(suffix):
+        if nokeys:
+            return (None, )
+        else:
+            return (suffix,)
     # we loop over each key because that spreads the inserts across prefixes,
     # which is how commit operates.
     for prefix in prefixes:
-        result.append(files.add_lines(prefix + ('origin',), (),
+        result.append(files.add_lines(prefix + get_key('origin'), (),
             ['origin' + last_char]))
     for prefix in prefixes:
-        result.append(files.add_lines(prefix + ('base',),
+        result.append(files.add_lines(prefix + get_key('base'),
             get_parents([('origin',)]), ['base' + last_char]))
     for prefix in prefixes:
-        result.append(files.add_lines(prefix + ('left',),
+        result.append(files.add_lines(prefix + get_key('left'),
             get_parents([('base',)]),
             ['base\n', 'left' + last_char]))
     if not left_only:
         for prefix in prefixes:
-            result.append(files.add_lines(prefix + ('right',),
+            result.append(files.add_lines(prefix + get_key('right'),
                 get_parents([('base',)]),
                 ['base\n', 'right' + last_char]))
         for prefix in prefixes:
-            result.append(files.add_lines(prefix + ('merged',),
+            result.append(files.add_lines(prefix + get_key('merged'),
                 get_parents([('left',), ('right',)]),
                 ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
     return result
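
Expressed as plain data, the fixture get_diamond_files builds is the following five-node parent map (per prefix; the sketch below uses bare suffix tuples as keys):

# The diamond used throughout these tests, as a parent map:
#
#        origin
#          |
#         base
#         /  \
#      left  right
#         \  /
#        merged
diamond_parent_map = {
    ('origin',): (),
    ('base',):   (('origin',),),
    ('left',):   (('base',),),
    ('right',):  (('base',),),
    ('merged',): (('left',), ('right',)),
}
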
@@ -710,11 +734,10 @@
         # the ordering here is to make a tree so that dumb searches have
         # more changes to muck up.
 
-        class InstrumentedProgress(progress.DummyProgress):
+        class InstrumentedProgress(progress.ProgressTask):
 
             def __init__(self):
-
-                progress.DummyProgress.__init__(self)
+                progress.ProgressTask.__init__(self)
                 self.updates = []
 
             def update(self, msg=None, current=None, total=None):
@@ -1130,6 +1153,10 @@
             """
         result = """\
             line 1
+<<<<<<<\x20
+            line 2
+=======
+>>>>>>>\x20
             """
         self._test_merge_from_strings(base, a, b, result)
 
@@ -1442,9 +1469,56 @@
             transport.mkdir('.')
         files = self.factory(transport)
         if self.cleanup is not None:
-            self.addCleanup(lambda:self.cleanup(files))
+            self.addCleanup(self.cleanup, files)
         return files
 
+    def get_simple_key(self, suffix):
+        """Return a key for the object under test."""
+        if self.key_length == 1:
+            return (suffix,)
+        else:
+            return ('FileA',) + (suffix,)
+
+    def test_add_lines(self):
+        f = self.get_versionedfiles()
+        key0 = self.get_simple_key('r0')
+        key1 = self.get_simple_key('r1')
+        key2 = self.get_simple_key('r2')
+        keyf = self.get_simple_key('foo')
+        f.add_lines(key0, [], ['a\n', 'b\n'])
+        if self.graph:
+            f.add_lines(key1, [key0], ['b\n', 'c\n'])
+        else:
+            f.add_lines(key1, [], ['b\n', 'c\n'])
+        keys = f.keys()
+        self.assertTrue(key0 in keys)
+        self.assertTrue(key1 in keys)
+        records = []
+        for record in f.get_record_stream([key0, key1], 'unordered', True):
+            records.append((record.key, record.get_bytes_as('fulltext')))
+        records.sort()
+        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
+
+    def test__add_text(self):
+        f = self.get_versionedfiles()
+        key0 = self.get_simple_key('r0')
+        key1 = self.get_simple_key('r1')
+        key2 = self.get_simple_key('r2')
+        keyf = self.get_simple_key('foo')
+        f._add_text(key0, [], 'a\nb\n')
+        if self.graph:
+            f._add_text(key1, [key0], 'b\nc\n')
+        else:
+            f._add_text(key1, [], 'b\nc\n')
+        keys = f.keys()
+        self.assertTrue(key0 in keys)
+        self.assertTrue(key1 in keys)
+        records = []
+        for record in f.get_record_stream([key0, key1], 'unordered', True):
+            records.append((record.key, record.get_bytes_as('fulltext')))
+        records.sort()
+        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
+
     def test_annotate(self):
         files = self.get_versionedfiles()
         self.get_diamond_files(files)
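
test_add_lines and test__add_text rely on add_lines returning the SHA-1 of the stored text as its first element (the tests unpack `sha, _, _`). A self-contained illustration of that digest over a list of lines (hashlib standing in for bzrlib's own helper):

import hashlib

def sha_strings(lines):
    """SHA-1 over the concatenated lines (what add_lines reports back)."""
    s = hashlib.sha1()
    for line in lines:
        s.update(line)
    return s.hexdigest()

# The digest over the lines equals the digest over the joined text:
assert sha_strings([b'a\n', b'b\n']) == hashlib.sha1(b'a\nb\n').hexdigest()
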
@@ -1484,16 +1558,43 @@
         self.assertRaises(RevisionNotPresent,
             files.annotate, prefix + ('missing-key',))
 
+    def test_check_no_parameters(self):
+        files = self.get_versionedfiles()
+
+    def test_check_progressbar_parameter(self):
+        """A progress bar can be supplied because check can be a generator."""
+        pb = ui.ui_factory.nested_progress_bar()
+        self.addCleanup(pb.finished)
+        files = self.get_versionedfiles()
+        files.check(progress_bar=pb)
+
+    def test_check_with_keys_becomes_generator(self):
+        files = self.get_versionedfiles()
+        self.get_diamond_files(files)
+        keys = files.keys()
+        entries = files.check(keys=keys)
+        seen = set()
+        # Texts output should be fulltexts.
+        self.capture_stream(files, entries, seen.add,
+            files.get_parent_map(keys), require_fulltext=True)
+        # All texts should be output.
+        self.assertEqual(set(keys), seen)
+
+    def test_clear_cache(self):
+        files = self.get_versionedfiles()
+        files.clear_cache()
+
     def test_construct(self):
         """Each parameterised test can be constructed on a transport."""
         files = self.get_versionedfiles()
 
-    def get_diamond_files(self, files, trailing_eol=True, left_only=False):
+    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
+        nokeys=False):
         return get_diamond_files(files, self.key_length,
             trailing_eol=trailing_eol, nograph=not self.graph,
-            left_only=left_only)
+            left_only=left_only, nokeys=nokeys)
 
-    def test_add_lines_nostoresha(self):
+    def _add_content_nostoresha(self, add_lines):
         """When nostore_sha is supplied using old content raises."""
         vf = self.get_versionedfiles()
         empty_text = ('a', [])
@@ -1501,7 +1602,12 @@
         sample_text_no_nl = ('c', ["foo\n", "bar"])
         shas = []
         for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
-            sha, _, _ = vf.add_lines(self.get_simple_key(version), [], lines)
+            if add_lines:
+                sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
+                                         lines)
+            else:
+                sha, _, _ = vf._add_text(self.get_simple_key(version), [],
+                                         ''.join(lines))
             shas.append(sha)
         # we now have a copy of all the lines in the vf.
         for sha, (version, lines) in zip(
@@ -1510,10 +1616,19 @@
             self.assertRaises(errors.ExistingContent,
                 vf.add_lines, new_key, [], lines,
                 nostore_sha=sha)
+            self.assertRaises(errors.ExistingContent,
+                vf._add_text, new_key, [], ''.join(lines),
+                nostore_sha=sha)
             # and no new version should have been added.
             record = vf.get_record_stream([new_key], 'unordered', True).next()
             self.assertEqual('absent', record.storage_kind)
 
+    def test_add_lines_nostoresha(self):
+        self._add_content_nostoresha(add_lines=True)
+
+    def test__add_text_nostoresha(self):
+        self._add_content_nostoresha(add_lines=False)
+
     def test_add_lines_return(self):
         files = self.get_versionedfiles()
         # save code by using the stock data insertion helper.
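
The nostore_sha argument being exercised is a de-duplication guard: when the text to be added hashes to the supplied SHA-1, the store must raise ExistingContent rather than write a duplicate version. A sketch of that contract against a plain dict (add_text_guarded is a hypothetical helper, not bzrlib API):

import hashlib

class ExistingContent(Exception):
    """Raised instead of storing a text the caller already has."""

def add_text_guarded(store, key, text, nostore_sha=None):
    sha = hashlib.sha1(text).hexdigest()
    if nostore_sha is not None and sha == nostore_sha:
        # The caller told us it already holds content with this digest,
        # so adding a new version would be a pointless duplicate.
        raise ExistingContent(key)
    store[key] = text
    return sha

store = {}
sha = add_text_guarded(store, ('k1',), b'base\n')
try:
    add_text_guarded(store, ('k2',), b'base\n', nostore_sha=sha)
except ExistingContent:
    pass  # expected: identical content was refused
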
@@ -1545,6 +1660,60 @@
                 ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                 results)
 
+    def test_add_lines_no_key_generates_chk_key(self):
+        files = self.get_versionedfiles()
+        # save code by using the stock data insertion helper.
+        adds = self.get_diamond_files(files, nokeys=True)
+        results = []
+        # We can only validate the first 2 elements returned from add_lines.
+        for add in adds:
+            self.assertEqual(3, len(add))
+            results.append(add[:2])
+        if self.key_length == 1:
+            self.assertEqual([
+                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
+                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
+                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
+                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
+                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
+                results)
+            # Check the added items got CHK keys.
+            self.assertEqual(set([
+                ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
+                ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
+                ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
+                ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
+                ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
+                ]),
+                files.keys())
+        elif self.key_length == 2:
+            self.assertEqual([
+                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
+                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
+                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
+                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
+                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
+                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
+                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
+                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
+                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
+                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
+                results)
+            # Check the added items got CHK keys.
+            self.assertEqual(set([
+                ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
+                ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
+                ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
+                ('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
+                ('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
+                ('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
+                ('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
+                ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
+                ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
+                ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
+                ]),
+                files.keys())
+
     def test_empty_lines(self):
         """Empty files can be stored."""
         f = self.get_versionedfiles()
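
The expected keys in the new test are content-hash (CHK) keys: when None is passed as the key, the store derives one from the text's SHA-1, which is why the assertions can list exact digests. An illustrative computation of one such key:

import hashlib

def chk_key(text):
    """Content-hash key: 'sha1:' plus the text's hex digest, as a 1-tuple."""
    return ('sha1:' + hashlib.sha1(text).hexdigest(),)

# 'origin\n' is the first diamond text (7 bytes); its key should match
# the 'sha1:00e364d2...' entry asserted in the test above.
print(chk_key(b'origin\n'))
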
@@ -1572,6 +1741,48 @@
             f.get_record_stream([key_b], 'unordered', True
                 ).next().get_bytes_as('fulltext'))
 
+    def test_get_known_graph_ancestry(self):
+        f = self.get_versionedfiles()
+        if not self.graph:
+            raise TestNotApplicable('ancestry info only relevant with graph.')
+        key_a = self.get_simple_key('a')
+        key_b = self.get_simple_key('b')
+        key_c = self.get_simple_key('c')
+        # A
+        # |\
+        # | B
+        # |/
+        # C
+        f.add_lines(key_a, [], ['\n'])
+        f.add_lines(key_b, [key_a], ['\n'])
+        f.add_lines(key_c, [key_a, key_b], ['\n'])
+        kg = f.get_known_graph_ancestry([key_c])
+        self.assertIsInstance(kg, _mod_graph.KnownGraph)
+        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
+
+    def test_known_graph_with_fallbacks(self):
+        f = self.get_versionedfiles('files')
+        if not self.graph:
+            raise TestNotApplicable('ancestry info only relevant with graph.')
+        if getattr(f, 'add_fallback_versioned_files', None) is None:
+            raise TestNotApplicable("%s doesn't support fallbacks"
+                                    % (f.__class__.__name__,))
+        key_a = self.get_simple_key('a')
+        key_b = self.get_simple_key('b')
+        key_c = self.get_simple_key('c')
+        # A     only in fallback
+        # |\
+        # | B
+        # |/
+        # C
+        g = self.get_versionedfiles('fallback')
+        g.add_lines(key_a, [], ['\n'])
+        f.add_fallback_versioned_files(g)
+        f.add_lines(key_b, [key_a], ['\n'])
+        f.add_lines(key_c, [key_a, key_b], ['\n'])
+        kg = f.get_known_graph_ancestry([key_c])
+        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
+
     def test_get_record_stream_empty(self):
         """An empty stream can be requested without error."""
         f = self.get_versionedfiles()
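
topo_sort on the returned KnownGraph must list each key after all of its parents; for the A/B/C fixture above the only valid order is a, b, c. A compact pure-Python equivalent for acyclic graphs of this shape (a sketch, not bzrlib's implementation):

def topo_sort(parent_map):
    """Return keys so that every key appears after all of its parents."""
    order, done = [], set()
    pending = sorted(parent_map)    # deterministic tie-breaking
    while pending:
        remaining = []
        for key in pending:
            if all(p in done for p in parent_map[key]):
                order.append(key)
                done.add(key)
            else:
                remaining.append(key)
        pending = remaining
    return order

graph = {('a',): (), ('b',): [('a',)], ('c',): [('a',), ('b',)]}
assert topo_sort(graph) == [('a',), ('b',), ('c',)]
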
@@ -1585,18 +1796,23 @@
              'knit-ft', 'knit-delta', 'chunked', 'fulltext',
              'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
              'knit-delta-gz',
-             'knit-delta-closure', 'knit-delta-closure-ref'])
+             'knit-delta-closure', 'knit-delta-closure-ref',
+             'groupcompress-block', 'groupcompress-block-ref'])
 
-    def capture_stream(self, f, entries, on_seen, parents):
+    def capture_stream(self, f, entries, on_seen, parents,
+        require_fulltext=False):
         """Capture a stream for testing."""
         for factory in entries:
             on_seen(factory.key)
             self.assertValidStorageKind(factory.storage_kind)
-            self.assertEqual(f.get_sha1s([factory.key])[factory.key],
-                factory.sha1)
+            if factory.sha1 is not None:
+                self.assertEqual(f.get_sha1s([factory.key])[factory.key],
+                    factory.sha1)
             self.assertEqual(parents[factory.key], factory.parents)
             self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                 str)
+            if require_fulltext:
+                factory.get_bytes_as('fulltext')
 
     def test_get_record_stream_interface(self):
         """each item in a stream has to provide a regular interface."""
@@ -1609,13 +1825,6 @@
         self.capture_stream(files, entries, seen.add, parent_map)
         self.assertEqual(set(keys), seen)
 
-    def get_simple_key(self, suffix):
-        """Return a key for the object under test."""
-        if self.key_length == 1:
-            return (suffix,)
-        else:
-            return ('FileA',) + (suffix,)
-
     def get_keys_and_sort_order(self):
         """Get diamond test keys list, and their sort ordering."""
         if self.key_length == 1:
@@ -1736,8 +1945,9 @@
         for factory in entries:
             seen.add(factory.key)
             self.assertValidStorageKind(factory.storage_kind)
-            self.assertEqual(files.get_sha1s([factory.key])[factory.key],
-                factory.sha1)
+            if factory.sha1 is not None:
+                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
+                                 factory.sha1)
             self.assertEqual(parent_map[factory.key], factory.parents)
             # currently no stream emits mpdiff
             self.assertRaises(errors.UnavailableRepresentation,
@@ -1941,8 +2151,9 @@
                 self.assertEqual(None, factory.parents)
             else:
                 self.assertValidStorageKind(factory.storage_kind)
-                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
-                    factory.sha1)
+                if factory.sha1 is not None:
+                    sha1 = files.get_sha1s([factory.key])[factory.key]
+                    self.assertEqual(sha1, factory.sha1)
                 self.assertEqual(parents[factory.key], factory.parents)
                 self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                     str)
@@ -1982,6 +2193,42 @@
         else:
             return None
 
+    def test_get_annotator(self):
+        files = self.get_versionedfiles()
+        self.get_diamond_files(files)
+        origin_key = self.get_simple_key('origin')
+        base_key = self.get_simple_key('base')
+        left_key = self.get_simple_key('left')
+        right_key = self.get_simple_key('right')
+        merged_key = self.get_simple_key('merged')
+        # annotator = files.get_annotator()
+        # introduced full text
+        origins, lines = files.get_annotator().annotate(origin_key)
+        self.assertEqual([(origin_key,)], origins)
+        self.assertEqual(['origin\n'], lines)
+        # a delta
+        origins, lines = files.get_annotator().annotate(base_key)
+        self.assertEqual([(base_key,)], origins)
+        # a merge
+        origins, lines = files.get_annotator().annotate(merged_key)
+        if self.graph:
+            self.assertEqual([
+                (base_key,),
+                (left_key,),
+                (right_key,),
+                (merged_key,),
+                ], origins)
+        else:
+            # Without a graph everything is new.
+            self.assertEqual([
+                (merged_key,),
+                (merged_key,),
+                (merged_key,),
+                (merged_key,),
+                ], origins)
+        self.assertRaises(RevisionNotPresent,
+            files.get_annotator().annotate, self.get_simple_key('missing-key'))
+
     def test_get_parent_map(self):
         files = self.get_versionedfiles()
         if self.key_length == 1:
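
get_annotator().annotate(key) returns the origin key for every line plus the lines themselves; with a graph, merged's lines are attributed to the revisions that introduced them, while a graphless store credits everything to merged. The expected shape as plain data (pairing origins with lines for readability; keys abbreviated to bare suffix tuples):

# annotate(merged_key) with a graph: one (origin, line) pair per line
with_graph = [
    (('base',),   'base\n'),
    (('left',),   'left\n'),
    (('right',),  'right\n'),
    (('merged',), 'merged\n'),
]
# Without a graph nothing is shared, so every line is "new" in merged:
without_graph = [(('merged',), line) for line in
                 ['base\n', 'left\n', 'right\n', 'merged\n']]
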
@@ -2190,6 +2437,43 @@
         else:
             self.assertIdenticalVersionedFile(source, files)
 
+    def test_insert_record_stream_long_parent_chain_out_of_order(self):
+        """An out of order stream can either error or work."""
+        if not self.graph:
+            raise TestNotApplicable('ancestry info only relevant with graph.')
+        # Create a reasonably long chain of records based on each other, where
+        # most will be deltas.
+        source = self.get_versionedfiles('source')
+        parents = ()
+        keys = []
+        content = [('same same %d\n' % n) for n in range(500)]
+        for letter in 'abcdefghijklmnopqrstuvwxyz':
+            key = ('key-' + letter,)
+            if self.key_length == 2:
+                key = ('prefix',) + key
+            content.append('content for ' + letter + '\n')
+            source.add_lines(key, parents, content)
+            keys.append(key)
+            parents = (key,)
+        # Create a stream of these records, excluding the first record that the
+        # rest ultimately depend upon, and insert it into a new vf.
+        streams = []
+        for key in reversed(keys):
+            streams.append(source.get_record_stream([key], 'unordered', False))
+        deltas = chain(*streams[:-1])
+        files = self.get_versionedfiles()
+        try:
+            files.insert_record_stream(deltas)
+        except RevisionNotPresent:
+            # Must not have corrupted the file.
+            files.check()
+        else:
+            # Must only report either just the first key as a missing parent,
+            # no key as missing (for nodelta scenarios).
+            missing = set(files.get_missing_compression_parent_keys())
+            missing.discard(keys[0])
+            self.assertEqual(set(), missing)
+
     def get_knit_delta_source(self):
         """Get a source that can produce a stream with knit delta records,
         regardless of this test's scenario.
@@ -2263,11 +2547,10 @@
         # the ordering here is to make a tree so that dumb searches have
         # more changes to muck up.
 
-        class InstrumentedProgress(progress.DummyProgress):
+        class InstrumentedProgress(progress.ProgressTask):
 
             def __init__(self):
-
-                progress.DummyProgress.__init__(self)
+                progress.ProgressTask.__init__(self)
                 self.updates = []
 
             def update(self, msg=None, current=None, total=None):
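
Both InstrumentedProgress hunks use the same testing trick, independent of which progress base class is current: subclass it and record every update() call so assertions can inspect how the code under test drove the bar. The pattern in isolation (RecordingProgress is an illustrative stand-in, no bzrlib required):

class RecordingProgress(object):
    """Stand-in progress sink that remembers every update it receives."""

    def __init__(self):
        self.updates = []

    def update(self, msg=None, current=None, total=None):
        self.updates.append((msg, current, total))

pb = RecordingProgress()
pb.update('walking', 1, 26)
pb.update('walking', 2, 26)
assert pb.updates == [('walking', 1, 26), ('walking', 2, 26)]
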
@@ -2462,8 +2745,8 @@
         self.assertRaises(NotImplementedError,
                 self.texts.add_mpdiffs, [])
 
-    def test_check(self):
-        self.assertTrue(self.texts.check())
+    def test_check_noerrors(self):
+        self.texts.check()
 
     def test_insert_record_stream(self):
         self.assertRaises(NotImplementedError, self.texts.insert_record_stream,