~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_knit.py

  • Committer: Jelmer Vernooij
  • Date: 2009-02-23 20:55:58 UTC
  • mfrom: (4034 +trunk)
  • mto: This revision was merged to the branch mainline in revision 4053.
  • Revision ID: jelmer@samba.org-20090223205558-1cx2k4w1zgs8r5qa
Merge bzr.dev.

@@ -42,6 +42,7 @@
     KnitSequenceMatcher,
     KnitVersionedFiles,
     PlainKnitContent,
+    _VFContentMapGenerator,
     _DirectPackAccess,
     _KndxIndex,
     _KnitGraphIndex,
@@ -63,6 +64,7 @@
 from bzrlib.versionedfile import (
     AbsentContentFactory,
     ConstantMapper,
+    network_bytes_to_kind_and_offset,
     RecordingVersionedFilesDecorator,
     )
 
@@ -297,7 +299,7 @@
         access = self.get_access()
         memos = access.add_raw_records([('key', 10)], '1234567890')
         self.assertEqual(['1234567890'], list(access.get_raw_records(memos)))
-
+
     def test_add_several_raw_records(self):
         """add_raw_records with many records and read some back."""
         access = self.get_access()
@@ -1237,6 +1239,15 @@
             else:
                 raise
 
+    def test_scan_unvalidated_index_not_implemented(self):
+        transport = MockTransport()
+        index = self.get_knit_index(transport, 'filename', 'r')
+        self.assertRaises(
+            NotImplementedError, index.scan_unvalidated_index,
+            'dummy graph_index')
+        self.assertRaises(
+            NotImplementedError, index.get_missing_compression_parents)
+
     def test_short_line(self):
         transport = MockTransport([
             _KndxIndex.HEADER,
@@ -1296,7 +1307,7 @@
 class TestBadShaError(KnitTests):
     """Tests for handling of sha errors."""
 
-    def test_exception_has_text(self):
+    def test_sha_exception_has_text(self):
         # having the failed text included in the error allows for recovery.
         source = self.make_test_knit()
         target = self.make_test_knit(name="target")
@@ -1313,7 +1324,8 @@
         target.insert_record_stream(
             source.get_record_stream([broken], 'unordered', False))
         err = self.assertRaises(errors.KnitCorrupt,
-            target.get_record_stream([broken], 'unordered', True).next)
+            target.get_record_stream([broken], 'unordered', True
+            ).next().get_bytes_as, 'chunked')
         self.assertEqual(['gam\n', 'bar\n'], err.content)
         # Test for formatting with live data
         self.assertStartsWith(str(err), "Knit ")
@@ -1524,7 +1536,7 @@
             [('parent',)])])
         # but neither should have added data:
         self.assertEqual([[], [], [], []], self.caught_entries)
-        
+
     def test_add_version_different_dup(self):
         index = self.two_graph_index(deltas=True, catch_adds=True)
         # change options
@@ -1536,7 +1548,7 @@
         self.assertRaises(errors.KnitCorrupt, index.add_records,
             [(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
         self.assertEqual([], self.caught_entries)
-        
+
     def test_add_versions_nodeltas(self):
         index = self.two_graph_index(catch_adds=True)
         index.add_records([
@@ -1584,7 +1596,7 @@
             [('parent',)])])
             # but neither should have added data.
         self.assertEqual([[], [], [], []], self.caught_entries)
-        
+
     def test_add_versions_different_dup(self):
         index = self.two_graph_index(deltas=True, catch_adds=True)
         # change options
@@ -1601,6 +1613,81 @@
              (('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
         self.assertEqual([], self.caught_entries)
 
+    def make_g_index_missing_compression_parent(self):
+        graph_index = self.make_g_index('missing_comp', 2,
+            [(('tip', ), ' 100 78',
+              ([('missing-parent', ), ('ghost', )], [('missing-parent', )]))])
+        return graph_index
+
+    def make_g_index_no_external_refs(self):
+        graph_index = self.make_g_index('no_external_refs', 2,
+            [(('rev', ), ' 100 78',
+              ([('parent', ), ('ghost', )], []))])
+        return graph_index
+
+    def test_add_good_unvalidated_index(self):
+        unvalidated = self.make_g_index_no_external_refs()
+        combined = CombinedGraphIndex([unvalidated])
+        index = _KnitGraphIndex(combined, lambda: True, deltas=True)
+        index.scan_unvalidated_index(unvalidated)
+        self.assertEqual(frozenset(), index.get_missing_compression_parents())
+
+    def test_add_incomplete_unvalidated_index(self):
+        unvalidated = self.make_g_index_missing_compression_parent()
+        combined = CombinedGraphIndex([unvalidated])
+        index = _KnitGraphIndex(combined, lambda: True, deltas=True)
+        index.scan_unvalidated_index(unvalidated)
+        # This also checks that its only the compression parent that is
+        # examined, otherwise 'ghost' would also be reported as a missing
+        # parent.
+        self.assertEqual(
+            frozenset([('missing-parent',)]),
+            index.get_missing_compression_parents())
+
+    def test_add_unvalidated_index_with_present_external_references(self):
+        index = self.two_graph_index(deltas=True)
+        # Ugly hack to get at one of the underlying GraphIndex objects that
+        # two_graph_index built.
+        unvalidated = index._graph_index._indices[1]
+        # 'parent' is an external ref of _indices[1] (unvalidated), but is
+        # present in _indices[0].
+        index.scan_unvalidated_index(unvalidated)
+        self.assertEqual(frozenset(), index.get_missing_compression_parents())
+
+    def make_new_missing_parent_g_index(self, name):
+        missing_parent = name + '-missing-parent'
+        graph_index = self.make_g_index(name, 2,
+            [((name + 'tip', ), ' 100 78',
+              ([(missing_parent, ), ('ghost', )], [(missing_parent, )]))])
+        return graph_index
+
+    def test_add_mulitiple_unvalidated_indices_with_missing_parents(self):
+        g_index_1 = self.make_new_missing_parent_g_index('one')
+        g_index_2 = self.make_new_missing_parent_g_index('two')
+        combined = CombinedGraphIndex([g_index_1, g_index_2])
+        index = _KnitGraphIndex(combined, lambda: True, deltas=True)
+        index.scan_unvalidated_index(g_index_1)
+        index.scan_unvalidated_index(g_index_2)
+        self.assertEqual(
+            frozenset([('one-missing-parent',), ('two-missing-parent',)]),
+            index.get_missing_compression_parents())
+
+    def test_add_mulitiple_unvalidated_indices_with_mutual_dependencies(self):
+        graph_index_a = self.make_g_index('one', 2,
+            [(('parent-one', ), ' 100 78', ([('non-compression-parent',)], [])),
+             (('child-of-two', ), ' 100 78',
+              ([('parent-two',)], [('parent-two',)]))])
+        graph_index_b = self.make_g_index('two', 2,
+            [(('parent-two', ), ' 100 78', ([('non-compression-parent',)], [])),
+             (('child-of-one', ), ' 100 78',
+              ([('parent-one',)], [('parent-one',)]))])
+        combined = CombinedGraphIndex([graph_index_a, graph_index_b])
+        index = _KnitGraphIndex(combined, lambda: True, deltas=True)
+        index.scan_unvalidated_index(graph_index_a)
+        index.scan_unvalidated_index(graph_index_b)
+        self.assertEqual(
+            frozenset([]), index.get_missing_compression_parents())
+
 
 class TestNoParentsGraphIndexKnit(KnitTests):
     """Tests for knits using _KnitGraphIndex with no parents."""
@@ -1614,6 +1701,14 @@
         size = trans.put_file(name, stream)
         return GraphIndex(trans, name, size)
 
+    def test_add_good_unvalidated_index(self):
+        unvalidated = self.make_g_index('unvalidated')
+        combined = CombinedGraphIndex([unvalidated])
+        index = _KnitGraphIndex(combined, lambda: True, parents=False)
+        index.scan_unvalidated_index(unvalidated)
+        self.assertEqual(frozenset(),
+            index.get_missing_compression_parents())
+
     def test_parents_deltas_incompatible(self):
         index = CombinedGraphIndex([])
         self.assertRaises(errors.KnitError, _KnitGraphIndex, lambda:True,
@@ -1700,7 +1795,7 @@
         index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000), [])])
         # but neither should have added data.
         self.assertEqual([[], [], [], []], self.caught_entries)
-        
+
     def test_add_version_different_dup(self):
         index = self.two_graph_index(catch_adds=True)
         # change options
@@ -1714,7 +1809,7 @@
         self.assertRaises(errors.KnitCorrupt, index.add_records,
             [(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
         self.assertEqual([], self.caught_entries)
-        
+
     def test_add_versions(self):
         index = self.two_graph_index(catch_adds=True)
         index.add_records([
@@ -1752,7 +1847,7 @@
         index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000), [])])
         # but neither should have added data.
         self.assertEqual([[], [], [], []], self.caught_entries)
-        
+
     def test_add_versions_different_dup(self):
         index = self.two_graph_index(catch_adds=True)
         # change options
@@ -1838,7 +1933,7 @@
 
     def test_check(self):
         # At the moment checking a stacked knit does implicitly check the
-        # fallback files.  
+        # fallback files.
         basis, test = self.get_basis_and_test_knit()
         test.check()
 
@@ -1936,7 +2031,10 @@
                 True).next()
             self.assertEqual(record.key, result[0])
             self.assertEqual(record.sha1, result[1])
-            self.assertEqual(record.storage_kind, result[2])
+            # We used to check that the storage kind matched, but actually it
+            # depends on whether it was sourced from the basis, or in a single
+            # group, because asking for full texts returns proxy objects to a
+            # _ContentMapGenerator object; so checking the kind is unneeded.
             self.assertEqual(record.get_bytes_as('fulltext'), result[3])
         # It's not strictly minimal, but it seems reasonable for now for it to
         # ask which fallbacks have which parents.
@@ -2079,7 +2177,7 @@
 
     def test_iter_lines_added_or_present_in_keys(self):
         # Lines from the basis are returned, and lines for a given key are only
-        # returned once. 
+        # returned once.
         key1 = ('foo1',)
         key2 = ('foo2',)
         # all sources are asked for keys:
@@ -2173,3 +2271,65 @@
         self.assertEqual(set([key_left, key_right]), set(last_call[1]))
         self.assertEqual('unordered', last_call[2])
         self.assertEqual(True, last_call[3])
+
+
+class TestNetworkBehaviour(KnitTests):
+    """Tests for getting data out of/into knits over the network."""
+
+    def test_include_delta_closure_generates_a_knit_delta_closure(self):
+        vf = self.make_test_knit(name='test')
+        # put in three texts, giving ft, delta, delta
+        vf.add_lines(('base',), (), ['base\n', 'content\n'])
+        vf.add_lines(('d1',), (('base',),), ['d1\n'])
+        vf.add_lines(('d2',), (('d1',),), ['d2\n'])
+        # But heuristics could interfere, so check what happened:
+        self.assertEqual(['knit-ft-gz', 'knit-delta-gz', 'knit-delta-gz'],
+            [record.storage_kind for record in
+             vf.get_record_stream([('base',), ('d1',), ('d2',)],
+                'topological', False)])
+        # generate a stream of just the deltas include_delta_closure=True,
+        # serialise to the network, and check that we get a delta closure on the wire.
+        stream = vf.get_record_stream([('d1',), ('d2',)], 'topological', True)
+        netb = [record.get_bytes_as(record.storage_kind) for record in stream]
+        # The first bytes should be a memo from _ContentMapGenerator, and the
+        # second bytes should be empty (because its a API proxy not something
+        # for wire serialisation.
+        self.assertEqual('', netb[1])
+        bytes = netb[0]
+        kind, line_end = network_bytes_to_kind_and_offset(bytes)
+        self.assertEqual('knit-delta-closure', kind)
+
+
+class TestContentMapGenerator(KnitTests):
+    """Tests for ContentMapGenerator"""
+
+    def test_get_record_stream_gives_records(self):
+        vf = self.make_test_knit(name='test')
+        # put in three texts, giving ft, delta, delta
+        vf.add_lines(('base',), (), ['base\n', 'content\n'])
+        vf.add_lines(('d1',), (('base',),), ['d1\n'])
+        vf.add_lines(('d2',), (('d1',),), ['d2\n'])
+        keys = [('d1',), ('d2',)]
+        generator = _VFContentMapGenerator(vf, keys,
+            global_map=vf.get_parent_map(keys))
+        for record in generator.get_record_stream():
+            if record.key == ('d1',):
+                self.assertEqual('d1\n', record.get_bytes_as('fulltext'))
+            else:
+                self.assertEqual('d2\n', record.get_bytes_as('fulltext'))
+
+    def test_get_record_stream_kinds_are_raw(self):
+        vf = self.make_test_knit(name='test')
+        # put in three texts, giving ft, delta, delta
+        vf.add_lines(('base',), (), ['base\n', 'content\n'])
+        vf.add_lines(('d1',), (('base',),), ['d1\n'])
+        vf.add_lines(('d2',), (('d1',),), ['d2\n'])
+        keys = [('base',), ('d1',), ('d2',)]
+        generator = _VFContentMapGenerator(vf, keys,
+            global_map=vf.get_parent_map(keys))
+        kinds = {('base',): 'knit-delta-closure',
+            ('d1',): 'knit-delta-closure-ref',
+            ('d2',): 'knit-delta-closure-ref',
+            }
+        for record in generator.get_record_stream():
+            self.assertEqual(kinds[record.key], record.storage_kind)
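
Note: the wire-level behaviour the two new test classes pin down can be summarised in a short sketch; this is illustrative only and reuses the calls shown in the tests above (vf stands for a knit as returned by make_test_knit, with 'd1' and 'd2' stored as deltas):

    # Illustrative sketch, mirroring
    # test_include_delta_closure_generates_a_knit_delta_closure above.
    stream = vf.get_record_stream([('d1',), ('d2',)], 'topological', True)
    wire = [record.get_bytes_as(record.storage_kind) for record in stream]
    # With include_delta_closure=True the whole closure travels in the first
    # record's bytes; the second record is an API proxy and serialises to ''.
    kind, line_end = network_bytes_to_kind_and_offset(wire[0])
    assert kind == 'knit-delta-closure'
    assert wire[1] == ''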