~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/per_versionedfile.py

  • Committer: Canonical.com Patch Queue Manager
  • Date: 2010-09-01 08:02:42 UTC
  • mfrom: (5390.3.3 faster-revert-593560)
  • Revision ID: pqm@pqm.ubuntu.com-20100901080242-esg62ody4frwmy66
(spiv) Avoid repeatedly calling self.target.all_file_ids() in InterTree.iter_changes. (Andrew Bennetts)

@@ -1,4 +1,4 @@
-# Copyright (C) 2005, 2009 Canonical Ltd
+# Copyright (C) 2006-2010 Canonical Ltd
 #
 # Authors:
 #   Johan Rydberg <jrydberg@gnu.org>
@@ -26,10 +26,13 @@
 
 from bzrlib import (
     errors,
+    graph as _mod_graph,
     groupcompress,
     knit as _mod_knit,
     osutils,
     progress,
+    transport,
+    ui,
     )
 from bzrlib.errors import (
                            RevisionNotPresent,
@@ -54,7 +57,6 @@
     )
 from bzrlib.tests.http_utils import TestCaseWithWebserver
 from bzrlib.trace import mutter
-from bzrlib.transport import get_transport
 from bzrlib.transport.memory import MemoryTransport
 from bzrlib.tsort import topo_sort
 from bzrlib.tuned_gzip import GzipFile
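
Note on the import hunks above: the bare get_transport import is dropped and the tests now go through the bzrlib.transport module object. A minimal sketch of the new spelling, with '.' as an illustrative local path:

    from bzrlib import transport

    # Same get_transport function as before, reached via the module rather
    # than imported directly into the test's namespace.
    t = transport.get_transport('.')
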
@@ -732,11 +734,10 @@
         # the ordering here is to make a tree so that dumb searches have
         # more changes to muck up.
 
-        class InstrumentedProgress(progress.DummyProgress):
+        class InstrumentedProgress(progress.ProgressTask):
 
             def __init__(self):
-
-                progress.DummyProgress.__init__(self)
+                progress.ProgressTask.__init__(self)
                 self.updates = []
 
             def update(self, msg=None, current=None, total=None):
@@ -848,10 +849,10 @@
         self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
 
     def test_readonly_mode(self):
-        transport = get_transport(self.get_url('.'))
+        t = transport.get_transport(self.get_url('.'))
         factory = self.get_factory()
-        vf = factory('id', transport, 0777, create=True, access_mode='w')
-        vf = factory('id', transport, access_mode='r')
+        vf = factory('id', t, 0777, create=True, access_mode='w')
+        vf = factory('id', t, access_mode='r')
         self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
         self.assertRaises(errors.ReadOnlyError,
                           vf.add_lines_with_ghosts,
@@ -879,12 +880,14 @@
 class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
 
     def get_file(self, name='foo'):
-        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
-            get_scope=self.get_transaction)
+        return WeaveFile(name, transport.get_transport(self.get_url('.')),
+                         create=True,
+                         get_scope=self.get_transaction)
 
     def get_file_corrupted_text(self):
-        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
-            get_scope=self.get_transaction)
+        w = WeaveFile('foo', transport.get_transport(self.get_url('.')),
+                      create=True,
+                      get_scope=self.get_transaction)
         w.add_lines('v1', [], ['hello\n'])
         w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
 
@@ -918,14 +921,15 @@
         return w
 
     def reopen_file(self, name='foo', create=False):
-        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
-            get_scope=self.get_transaction)
+        return WeaveFile(name, transport.get_transport(self.get_url('.')),
+                         create=create,
+                         get_scope=self.get_transaction)
 
     def test_no_implicit_create(self):
         self.assertRaises(errors.NoSuchFile,
                           WeaveFile,
                           'foo',
-                          get_transport(self.get_url('.')),
+                          transport.get_transport(self.get_url('.')),
                           get_scope=self.get_transaction)
 
     def get_factory(self):
@@ -998,13 +1002,20 @@
         # we should be able to read from http with a versioned file.
         vf = self.get_file()
         # try an empty file access
-        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
+        readonly_vf = self.get_factory()('foo', transport.get_transport(
+                self.get_readonly_url('.')))
         self.assertEqual([], readonly_vf.versions())
+
+    def test_readonly_http_works_with_feeling(self):
+        # we should be able to read from http with a versioned file.
+        vf = self.get_file()
         # now with feeling.
         vf.add_lines('1', [], ['a\n'])
         vf.add_lines('2', ['1'], ['b\n', 'a\n'])
-        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
+        readonly_vf = self.get_factory()('foo', transport.get_transport(
+                self.get_readonly_url('.')))
         self.assertEqual(['1', '2'], vf.versions())
+        self.assertEqual(['1', '2'], readonly_vf.versions())
         for version in readonly_vf.versions():
             readonly_vf.get_lines(version)
 
@@ -1012,8 +1023,9 @@
 class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
 
     def get_file(self):
-        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
-            get_scope=self.get_transaction)
+        return WeaveFile('foo', transport.get_transport(self.get_url('.')),
+                         create=True,
+                         get_scope=self.get_transaction)
 
     def get_factory(self):
         return WeaveFile
@@ -1152,6 +1164,10 @@
             """
         result = """\
             line 1
+<<<<<<<\x20
+            line 2
+=======
+>>>>>>>\x20
             """
         self._test_merge_from_strings(base, a, b, result)
 
@@ -1259,7 +1275,8 @@
 class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
 
     def get_file(self, name='foo'):
-        return WeaveFile(name, get_transport(self.get_url('.')), create=True)
+        return WeaveFile(name, transport.get_transport(self.get_url('.')),
+                         create=True)
 
     def log_contents(self, w):
         self.log('weave is:')
@@ -1464,9 +1481,56 @@
             transport.mkdir('.')
         files = self.factory(transport)
         if self.cleanup is not None:
-            self.addCleanup(lambda:self.cleanup(files))
+            self.addCleanup(self.cleanup, files)
         return files
 
+    def get_simple_key(self, suffix):
+        """Return a key for the object under test."""
+        if self.key_length == 1:
+            return (suffix,)
+        else:
+            return ('FileA',) + (suffix,)
+
+    def test_add_lines(self):
+        f = self.get_versionedfiles()
+        key0 = self.get_simple_key('r0')
+        key1 = self.get_simple_key('r1')
+        key2 = self.get_simple_key('r2')
+        keyf = self.get_simple_key('foo')
+        f.add_lines(key0, [], ['a\n', 'b\n'])
+        if self.graph:
+            f.add_lines(key1, [key0], ['b\n', 'c\n'])
+        else:
+            f.add_lines(key1, [], ['b\n', 'c\n'])
+        keys = f.keys()
+        self.assertTrue(key0 in keys)
+        self.assertTrue(key1 in keys)
+        records = []
+        for record in f.get_record_stream([key0, key1], 'unordered', True):
+            records.append((record.key, record.get_bytes_as('fulltext')))
+        records.sort()
+        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
+
+    def test__add_text(self):
+        f = self.get_versionedfiles()
+        key0 = self.get_simple_key('r0')
+        key1 = self.get_simple_key('r1')
+        key2 = self.get_simple_key('r2')
+        keyf = self.get_simple_key('foo')
+        f._add_text(key0, [], 'a\nb\n')
+        if self.graph:
+            f._add_text(key1, [key0], 'b\nc\n')
+        else:
+            f._add_text(key1, [], 'b\nc\n')
+        keys = f.keys()
+        self.assertTrue(key0 in keys)
+        self.assertTrue(key1 in keys)
+        records = []
+        for record in f.get_record_stream([key0, key1], 'unordered', True):
+            records.append((record.key, record.get_bytes_as('fulltext')))
+        records.sort()
+        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
+
     def test_annotate(self):
         files = self.get_versionedfiles()
         self.get_diamond_files(files)
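
A note on the cleanup change in the hunk above: TestCase.addCleanup accepts the callable and its arguments separately, so the lambda wrapper is unnecessary. Equivalent registrations, using the names from the hunk:

    # Old spelling: capture `files` in a lambda.
    self.addCleanup(lambda: self.cleanup(files))
    # New spelling: addCleanup applies the arguments at cleanup time.
    self.addCleanup(self.cleanup, files)
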
@@ -1506,6 +1570,32 @@
         self.assertRaises(RevisionNotPresent,
             files.annotate, prefix + ('missing-key',))
 
+    def test_check_no_parameters(self):
+        files = self.get_versionedfiles()
+
+    def test_check_progressbar_parameter(self):
+        """A progress bar can be supplied because check can be a generator."""
+        pb = ui.ui_factory.nested_progress_bar()
+        self.addCleanup(pb.finished)
+        files = self.get_versionedfiles()
+        files.check(progress_bar=pb)
+
+    def test_check_with_keys_becomes_generator(self):
+        files = self.get_versionedfiles()
+        self.get_diamond_files(files)
+        keys = files.keys()
+        entries = files.check(keys=keys)
+        seen = set()
+        # Texts output should be fulltexts.
+        self.capture_stream(files, entries, seen.add,
+            files.get_parent_map(keys), require_fulltext=True)
+        # All texts should be output.
+        self.assertEqual(set(keys), seen)
+
+    def test_clear_cache(self):
+        files = self.get_versionedfiles()
+        files.clear_cache()
+
     def test_construct(self):
         """Each parameterised test can be constructed on a transport."""
         files = self.get_versionedfiles()
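
The check() tests added above treat check(keys=...) as a generator of content records that the caller consumes; a minimal usage sketch based only on what these tests exercise, with names taken from the hunk:

    keys = files.keys()
    entries = files.check(keys=keys)
    # Each yielded record factory can be read back as a fulltext while checking.
    for record in entries:
        text = record.get_bytes_as('fulltext')
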
@@ -1516,7 +1606,7 @@
             trailing_eol=trailing_eol, nograph=not self.graph,
             left_only=left_only, nokeys=nokeys)
 
-    def test_add_lines_nostoresha(self):
+    def _add_content_nostoresha(self, add_lines):
         """When nostore_sha is supplied using old content raises."""
         vf = self.get_versionedfiles()
         empty_text = ('a', [])
@@ -1524,7 +1614,12 @@
         sample_text_no_nl = ('c', ["foo\n", "bar"])
         shas = []
         for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
-            sha, _, _ = vf.add_lines(self.get_simple_key(version), [], lines)
+            if add_lines:
+                sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
+                                         lines)
+            else:
+                sha, _, _ = vf._add_text(self.get_simple_key(version), [],
+                                         ''.join(lines))
             shas.append(sha)
         # we now have a copy of all the lines in the vf.
         for sha, (version, lines) in zip(
@@ -1533,10 +1628,19 @@
             self.assertRaises(errors.ExistingContent,
                 vf.add_lines, new_key, [], lines,
                 nostore_sha=sha)
+            self.assertRaises(errors.ExistingContent,
+                vf._add_text, new_key, [], ''.join(lines),
+                nostore_sha=sha)
             # and no new version should have been added.
             record = vf.get_record_stream([new_key], 'unordered', True).next()
             self.assertEqual('absent', record.storage_kind)
 
+    def test_add_lines_nostoresha(self):
+        self._add_content_nostoresha(add_lines=True)
+
+    def test__add_text_nostoresha(self):
+        self._add_content_nostoresha(add_lines=False)
+
     def test_add_lines_return(self):
         files = self.get_versionedfiles()
         # save code by using the stock data insertion helper.
@@ -1649,6 +1753,48 @@
             f.get_record_stream([key_b], 'unordered', True
                 ).next().get_bytes_as('fulltext'))
 
+    def test_get_known_graph_ancestry(self):
+        f = self.get_versionedfiles()
+        if not self.graph:
+            raise TestNotApplicable('ancestry info only relevant with graph.')
+        key_a = self.get_simple_key('a')
+        key_b = self.get_simple_key('b')
+        key_c = self.get_simple_key('c')
+        # A
+        # |\
+        # | B
+        # |/
+        # C
+        f.add_lines(key_a, [], ['\n'])
+        f.add_lines(key_b, [key_a], ['\n'])
+        f.add_lines(key_c, [key_a, key_b], ['\n'])
+        kg = f.get_known_graph_ancestry([key_c])
+        self.assertIsInstance(kg, _mod_graph.KnownGraph)
+        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
+
+    def test_known_graph_with_fallbacks(self):
+        f = self.get_versionedfiles('files')
+        if not self.graph:
+            raise TestNotApplicable('ancestry info only relevant with graph.')
+        if getattr(f, 'add_fallback_versioned_files', None) is None:
+            raise TestNotApplicable("%s doesn't support fallbacks"
+                                    % (f.__class__.__name__,))
+        key_a = self.get_simple_key('a')
+        key_b = self.get_simple_key('b')
+        key_c = self.get_simple_key('c')
+        # A     only in fallback
+        # |\
+        # | B
+        # |/
+        # C
+        g = self.get_versionedfiles('fallback')
+        g.add_lines(key_a, [], ['\n'])
+        f.add_fallback_versioned_files(g)
+        f.add_lines(key_b, [key_a], ['\n'])
+        f.add_lines(key_c, [key_a, key_b], ['\n'])
+        kg = f.get_known_graph_ancestry([key_c])
+        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
+
     def test_get_record_stream_empty(self):
         """An empty stream can be requested without error."""
         f = self.get_versionedfiles()
@@ -1665,7 +1811,8 @@
              'knit-delta-closure', 'knit-delta-closure-ref',
              'groupcompress-block', 'groupcompress-block-ref'])
 
-    def capture_stream(self, f, entries, on_seen, parents):
+    def capture_stream(self, f, entries, on_seen, parents,
+        require_fulltext=False):
         """Capture a stream for testing."""
         for factory in entries:
             on_seen(factory.key)
@@ -1676,6 +1823,8 @@
             self.assertEqual(parents[factory.key], factory.parents)
             self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                 str)
+            if require_fulltext:
+                factory.get_bytes_as('fulltext')
 
     def test_get_record_stream_interface(self):
         """each item in a stream has to provide a regular interface."""
@@ -1688,13 +1837,6 @@
         self.capture_stream(files, entries, seen.add, parent_map)
         self.assertEqual(set(keys), seen)
 
-    def get_simple_key(self, suffix):
-        """Return a key for the object under test."""
-        if self.key_length == 1:
-            return (suffix,)
-        else:
-            return ('FileA',) + (suffix,)
-
     def get_keys_and_sort_order(self):
         """Get diamond test keys list, and their sort ordering."""
         if self.key_length == 1:
@@ -2063,6 +2205,42 @@
         else:
             return None
 
+    def test_get_annotator(self):
+        files = self.get_versionedfiles()
+        self.get_diamond_files(files)
+        origin_key = self.get_simple_key('origin')
+        base_key = self.get_simple_key('base')
+        left_key = self.get_simple_key('left')
+        right_key = self.get_simple_key('right')
+        merged_key = self.get_simple_key('merged')
+        # annotator = files.get_annotator()
+        # introduced full text
+        origins, lines = files.get_annotator().annotate(origin_key)
+        self.assertEqual([(origin_key,)], origins)
+        self.assertEqual(['origin\n'], lines)
+        # a delta
+        origins, lines = files.get_annotator().annotate(base_key)
+        self.assertEqual([(base_key,)], origins)
+        # a merge
+        origins, lines = files.get_annotator().annotate(merged_key)
+        if self.graph:
+            self.assertEqual([
+                (base_key,),
+                (left_key,),
+                (right_key,),
+                (merged_key,),
+                ], origins)
+        else:
+            # Without a graph everything is new.
+            self.assertEqual([
+                (merged_key,),
+                (merged_key,),
+                (merged_key,),
+                (merged_key,),
+                ], origins)
+        self.assertRaises(RevisionNotPresent,
+            files.get_annotator().annotate, self.get_simple_key('missing-key'))
+
     def test_get_parent_map(self):
         files = self.get_versionedfiles()
         if self.key_length == 1:
@@ -2271,6 +2449,43 @@
         else:
             self.assertIdenticalVersionedFile(source, files)
 
+    def test_insert_record_stream_long_parent_chain_out_of_order(self):
+        """An out of order stream can either error or work."""
+        if not self.graph:
+            raise TestNotApplicable('ancestry info only relevant with graph.')
+        # Create a reasonably long chain of records based on each other, where
+        # most will be deltas.
+        source = self.get_versionedfiles('source')
+        parents = ()
+        keys = []
+        content = [('same same %d\n' % n) for n in range(500)]
+        for letter in 'abcdefghijklmnopqrstuvwxyz':
+            key = ('key-' + letter,)
+            if self.key_length == 2:
+                key = ('prefix',) + key
+            content.append('content for ' + letter + '\n')
+            source.add_lines(key, parents, content)
+            keys.append(key)
+            parents = (key,)
+        # Create a stream of these records, excluding the first record that the
+        # rest ultimately depend upon, and insert it into a new vf.
+        streams = []
+        for key in reversed(keys):
+            streams.append(source.get_record_stream([key], 'unordered', False))
+        deltas = chain(*streams[:-1])
+        files = self.get_versionedfiles()
+        try:
+            files.insert_record_stream(deltas)
+        except RevisionNotPresent:
+            # Must not have corrupted the file.
+            files.check()
+        else:
+            # Must only report either just the first key as a missing parent,
+            # no key as missing (for nodelta scenarios).
+            missing = set(files.get_missing_compression_parent_keys())
+            missing.discard(keys[0])
+            self.assertEqual(set(), missing)
+
     def get_knit_delta_source(self):
         """Get a source that can produce a stream with knit delta records,
         regardless of this test's scenario.
@@ -2344,11 +2559,10 @@
         # the ordering here is to make a tree so that dumb searches have
         # more changes to muck up.
 
-        class InstrumentedProgress(progress.DummyProgress):
+        class InstrumentedProgress(progress.ProgressTask):
 
             def __init__(self):
-
-                progress.DummyProgress.__init__(self)
+                progress.ProgressTask.__init__(self)
                 self.updates = []
 
             def update(self, msg=None, current=None, total=None):
@@ -2543,8 +2757,8 @@
         self.assertRaises(NotImplementedError,
                 self.texts.add_mpdiffs, [])
 
-    def test_check(self):
-        self.assertTrue(self.texts.check())
+    def test_check_noerrors(self):
+        self.texts.check()
 
     def test_insert_record_stream(self):
         self.assertRaises(NotImplementedError, self.texts.insert_record_stream,