~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/per_versionedfile.py

  • Committer: Canonical.com Patch Queue Manager
  • Date: 2010-09-01 08:02:42 UTC
  • mfrom: (5390.3.3 faster-revert-593560)
  • Revision ID: pqm@pqm.ubuntu.com-20100901080242-esg62ody4frwmy66
(spiv) Avoid repeatedly calling self.target.all_file_ids() in
 InterTree.iter_changes. (Andrew Bennetts)

Show diffs side-by-side

added

removed

Lines of Context:
1
 
# Copyright (C) 2005, 2009 Canonical Ltd
 
1
# Copyright (C) 2006-2010 Canonical Ltd
2
2
#
3
3
# Authors:
4
4
#   Johan Rydberg <jrydberg@gnu.org>
26
26
 
27
27
from bzrlib import (
28
28
    errors,
 
29
    graph as _mod_graph,
29
30
    groupcompress,
30
31
    knit as _mod_knit,
31
32
    osutils,
32
33
    progress,
 
34
    transport,
 
35
    ui,
33
36
    )
34
37
from bzrlib.errors import (
35
38
                           RevisionNotPresent,
54
57
    )
55
58
from bzrlib.tests.http_utils import TestCaseWithWebserver
56
59
from bzrlib.trace import mutter
57
 
from bzrlib.transport import get_transport
58
60
from bzrlib.transport.memory import MemoryTransport
59
61
from bzrlib.tsort import topo_sort
60
62
from bzrlib.tuned_gzip import GzipFile
732
734
        # the ordering here is to make a tree so that dumb searches have
733
735
        # more changes to muck up.
734
736
 
735
 
        class InstrumentedProgress(progress.DummyProgress):
 
737
        class InstrumentedProgress(progress.ProgressTask):
736
738
 
737
739
            def __init__(self):
738
 
 
739
 
                progress.DummyProgress.__init__(self)
 
740
                progress.ProgressTask.__init__(self)
740
741
                self.updates = []
741
742
 
742
743
            def update(self, msg=None, current=None, total=None):
848
849
        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
849
850
 
850
851
    def test_readonly_mode(self):
851
 
        transport = get_transport(self.get_url('.'))
 
852
        t = transport.get_transport(self.get_url('.'))
852
853
        factory = self.get_factory()
853
 
        vf = factory('id', transport, 0777, create=True, access_mode='w')
854
 
        vf = factory('id', transport, access_mode='r')
 
854
        vf = factory('id', t, 0777, create=True, access_mode='w')
 
855
        vf = factory('id', t, access_mode='r')
855
856
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
856
857
        self.assertRaises(errors.ReadOnlyError,
857
858
                          vf.add_lines_with_ghosts,
879
880
class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
880
881
 
881
882
    def get_file(self, name='foo'):
882
 
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
883
 
            get_scope=self.get_transaction)
 
883
        return WeaveFile(name, transport.get_transport(self.get_url('.')),
 
884
                         create=True,
 
885
                         get_scope=self.get_transaction)
884
886
 
885
887
    def get_file_corrupted_text(self):
886
 
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
887
 
            get_scope=self.get_transaction)
 
888
        w = WeaveFile('foo', transport.get_transport(self.get_url('.')),
 
889
                      create=True,
 
890
                      get_scope=self.get_transaction)
888
891
        w.add_lines('v1', [], ['hello\n'])
889
892
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
890
893
 
918
921
        return w
919
922
 
920
923
    def reopen_file(self, name='foo', create=False):
921
 
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
922
 
            get_scope=self.get_transaction)
 
924
        return WeaveFile(name, transport.get_transport(self.get_url('.')),
 
925
                         create=create,
 
926
                         get_scope=self.get_transaction)
923
927
 
924
928
    def test_no_implicit_create(self):
925
929
        self.assertRaises(errors.NoSuchFile,
926
930
                          WeaveFile,
927
931
                          'foo',
928
 
                          get_transport(self.get_url('.')),
 
932
                          transport.get_transport(self.get_url('.')),
929
933
                          get_scope=self.get_transaction)
930
934
 
931
935
    def get_factory(self):
998
1002
        # we should be able to read from http with a versioned file.
999
1003
        vf = self.get_file()
1000
1004
        # try an empty file access
1001
 
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
 
1005
        readonly_vf = self.get_factory()('foo', transport.get_transport(
 
1006
                self.get_readonly_url('.')))
1002
1007
        self.assertEqual([], readonly_vf.versions())
 
1008
 
 
1009
    def test_readonly_http_works_with_feeling(self):
 
1010
        # we should be able to read from http with a versioned file.
 
1011
        vf = self.get_file()
1003
1012
        # now with feeling.
1004
1013
        vf.add_lines('1', [], ['a\n'])
1005
1014
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
1006
 
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
 
1015
        readonly_vf = self.get_factory()('foo', transport.get_transport(
 
1016
                self.get_readonly_url('.')))
1007
1017
        self.assertEqual(['1', '2'], vf.versions())
 
1018
        self.assertEqual(['1', '2'], readonly_vf.versions())
1008
1019
        for version in readonly_vf.versions():
1009
1020
            readonly_vf.get_lines(version)
1010
1021
 
1012
1023
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
1013
1024
 
1014
1025
    def get_file(self):
1015
 
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
1016
 
            get_scope=self.get_transaction)
 
1026
        return WeaveFile('foo', transport.get_transport(self.get_url('.')),
 
1027
                         create=True,
 
1028
                         get_scope=self.get_transaction)
1017
1029
 
1018
1030
    def get_factory(self):
1019
1031
        return WeaveFile
1263
1275
class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
1264
1276
 
1265
1277
    def get_file(self, name='foo'):
1266
 
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)
 
1278
        return WeaveFile(name, transport.get_transport(self.get_url('.')),
 
1279
                         create=True)
1267
1280
 
1268
1281
    def log_contents(self, w):
1269
1282
        self.log('weave is:')
1468
1481
            transport.mkdir('.')
1469
1482
        files = self.factory(transport)
1470
1483
        if self.cleanup is not None:
1471
 
            self.addCleanup(lambda:self.cleanup(files))
 
1484
            self.addCleanup(self.cleanup, files)
1472
1485
        return files
1473
1486
 
1474
1487
    def get_simple_key(self, suffix):
1557
1570
        self.assertRaises(RevisionNotPresent,
1558
1571
            files.annotate, prefix + ('missing-key',))
1559
1572
 
1560
 
    def test_get_annotator(self):
 
1573
    def test_check_no_parameters(self):
 
1574
        files = self.get_versionedfiles()
 
1575
 
 
1576
    def test_check_progressbar_parameter(self):
 
1577
        """A progress bar can be supplied because check can be a generator."""
 
1578
        pb = ui.ui_factory.nested_progress_bar()
 
1579
        self.addCleanup(pb.finished)
 
1580
        files = self.get_versionedfiles()
 
1581
        files.check(progress_bar=pb)
 
1582
 
 
1583
    def test_check_with_keys_becomes_generator(self):
1561
1584
        files = self.get_versionedfiles()
1562
1585
        self.get_diamond_files(files)
1563
 
        origin_key = self.get_simple_key('origin')
1564
 
        base_key = self.get_simple_key('base')
1565
 
        left_key = self.get_simple_key('left')
1566
 
        right_key = self.get_simple_key('right')
1567
 
        merged_key = self.get_simple_key('merged')
1568
 
        # annotator = files.get_annotator()
1569
 
        # introduced full text
1570
 
        origins, lines = files.get_annotator().annotate(origin_key)
1571
 
        self.assertEqual([(origin_key,)], origins)
1572
 
        self.assertEqual(['origin\n'], lines)
1573
 
        # a delta
1574
 
        origins, lines = files.get_annotator().annotate(base_key)
1575
 
        self.assertEqual([(base_key,)], origins)
1576
 
        # a merge
1577
 
        origins, lines = files.get_annotator().annotate(merged_key)
1578
 
        if self.graph:
1579
 
            self.assertEqual([
1580
 
                (base_key,),
1581
 
                (left_key,),
1582
 
                (right_key,),
1583
 
                (merged_key,),
1584
 
                ], origins)
1585
 
        else:
1586
 
            # Without a graph everything is new.
1587
 
            self.assertEqual([
1588
 
                (merged_key,),
1589
 
                (merged_key,),
1590
 
                (merged_key,),
1591
 
                (merged_key,),
1592
 
                ], origins)
1593
 
        self.assertRaises(RevisionNotPresent,
1594
 
            files.get_annotator().annotate, self.get_simple_key('missing-key'))
 
1586
        keys = files.keys()
 
1587
        entries = files.check(keys=keys)
 
1588
        seen = set()
 
1589
        # Texts output should be fulltexts.
 
1590
        self.capture_stream(files, entries, seen.add,
 
1591
            files.get_parent_map(keys), require_fulltext=True)
 
1592
        # All texts should be output.
 
1593
        self.assertEqual(set(keys), seen)
 
1594
 
 
1595
    def test_clear_cache(self):
 
1596
        files = self.get_versionedfiles()
 
1597
        files.clear_cache()
1595
1598
 
1596
1599
    def test_construct(self):
1597
1600
        """Each parameterised test can be constructed on a transport."""
1750
1753
            f.get_record_stream([key_b], 'unordered', True
1751
1754
                ).next().get_bytes_as('fulltext'))
1752
1755
 
 
1756
    def test_get_known_graph_ancestry(self):
 
1757
        f = self.get_versionedfiles()
 
1758
        if not self.graph:
 
1759
            raise TestNotApplicable('ancestry info only relevant with graph.')
 
1760
        key_a = self.get_simple_key('a')
 
1761
        key_b = self.get_simple_key('b')
 
1762
        key_c = self.get_simple_key('c')
 
1763
        # A
 
1764
        # |\
 
1765
        # | B
 
1766
        # |/
 
1767
        # C
 
1768
        f.add_lines(key_a, [], ['\n'])
 
1769
        f.add_lines(key_b, [key_a], ['\n'])
 
1770
        f.add_lines(key_c, [key_a, key_b], ['\n'])
 
1771
        kg = f.get_known_graph_ancestry([key_c])
 
1772
        self.assertIsInstance(kg, _mod_graph.KnownGraph)
 
1773
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
 
1774
 
 
1775
    def test_known_graph_with_fallbacks(self):
 
1776
        f = self.get_versionedfiles('files')
 
1777
        if not self.graph:
 
1778
            raise TestNotApplicable('ancestry info only relevant with graph.')
 
1779
        if getattr(f, 'add_fallback_versioned_files', None) is None:
 
1780
            raise TestNotApplicable("%s doesn't support fallbacks"
 
1781
                                    % (f.__class__.__name__,))
 
1782
        key_a = self.get_simple_key('a')
 
1783
        key_b = self.get_simple_key('b')
 
1784
        key_c = self.get_simple_key('c')
 
1785
        # A     only in fallback
 
1786
        # |\
 
1787
        # | B
 
1788
        # |/
 
1789
        # C
 
1790
        g = self.get_versionedfiles('fallback')
 
1791
        g.add_lines(key_a, [], ['\n'])
 
1792
        f.add_fallback_versioned_files(g)
 
1793
        f.add_lines(key_b, [key_a], ['\n'])
 
1794
        f.add_lines(key_c, [key_a, key_b], ['\n'])
 
1795
        kg = f.get_known_graph_ancestry([key_c])
 
1796
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
 
1797
 
1753
1798
    def test_get_record_stream_empty(self):
1754
1799
        """An empty stream can be requested without error."""
1755
1800
        f = self.get_versionedfiles()
1766
1811
             'knit-delta-closure', 'knit-delta-closure-ref',
1767
1812
             'groupcompress-block', 'groupcompress-block-ref'])
1768
1813
 
1769
 
    def capture_stream(self, f, entries, on_seen, parents):
 
1814
    def capture_stream(self, f, entries, on_seen, parents,
 
1815
        require_fulltext=False):
1770
1816
        """Capture a stream for testing."""
1771
1817
        for factory in entries:
1772
1818
            on_seen(factory.key)
1777
1823
            self.assertEqual(parents[factory.key], factory.parents)
1778
1824
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1779
1825
                str)
 
1826
            if require_fulltext:
 
1827
                factory.get_bytes_as('fulltext')
1780
1828
 
1781
1829
    def test_get_record_stream_interface(self):
1782
1830
        """each item in a stream has to provide a regular interface."""
2157
2205
        else:
2158
2206
            return None
2159
2207
 
 
2208
    def test_get_annotator(self):
 
2209
        files = self.get_versionedfiles()
 
2210
        self.get_diamond_files(files)
 
2211
        origin_key = self.get_simple_key('origin')
 
2212
        base_key = self.get_simple_key('base')
 
2213
        left_key = self.get_simple_key('left')
 
2214
        right_key = self.get_simple_key('right')
 
2215
        merged_key = self.get_simple_key('merged')
 
2216
        # annotator = files.get_annotator()
 
2217
        # introduced full text
 
2218
        origins, lines = files.get_annotator().annotate(origin_key)
 
2219
        self.assertEqual([(origin_key,)], origins)
 
2220
        self.assertEqual(['origin\n'], lines)
 
2221
        # a delta
 
2222
        origins, lines = files.get_annotator().annotate(base_key)
 
2223
        self.assertEqual([(base_key,)], origins)
 
2224
        # a merge
 
2225
        origins, lines = files.get_annotator().annotate(merged_key)
 
2226
        if self.graph:
 
2227
            self.assertEqual([
 
2228
                (base_key,),
 
2229
                (left_key,),
 
2230
                (right_key,),
 
2231
                (merged_key,),
 
2232
                ], origins)
 
2233
        else:
 
2234
            # Without a graph everything is new.
 
2235
            self.assertEqual([
 
2236
                (merged_key,),
 
2237
                (merged_key,),
 
2238
                (merged_key,),
 
2239
                (merged_key,),
 
2240
                ], origins)
 
2241
        self.assertRaises(RevisionNotPresent,
 
2242
            files.get_annotator().annotate, self.get_simple_key('missing-key'))
 
2243
 
2160
2244
    def test_get_parent_map(self):
2161
2245
        files = self.get_versionedfiles()
2162
2246
        if self.key_length == 1:
2365
2449
        else:
2366
2450
            self.assertIdenticalVersionedFile(source, files)
2367
2451
 
 
2452
    def test_insert_record_stream_long_parent_chain_out_of_order(self):
 
2453
        """An out of order stream can either error or work."""
 
2454
        if not self.graph:
 
2455
            raise TestNotApplicable('ancestry info only relevant with graph.')
 
2456
        # Create a reasonably long chain of records based on each other, where
 
2457
        # most will be deltas.
 
2458
        source = self.get_versionedfiles('source')
 
2459
        parents = ()
 
2460
        keys = []
 
2461
        content = [('same same %d\n' % n) for n in range(500)]
 
2462
        for letter in 'abcdefghijklmnopqrstuvwxyz':
 
2463
            key = ('key-' + letter,)
 
2464
            if self.key_length == 2:
 
2465
                key = ('prefix',) + key
 
2466
            content.append('content for ' + letter + '\n')
 
2467
            source.add_lines(key, parents, content)
 
2468
            keys.append(key)
 
2469
            parents = (key,)
 
2470
        # Create a stream of these records, excluding the first record that the
 
2471
        # rest ultimately depend upon, and insert it into a new vf.
 
2472
        streams = []
 
2473
        for key in reversed(keys):
 
2474
            streams.append(source.get_record_stream([key], 'unordered', False))
 
2475
        deltas = chain(*streams[:-1])
 
2476
        files = self.get_versionedfiles()
 
2477
        try:
 
2478
            files.insert_record_stream(deltas)
 
2479
        except RevisionNotPresent:
 
2480
            # Must not have corrupted the file.
 
2481
            files.check()
 
2482
        else:
 
2483
            # Must only report either just the first key as a missing parent,
 
2484
            # no key as missing (for nodelta scenarios).
 
2485
            missing = set(files.get_missing_compression_parent_keys())
 
2486
            missing.discard(keys[0])
 
2487
            self.assertEqual(set(), missing)
 
2488
 
2368
2489
    def get_knit_delta_source(self):
2369
2490
        """Get a source that can produce a stream with knit delta records,
2370
2491
        regardless of this test's scenario.
2438
2559
        # the ordering here is to make a tree so that dumb searches have
2439
2560
        # more changes to muck up.
2440
2561
 
2441
 
        class InstrumentedProgress(progress.DummyProgress):
 
2562
        class InstrumentedProgress(progress.ProgressTask):
2442
2563
 
2443
2564
            def __init__(self):
2444
 
 
2445
 
                progress.DummyProgress.__init__(self)
 
2565
                progress.ProgressTask.__init__(self)
2446
2566
                self.updates = []
2447
2567
 
2448
2568
            def update(self, msg=None, current=None, total=None):
2637
2757
        self.assertRaises(NotImplementedError,
2638
2758
                self.texts.add_mpdiffs, [])
2639
2759
 
2640
 
    def test_check(self):
2641
 
        self.assertTrue(self.texts.check())
 
2760
    def test_check_noerrors(self):
 
2761
        self.texts.check()
2642
2762
 
2643
2763
    def test_insert_record_stream(self):
2644
2764
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,