~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_versionedfile.py

  • Committer: Canonical.com Patch Queue Manager
  • Date: 2009-08-03 07:15:11 UTC
  • mfrom: (4580.1.2 408199-check-2a)
  • Revision ID: pqm@pqm.ubuntu.com-20090803071511-dwb041qzak0vjzdk
(mbp) check blackbox tests now handle the root being included in the file-id count

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006-2010 Canonical Ltd
 
1
# Copyright (C) 2005, 2009 Canonical Ltd
2
2
#
3
3
# Authors:
4
4
#   Johan Rydberg <jrydberg@gnu.org>
26
26
 
27
27
from bzrlib import (
28
28
    errors,
29
 
    graph as _mod_graph,
30
29
    groupcompress,
31
30
    knit as _mod_knit,
32
31
    osutils,
33
32
    progress,
34
 
    transport,
35
 
    ui,
36
33
    )
37
34
from bzrlib.errors import (
38
35
                           RevisionNotPresent,
57
54
    )
58
55
from bzrlib.tests.http_utils import TestCaseWithWebserver
59
56
from bzrlib.trace import mutter
 
57
from bzrlib.transport import get_transport
60
58
from bzrlib.transport.memory import MemoryTransport
61
59
from bzrlib.tsort import topo_sort
62
60
from bzrlib.tuned_gzip import GzipFile
734
732
        # the ordering here is to make a tree so that dumb searches have
735
733
        # more changes to muck up.
736
734
 
737
 
        class InstrumentedProgress(progress.ProgressTask):
 
735
        class InstrumentedProgress(progress.DummyProgress):
738
736
 
739
737
            def __init__(self):
740
 
                progress.ProgressTask.__init__(self)
 
738
 
 
739
                progress.DummyProgress.__init__(self)
741
740
                self.updates = []
742
741
 
743
742
            def update(self, msg=None, current=None, total=None):
849
848
        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
850
849
 
851
850
    def test_readonly_mode(self):
852
 
        t = transport.get_transport(self.get_url('.'))
 
851
        transport = get_transport(self.get_url('.'))
853
852
        factory = self.get_factory()
854
 
        vf = factory('id', t, 0777, create=True, access_mode='w')
855
 
        vf = factory('id', t, access_mode='r')
 
853
        vf = factory('id', transport, 0777, create=True, access_mode='w')
 
854
        vf = factory('id', transport, access_mode='r')
856
855
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
857
856
        self.assertRaises(errors.ReadOnlyError,
858
857
                          vf.add_lines_with_ghosts,
880
879
class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
881
880
 
882
881
    def get_file(self, name='foo'):
883
 
        return WeaveFile(name, transport.get_transport(self.get_url('.')),
884
 
                         create=True,
885
 
                         get_scope=self.get_transaction)
 
882
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
 
883
            get_scope=self.get_transaction)
886
884
 
887
885
    def get_file_corrupted_text(self):
888
 
        w = WeaveFile('foo', transport.get_transport(self.get_url('.')),
889
 
                      create=True,
890
 
                      get_scope=self.get_transaction)
 
886
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
 
887
            get_scope=self.get_transaction)
891
888
        w.add_lines('v1', [], ['hello\n'])
892
889
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
893
890
 
921
918
        return w
922
919
 
923
920
    def reopen_file(self, name='foo', create=False):
924
 
        return WeaveFile(name, transport.get_transport(self.get_url('.')),
925
 
                         create=create,
926
 
                         get_scope=self.get_transaction)
 
921
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
 
922
            get_scope=self.get_transaction)
927
923
 
928
924
    def test_no_implicit_create(self):
929
925
        self.assertRaises(errors.NoSuchFile,
930
926
                          WeaveFile,
931
927
                          'foo',
932
 
                          transport.get_transport(self.get_url('.')),
 
928
                          get_transport(self.get_url('.')),
933
929
                          get_scope=self.get_transaction)
934
930
 
935
931
    def get_factory(self):
1002
998
        # we should be able to read from http with a versioned file.
1003
999
        vf = self.get_file()
1004
1000
        # try an empty file access
1005
 
        readonly_vf = self.get_factory()('foo', transport.get_transport(
1006
 
                self.get_readonly_url('.')))
 
1001
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
1007
1002
        self.assertEqual([], readonly_vf.versions())
1008
 
 
1009
 
    def test_readonly_http_works_with_feeling(self):
1010
 
        # we should be able to read from http with a versioned file.
1011
 
        vf = self.get_file()
1012
1003
        # now with feeling.
1013
1004
        vf.add_lines('1', [], ['a\n'])
1014
1005
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
1015
 
        readonly_vf = self.get_factory()('foo', transport.get_transport(
1016
 
                self.get_readonly_url('.')))
 
1006
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
1017
1007
        self.assertEqual(['1', '2'], vf.versions())
1018
 
        self.assertEqual(['1', '2'], readonly_vf.versions())
1019
1008
        for version in readonly_vf.versions():
1020
1009
            readonly_vf.get_lines(version)
1021
1010
 
1023
1012
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
1024
1013
 
1025
1014
    def get_file(self):
1026
 
        return WeaveFile('foo', transport.get_transport(self.get_url('.')),
1027
 
                         create=True,
1028
 
                         get_scope=self.get_transaction)
 
1015
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
 
1016
            get_scope=self.get_transaction)
1029
1017
 
1030
1018
    def get_factory(self):
1031
1019
        return WeaveFile
1275
1263
class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
1276
1264
 
1277
1265
    def get_file(self, name='foo'):
1278
 
        return WeaveFile(name, transport.get_transport(self.get_url('.')),
1279
 
                         create=True)
 
1266
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)
1280
1267
 
1281
1268
    def log_contents(self, w):
1282
1269
        self.log('weave is:')
1481
1468
            transport.mkdir('.')
1482
1469
        files = self.factory(transport)
1483
1470
        if self.cleanup is not None:
1484
 
            self.addCleanup(self.cleanup, files)
 
1471
            self.addCleanup(lambda:self.cleanup(files))
1485
1472
        return files
1486
1473
 
1487
1474
    def get_simple_key(self, suffix):
1570
1557
        self.assertRaises(RevisionNotPresent,
1571
1558
            files.annotate, prefix + ('missing-key',))
1572
1559
 
1573
 
    def test_check_no_parameters(self):
1574
 
        files = self.get_versionedfiles()
1575
 
 
1576
 
    def test_check_progressbar_parameter(self):
1577
 
        """A progress bar can be supplied because check can be a generator."""
1578
 
        pb = ui.ui_factory.nested_progress_bar()
1579
 
        self.addCleanup(pb.finished)
1580
 
        files = self.get_versionedfiles()
1581
 
        files.check(progress_bar=pb)
1582
 
 
1583
 
    def test_check_with_keys_becomes_generator(self):
 
1560
    def test_get_annotator(self):
1584
1561
        files = self.get_versionedfiles()
1585
1562
        self.get_diamond_files(files)
1586
 
        keys = files.keys()
1587
 
        entries = files.check(keys=keys)
1588
 
        seen = set()
1589
 
        # Texts output should be fulltexts.
1590
 
        self.capture_stream(files, entries, seen.add,
1591
 
            files.get_parent_map(keys), require_fulltext=True)
1592
 
        # All texts should be output.
1593
 
        self.assertEqual(set(keys), seen)
1594
 
 
1595
 
    def test_clear_cache(self):
1596
 
        files = self.get_versionedfiles()
1597
 
        files.clear_cache()
 
1563
        origin_key = self.get_simple_key('origin')
 
1564
        base_key = self.get_simple_key('base')
 
1565
        left_key = self.get_simple_key('left')
 
1566
        right_key = self.get_simple_key('right')
 
1567
        merged_key = self.get_simple_key('merged')
 
1568
        # annotator = files.get_annotator()
 
1569
        # introduced full text
 
1570
        origins, lines = files.get_annotator().annotate(origin_key)
 
1571
        self.assertEqual([(origin_key,)], origins)
 
1572
        self.assertEqual(['origin\n'], lines)
 
1573
        # a delta
 
1574
        origins, lines = files.get_annotator().annotate(base_key)
 
1575
        self.assertEqual([(base_key,)], origins)
 
1576
        # a merge
 
1577
        origins, lines = files.get_annotator().annotate(merged_key)
 
1578
        if self.graph:
 
1579
            self.assertEqual([
 
1580
                (base_key,),
 
1581
                (left_key,),
 
1582
                (right_key,),
 
1583
                (merged_key,),
 
1584
                ], origins)
 
1585
        else:
 
1586
            # Without a graph everything is new.
 
1587
            self.assertEqual([
 
1588
                (merged_key,),
 
1589
                (merged_key,),
 
1590
                (merged_key,),
 
1591
                (merged_key,),
 
1592
                ], origins)
 
1593
        self.assertRaises(RevisionNotPresent,
 
1594
            files.get_annotator().annotate, self.get_simple_key('missing-key'))
1598
1595
 
1599
1596
    def test_construct(self):
1600
1597
        """Each parameterised test can be constructed on a transport."""
1753
1750
            f.get_record_stream([key_b], 'unordered', True
1754
1751
                ).next().get_bytes_as('fulltext'))
1755
1752
 
1756
 
    def test_get_known_graph_ancestry(self):
1757
 
        f = self.get_versionedfiles()
1758
 
        if not self.graph:
1759
 
            raise TestNotApplicable('ancestry info only relevant with graph.')
1760
 
        key_a = self.get_simple_key('a')
1761
 
        key_b = self.get_simple_key('b')
1762
 
        key_c = self.get_simple_key('c')
1763
 
        # A
1764
 
        # |\
1765
 
        # | B
1766
 
        # |/
1767
 
        # C
1768
 
        f.add_lines(key_a, [], ['\n'])
1769
 
        f.add_lines(key_b, [key_a], ['\n'])
1770
 
        f.add_lines(key_c, [key_a, key_b], ['\n'])
1771
 
        kg = f.get_known_graph_ancestry([key_c])
1772
 
        self.assertIsInstance(kg, _mod_graph.KnownGraph)
1773
 
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
1774
 
 
1775
 
    def test_known_graph_with_fallbacks(self):
1776
 
        f = self.get_versionedfiles('files')
1777
 
        if not self.graph:
1778
 
            raise TestNotApplicable('ancestry info only relevant with graph.')
1779
 
        if getattr(f, 'add_fallback_versioned_files', None) is None:
1780
 
            raise TestNotApplicable("%s doesn't support fallbacks"
1781
 
                                    % (f.__class__.__name__,))
1782
 
        key_a = self.get_simple_key('a')
1783
 
        key_b = self.get_simple_key('b')
1784
 
        key_c = self.get_simple_key('c')
1785
 
        # A     only in fallback
1786
 
        # |\
1787
 
        # | B
1788
 
        # |/
1789
 
        # C
1790
 
        g = self.get_versionedfiles('fallback')
1791
 
        g.add_lines(key_a, [], ['\n'])
1792
 
        f.add_fallback_versioned_files(g)
1793
 
        f.add_lines(key_b, [key_a], ['\n'])
1794
 
        f.add_lines(key_c, [key_a, key_b], ['\n'])
1795
 
        kg = f.get_known_graph_ancestry([key_c])
1796
 
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
1797
 
 
1798
1753
    def test_get_record_stream_empty(self):
1799
1754
        """An empty stream can be requested without error."""
1800
1755
        f = self.get_versionedfiles()
1811
1766
             'knit-delta-closure', 'knit-delta-closure-ref',
1812
1767
             'groupcompress-block', 'groupcompress-block-ref'])
1813
1768
 
1814
 
    def capture_stream(self, f, entries, on_seen, parents,
1815
 
        require_fulltext=False):
 
1769
    def capture_stream(self, f, entries, on_seen, parents):
1816
1770
        """Capture a stream for testing."""
1817
1771
        for factory in entries:
1818
1772
            on_seen(factory.key)
1823
1777
            self.assertEqual(parents[factory.key], factory.parents)
1824
1778
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1825
1779
                str)
1826
 
            if require_fulltext:
1827
 
                factory.get_bytes_as('fulltext')
1828
1780
 
1829
1781
    def test_get_record_stream_interface(self):
1830
1782
        """each item in a stream has to provide a regular interface."""
2205
2157
        else:
2206
2158
            return None
2207
2159
 
2208
 
    def test_get_annotator(self):
2209
 
        files = self.get_versionedfiles()
2210
 
        self.get_diamond_files(files)
2211
 
        origin_key = self.get_simple_key('origin')
2212
 
        base_key = self.get_simple_key('base')
2213
 
        left_key = self.get_simple_key('left')
2214
 
        right_key = self.get_simple_key('right')
2215
 
        merged_key = self.get_simple_key('merged')
2216
 
        # annotator = files.get_annotator()
2217
 
        # introduced full text
2218
 
        origins, lines = files.get_annotator().annotate(origin_key)
2219
 
        self.assertEqual([(origin_key,)], origins)
2220
 
        self.assertEqual(['origin\n'], lines)
2221
 
        # a delta
2222
 
        origins, lines = files.get_annotator().annotate(base_key)
2223
 
        self.assertEqual([(base_key,)], origins)
2224
 
        # a merge
2225
 
        origins, lines = files.get_annotator().annotate(merged_key)
2226
 
        if self.graph:
2227
 
            self.assertEqual([
2228
 
                (base_key,),
2229
 
                (left_key,),
2230
 
                (right_key,),
2231
 
                (merged_key,),
2232
 
                ], origins)
2233
 
        else:
2234
 
            # Without a graph everything is new.
2235
 
            self.assertEqual([
2236
 
                (merged_key,),
2237
 
                (merged_key,),
2238
 
                (merged_key,),
2239
 
                (merged_key,),
2240
 
                ], origins)
2241
 
        self.assertRaises(RevisionNotPresent,
2242
 
            files.get_annotator().annotate, self.get_simple_key('missing-key'))
2243
 
 
2244
2160
    def test_get_parent_map(self):
2245
2161
        files = self.get_versionedfiles()
2246
2162
        if self.key_length == 1:
2449
2365
        else:
2450
2366
            self.assertIdenticalVersionedFile(source, files)
2451
2367
 
2452
 
    def test_insert_record_stream_long_parent_chain_out_of_order(self):
2453
 
        """An out of order stream can either error or work."""
2454
 
        if not self.graph:
2455
 
            raise TestNotApplicable('ancestry info only relevant with graph.')
2456
 
        # Create a reasonably long chain of records based on each other, where
2457
 
        # most will be deltas.
2458
 
        source = self.get_versionedfiles('source')
2459
 
        parents = ()
2460
 
        keys = []
2461
 
        content = [('same same %d\n' % n) for n in range(500)]
2462
 
        for letter in 'abcdefghijklmnopqrstuvwxyz':
2463
 
            key = ('key-' + letter,)
2464
 
            if self.key_length == 2:
2465
 
                key = ('prefix',) + key
2466
 
            content.append('content for ' + letter + '\n')
2467
 
            source.add_lines(key, parents, content)
2468
 
            keys.append(key)
2469
 
            parents = (key,)
2470
 
        # Create a stream of these records, excluding the first record that the
2471
 
        # rest ultimately depend upon, and insert it into a new vf.
2472
 
        streams = []
2473
 
        for key in reversed(keys):
2474
 
            streams.append(source.get_record_stream([key], 'unordered', False))
2475
 
        deltas = chain(*streams[:-1])
2476
 
        files = self.get_versionedfiles()
2477
 
        try:
2478
 
            files.insert_record_stream(deltas)
2479
 
        except RevisionNotPresent:
2480
 
            # Must not have corrupted the file.
2481
 
            files.check()
2482
 
        else:
2483
 
            # Must only report either just the first key as a missing parent,
2484
 
            # no key as missing (for nodelta scenarios).
2485
 
            missing = set(files.get_missing_compression_parent_keys())
2486
 
            missing.discard(keys[0])
2487
 
            self.assertEqual(set(), missing)
2488
 
 
2489
2368
    def get_knit_delta_source(self):
2490
2369
        """Get a source that can produce a stream with knit delta records,
2491
2370
        regardless of this test's scenario.
2559
2438
        # the ordering here is to make a tree so that dumb searches have
2560
2439
        # more changes to muck up.
2561
2440
 
2562
 
        class InstrumentedProgress(progress.ProgressTask):
 
2441
        class InstrumentedProgress(progress.DummyProgress):
2563
2442
 
2564
2443
            def __init__(self):
2565
 
                progress.ProgressTask.__init__(self)
 
2444
 
 
2445
                progress.DummyProgress.__init__(self)
2566
2446
                self.updates = []
2567
2447
 
2568
2448
            def update(self, msg=None, current=None, total=None):
2757
2637
        self.assertRaises(NotImplementedError,
2758
2638
                self.texts.add_mpdiffs, [])
2759
2639
 
2760
 
    def test_check_noerrors(self):
2761
 
        self.texts.check()
 
2640
    def test_check(self):
 
2641
        self.assertTrue(self.texts.check())
2762
2642
 
2763
2643
    def test_insert_record_stream(self):
2764
2644
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,