~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/per_versionedfile.py

  • Committer: Robert Collins
  • Date: 2010-05-06 23:41:35 UTC
  • mto: This revision was merged to the branch mainline in revision 5223.
  • Revision ID: robertc@robertcollins.net-20100506234135-yivbzczw1sejxnxc
Lock methods on ``Tree``, ``Branch`` and ``Repository`` are now
expected to return an object which can be used to unlock them. This reduces
duplicate code when using cleanups. The previous 'tokens' returned by
``Branch.lock_write`` and ``Repository.lock_write`` are now attributes
on the result of the lock_write. ``repository.RepositoryWriteLockResult``
and ``branch.BranchWriteLockResult`` document this. (Robert Collins)

``log._get_info_for_log_files`` now takes an add_cleanup callable.
(Robert Collins)

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2005, 2009 Canonical Ltd
 
1
# Copyright (C) 2006-2010 Canonical Ltd
2
2
#
3
3
# Authors:
4
4
#   Johan Rydberg <jrydberg@gnu.org>
26
26
 
27
27
from bzrlib import (
28
28
    errors,
 
29
    graph as _mod_graph,
29
30
    groupcompress,
30
31
    knit as _mod_knit,
31
32
    osutils,
32
33
    progress,
 
34
    ui,
33
35
    )
34
36
from bzrlib.errors import (
35
37
                           RevisionNotPresent,
732
734
        # the ordering here is to make a tree so that dumb searches have
733
735
        # more changes to muck up.
734
736
 
735
 
        class InstrumentedProgress(progress.DummyProgress):
 
737
        class InstrumentedProgress(progress.ProgressTask):
736
738
 
737
739
            def __init__(self):
738
 
 
739
 
                progress.DummyProgress.__init__(self)
 
740
                progress.ProgressTask.__init__(self)
740
741
                self.updates = []
741
742
 
742
743
            def update(self, msg=None, current=None, total=None):
1468
1469
            transport.mkdir('.')
1469
1470
        files = self.factory(transport)
1470
1471
        if self.cleanup is not None:
1471
 
            self.addCleanup(lambda:self.cleanup(files))
 
1472
            self.addCleanup(self.cleanup, files)
1472
1473
        return files
1473
1474
 
1474
1475
    def get_simple_key(self, suffix):
1557
1558
        self.assertRaises(RevisionNotPresent,
1558
1559
            files.annotate, prefix + ('missing-key',))
1559
1560
 
1560
 
    def test_get_annotator(self):
 
1561
    def test_check_no_parameters(self):
 
1562
        files = self.get_versionedfiles()
 
1563
 
 
1564
    def test_check_progressbar_parameter(self):
 
1565
        """A progress bar can be supplied because check can be a generator."""
 
1566
        pb = ui.ui_factory.nested_progress_bar()
 
1567
        self.addCleanup(pb.finished)
 
1568
        files = self.get_versionedfiles()
 
1569
        files.check(progress_bar=pb)
 
1570
 
 
1571
    def test_check_with_keys_becomes_generator(self):
1561
1572
        files = self.get_versionedfiles()
1562
1573
        self.get_diamond_files(files)
1563
 
        origin_key = self.get_simple_key('origin')
1564
 
        base_key = self.get_simple_key('base')
1565
 
        left_key = self.get_simple_key('left')
1566
 
        right_key = self.get_simple_key('right')
1567
 
        merged_key = self.get_simple_key('merged')
1568
 
        # annotator = files.get_annotator()
1569
 
        # introduced full text
1570
 
        origins, lines = files.get_annotator().annotate(origin_key)
1571
 
        self.assertEqual([(origin_key,)], origins)
1572
 
        self.assertEqual(['origin\n'], lines)
1573
 
        # a delta
1574
 
        origins, lines = files.get_annotator().annotate(base_key)
1575
 
        self.assertEqual([(base_key,)], origins)
1576
 
        # a merge
1577
 
        origins, lines = files.get_annotator().annotate(merged_key)
1578
 
        if self.graph:
1579
 
            self.assertEqual([
1580
 
                (base_key,),
1581
 
                (left_key,),
1582
 
                (right_key,),
1583
 
                (merged_key,),
1584
 
                ], origins)
1585
 
        else:
1586
 
            # Without a graph everything is new.
1587
 
            self.assertEqual([
1588
 
                (merged_key,),
1589
 
                (merged_key,),
1590
 
                (merged_key,),
1591
 
                (merged_key,),
1592
 
                ], origins)
1593
 
        self.assertRaises(RevisionNotPresent,
1594
 
            files.get_annotator().annotate, self.get_simple_key('missing-key'))
 
1574
        keys = files.keys()
 
1575
        entries = files.check(keys=keys)
 
1576
        seen = set()
 
1577
        # Texts output should be fulltexts.
 
1578
        self.capture_stream(files, entries, seen.add,
 
1579
            files.get_parent_map(keys), require_fulltext=True)
 
1580
        # All texts should be output.
 
1581
        self.assertEqual(set(keys), seen)
 
1582
 
 
1583
    def test_clear_cache(self):
 
1584
        files = self.get_versionedfiles()
 
1585
        files.clear_cache()
1595
1586
 
1596
1587
    def test_construct(self):
1597
1588
        """Each parameterised test can be constructed on a transport."""
1750
1741
            f.get_record_stream([key_b], 'unordered', True
1751
1742
                ).next().get_bytes_as('fulltext'))
1752
1743
 
 
1744
    def test_get_known_graph_ancestry(self):
 
1745
        f = self.get_versionedfiles()
 
1746
        if not self.graph:
 
1747
            raise TestNotApplicable('ancestry info only relevant with graph.')
 
1748
        key_a = self.get_simple_key('a')
 
1749
        key_b = self.get_simple_key('b')
 
1750
        key_c = self.get_simple_key('c')
 
1751
        # A
 
1752
        # |\
 
1753
        # | B
 
1754
        # |/
 
1755
        # C
 
1756
        f.add_lines(key_a, [], ['\n'])
 
1757
        f.add_lines(key_b, [key_a], ['\n'])
 
1758
        f.add_lines(key_c, [key_a, key_b], ['\n'])
 
1759
        kg = f.get_known_graph_ancestry([key_c])
 
1760
        self.assertIsInstance(kg, _mod_graph.KnownGraph)
 
1761
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
 
1762
 
 
1763
    def test_known_graph_with_fallbacks(self):
 
1764
        f = self.get_versionedfiles('files')
 
1765
        if not self.graph:
 
1766
            raise TestNotApplicable('ancestry info only relevant with graph.')
 
1767
        if getattr(f, 'add_fallback_versioned_files', None) is None:
 
1768
            raise TestNotApplicable("%s doesn't support fallbacks"
 
1769
                                    % (f.__class__.__name__,))
 
1770
        key_a = self.get_simple_key('a')
 
1771
        key_b = self.get_simple_key('b')
 
1772
        key_c = self.get_simple_key('c')
 
1773
        # A     only in fallback
 
1774
        # |\
 
1775
        # | B
 
1776
        # |/
 
1777
        # C
 
1778
        g = self.get_versionedfiles('fallback')
 
1779
        g.add_lines(key_a, [], ['\n'])
 
1780
        f.add_fallback_versioned_files(g)
 
1781
        f.add_lines(key_b, [key_a], ['\n'])
 
1782
        f.add_lines(key_c, [key_a, key_b], ['\n'])
 
1783
        kg = f.get_known_graph_ancestry([key_c])
 
1784
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
 
1785
 
1753
1786
    def test_get_record_stream_empty(self):
1754
1787
        """An empty stream can be requested without error."""
1755
1788
        f = self.get_versionedfiles()
1766
1799
             'knit-delta-closure', 'knit-delta-closure-ref',
1767
1800
             'groupcompress-block', 'groupcompress-block-ref'])
1768
1801
 
1769
 
    def capture_stream(self, f, entries, on_seen, parents):
 
1802
    def capture_stream(self, f, entries, on_seen, parents,
 
1803
        require_fulltext=False):
1770
1804
        """Capture a stream for testing."""
1771
1805
        for factory in entries:
1772
1806
            on_seen(factory.key)
1777
1811
            self.assertEqual(parents[factory.key], factory.parents)
1778
1812
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1779
1813
                str)
 
1814
            if require_fulltext:
 
1815
                factory.get_bytes_as('fulltext')
1780
1816
 
1781
1817
    def test_get_record_stream_interface(self):
1782
1818
        """each item in a stream has to provide a regular interface."""
2157
2193
        else:
2158
2194
            return None
2159
2195
 
 
2196
    def test_get_annotator(self):
 
2197
        files = self.get_versionedfiles()
 
2198
        self.get_diamond_files(files)
 
2199
        origin_key = self.get_simple_key('origin')
 
2200
        base_key = self.get_simple_key('base')
 
2201
        left_key = self.get_simple_key('left')
 
2202
        right_key = self.get_simple_key('right')
 
2203
        merged_key = self.get_simple_key('merged')
 
2204
        # annotator = files.get_annotator()
 
2205
        # introduced full text
 
2206
        origins, lines = files.get_annotator().annotate(origin_key)
 
2207
        self.assertEqual([(origin_key,)], origins)
 
2208
        self.assertEqual(['origin\n'], lines)
 
2209
        # a delta
 
2210
        origins, lines = files.get_annotator().annotate(base_key)
 
2211
        self.assertEqual([(base_key,)], origins)
 
2212
        # a merge
 
2213
        origins, lines = files.get_annotator().annotate(merged_key)
 
2214
        if self.graph:
 
2215
            self.assertEqual([
 
2216
                (base_key,),
 
2217
                (left_key,),
 
2218
                (right_key,),
 
2219
                (merged_key,),
 
2220
                ], origins)
 
2221
        else:
 
2222
            # Without a graph everything is new.
 
2223
            self.assertEqual([
 
2224
                (merged_key,),
 
2225
                (merged_key,),
 
2226
                (merged_key,),
 
2227
                (merged_key,),
 
2228
                ], origins)
 
2229
        self.assertRaises(RevisionNotPresent,
 
2230
            files.get_annotator().annotate, self.get_simple_key('missing-key'))
 
2231
 
2160
2232
    def test_get_parent_map(self):
2161
2233
        files = self.get_versionedfiles()
2162
2234
        if self.key_length == 1:
2365
2437
        else:
2366
2438
            self.assertIdenticalVersionedFile(source, files)
2367
2439
 
 
2440
    def test_insert_record_stream_long_parent_chain_out_of_order(self):
 
2441
        """An out of order stream can either error or work."""
 
2442
        if not self.graph:
 
2443
            raise TestNotApplicable('ancestry info only relevant with graph.')
 
2444
        # Create a reasonably long chain of records based on each other, where
 
2445
        # most will be deltas.
 
2446
        source = self.get_versionedfiles('source')
 
2447
        parents = ()
 
2448
        keys = []
 
2449
        content = [('same same %d\n' % n) for n in range(500)]
 
2450
        for letter in 'abcdefghijklmnopqrstuvwxyz':
 
2451
            key = ('key-' + letter,)
 
2452
            if self.key_length == 2:
 
2453
                key = ('prefix',) + key
 
2454
            content.append('content for ' + letter + '\n')
 
2455
            source.add_lines(key, parents, content)
 
2456
            keys.append(key)
 
2457
            parents = (key,)
 
2458
        # Create a stream of these records, excluding the first record that the
 
2459
        # rest ultimately depend upon, and insert it into a new vf.
 
2460
        streams = []
 
2461
        for key in reversed(keys):
 
2462
            streams.append(source.get_record_stream([key], 'unordered', False))
 
2463
        deltas = chain(*streams[:-1])
 
2464
        files = self.get_versionedfiles()
 
2465
        try:
 
2466
            files.insert_record_stream(deltas)
 
2467
        except RevisionNotPresent:
 
2468
            # Must not have corrupted the file.
 
2469
            files.check()
 
2470
        else:
 
2471
            # Must only report either just the first key as a missing parent,
 
2472
            # no key as missing (for nodelta scenarios).
 
2473
            missing = set(files.get_missing_compression_parent_keys())
 
2474
            missing.discard(keys[0])
 
2475
            self.assertEqual(set(), missing)
 
2476
 
2368
2477
    def get_knit_delta_source(self):
2369
2478
        """Get a source that can produce a stream with knit delta records,
2370
2479
        regardless of this test's scenario.
2438
2547
        # the ordering here is to make a tree so that dumb searches have
2439
2548
        # more changes to muck up.
2440
2549
 
2441
 
        class InstrumentedProgress(progress.DummyProgress):
 
2550
        class InstrumentedProgress(progress.ProgressTask):
2442
2551
 
2443
2552
            def __init__(self):
2444
 
 
2445
 
                progress.DummyProgress.__init__(self)
 
2553
                progress.ProgressTask.__init__(self)
2446
2554
                self.updates = []
2447
2555
 
2448
2556
            def update(self, msg=None, current=None, total=None):
2637
2745
        self.assertRaises(NotImplementedError,
2638
2746
                self.texts.add_mpdiffs, [])
2639
2747
 
2640
 
    def test_check(self):
2641
 
        self.assertTrue(self.texts.check())
 
2748
    def test_check_noerrors(self):
 
2749
        self.texts.check()
2642
2750
 
2643
2751
    def test_insert_record_stream(self):
2644
2752
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,