~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_versionedfile.py

  • Committer: Canonical.com Patch Queue Manager
  • Date: 2009-03-06 06:48:25 UTC
  • mfrom: (4070.8.6 debug-config)
  • Revision ID: pqm@pqm.ubuntu.com-20090306064825-kbpwggw21dygeix6
(mbp) debug_flags configuration option

=== modified file 'bzrlib/tests/test_versionedfile.py'

@@ -1,4 +1,4 @@
-# Copyright (C) 2005 Canonical Ltd
+# Copyright (C) 2005, 2009 Canonical Ltd
 #
 # Authors:
 #   Johan Rydberg <jrydberg@gnu.org>
@@ -15,7 +15,7 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
 
 # TODO: might be nice to create a versionedfile with some type of corruption
@@ -24,9 +24,10 @@
 from itertools import chain, izip
 from StringIO import StringIO
 
-import bzrlib
 from bzrlib import (
     errors,
+    groupcompress,
+    knit as _mod_knit,
     osutils,
     progress,
     )
@@ -35,7 +36,6 @@
                            RevisionAlreadyPresent,
                            WeaveParentMismatch
                            )
-from bzrlib import knit as _mod_knit
 from bzrlib.knit import (
     cleanup_pack_knit,
     make_file_factory,
@@ -43,16 +43,14 @@
     KnitAnnotateFactory,
     KnitPlainFactory,
     )
-from bzrlib.symbol_versioning import one_four, one_five
 from bzrlib.tests import (
     TestCase,
     TestCaseWithMemoryTransport,
     TestNotApplicable,
-    TestScenarioApplier,
     TestSkipped,
     condition_isinstance,
     split_suite_by_condition,
-    iter_suite_tests,
+    multiply_tests,
     )
 from bzrlib.tests.http_utils import TestCaseWithWebserver
 from bzrlib.trace import mutter
@@ -76,8 +74,6 @@
     """Parameterize VersionedFiles tests for different implementations."""
     to_adapt, result = split_suite_by_condition(
         standard_tests, condition_isinstance(TestVersionedFiles))
-    len_one_adapter = TestScenarioApplier()
-    len_two_adapter = TestScenarioApplier()
     # We want to be sure of behaviour for:
     # weaves prefix layout (weave texts)
     # individually named weaves (weave inventories)
@@ -88,7 +84,7 @@
     # individual graph knits in packs (inventories)
     # individual graph nocompression knits in packs (revisions)
     # plain text knits in packs (texts)
-    len_one_adapter.scenarios = [
+    len_one_scenarios = [
         ('weave-named', {
             'cleanup':None,
             'factory':make_versioned_files_factory(WeaveFile,
@@ -125,8 +121,15 @@
             'key_length':1,
             'support_partial_insertion': False,
             }),
+        ('groupcompress-nograph', {
+            'cleanup':groupcompress.cleanup_pack_group,
+            'factory':groupcompress.make_pack_factory(False, False, 1),
+            'graph': False,
+            'key_length':1,
+            'support_partial_insertion':False,
+            }),
         ]
-    len_two_adapter.scenarios = [
+    len_two_scenarios = [
         ('weave-prefix', {
             'cleanup':None,
             'factory':make_versioned_files_factory(WeaveFile,
@@ -149,11 +152,16 @@
             'key_length':2,
             'support_partial_insertion': True,
             }),
+        ('groupcompress', {
+            'cleanup':groupcompress.cleanup_pack_group,
+            'factory':groupcompress.make_pack_factory(True, False, 1),
+            'graph': True,
+            'key_length':1,
+            'support_partial_insertion':False,
+            }),
         ]
-    for test in iter_suite_tests(to_adapt):
-        result.addTests(len_one_adapter.adapt(test))
-        result.addTests(len_two_adapter.adapt(test))
-    return result
+    scenarios = len_one_scenarios + len_two_scenarios
+    return multiply_tests(to_adapt, scenarios, result)
 
 
 def get_diamond_vf(f, trailing_eol=True, left_only=False):
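The hunk above moves this file off the deprecated TestScenarioApplier onto the list-based scenario API, where multiply_tests emits one copy of each test per (name, parameters) scenario, with the parameters applied as attributes. A minimal self-contained sketch of that pattern using plain unittest (multiply_tests_sketch and DemoTest are illustrative names, not bzrlib APIs):

import copy
import unittest

def multiply_tests_sketch(tests, scenarios, result):
    # For every test, add one modified copy per scenario, with the
    # scenario's parameters set as instance attributes; this mirrors the
    # shape of the API the diff above switches to.
    for test in tests:
        for name, params in scenarios:
            new_test = copy.deepcopy(test)
            for key, value in params.items():
                setattr(new_test, key, value)
            result.addTest(new_test)
    return result

class DemoTest(unittest.TestCase):
    key_length = None  # overridden per scenario

    def test_key_length_was_applied(self):
        self.assertTrue(self.key_length in (1, 2))

suite = multiply_tests_sketch(
    unittest.TestLoader().loadTestsFromTestCase(DemoTest),
    [('len-one', {'key_length': 1}), ('len-two', {'key_length': 2})],
    unittest.TestSuite())
# suite now holds two parameterized copies of test_key_length_was_applied.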
@@ -185,7 +193,7 @@
 
 
 def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
-    nograph=False):
+    nograph=False, nokeys=False):
     """Get a diamond graph to exercise deltas and merges.
 
     This creates a 5-node graph in files. If files supports 2-length keys two
@@ -198,8 +206,12 @@
     :param nograph: If True, do not provide parents to the add_lines calls;
         this is useful for tests that need inserted data but have graphless
         stores.
+    :param nokeys: If True, pass None is as the key for all insertions.
+        Currently implies nograph.
     :return: The results of the add_lines calls.
     """
+    if nokeys:
+        nograph = True
     if key_length == 1:
         prefixes = [()]
     else:
@@ -216,25 +228,30 @@
         else:
             result = [prefix + suffix for suffix in suffix_list]
             return result
+    def get_key(suffix):
+        if nokeys:
+            return (None, )
+        else:
+            return (suffix,)
     # we loop over each key because that spreads the inserts across prefixes,
     # which is how commit operates.
     for prefix in prefixes:
-        result.append(files.add_lines(prefix + ('origin',), (),
+        result.append(files.add_lines(prefix + get_key('origin'), (),
             ['origin' + last_char]))
     for prefix in prefixes:
-        result.append(files.add_lines(prefix + ('base',),
+        result.append(files.add_lines(prefix + get_key('base'),
             get_parents([('origin',)]), ['base' + last_char]))
     for prefix in prefixes:
-        result.append(files.add_lines(prefix + ('left',),
+        result.append(files.add_lines(prefix + get_key('left'),
             get_parents([('base',)]),
             ['base\n', 'left' + last_char]))
     if not left_only:
         for prefix in prefixes:
-            result.append(files.add_lines(prefix + ('right',),
+            result.append(files.add_lines(prefix + get_key('right'),
                 get_parents([('base',)]),
                 ['base\n', 'right' + last_char]))
         for prefix in prefixes:
-            result.append(files.add_lines(prefix + ('merged',),
+            result.append(files.add_lines(prefix + get_key('merged'),
                 get_parents([('left',), ('right',)]),
                 ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
     return result
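For orientation, the five add_lines loops above build the diamond the docstring describes. A sketch of the resulting parent map for key_length 1 with a graph-supporting store (parents shown as tuples; the real get_parent_map returns an equivalent mapping):

# Shape of the diamond built by get_diamond_files (key_length 1, graph on),
# read directly off the add_lines calls above.
DIAMOND_PARENTS = {
    ('origin',): (),
    ('base',):   (('origin',),),
    ('left',):   (('base',),),
    ('right',):  (('base',),),
    ('merged',): (('left',), ('right',)),
}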
@@ -751,21 +768,21 @@
                 self.assertEqual(expected, progress.updates)
             return lines
         lines = iter_with_versions(['child', 'otherchild'],
-                                   [('Walking content.', 0, 2),
-                                    ('Walking content.', 1, 2),
-                                    ('Walking content.', 2, 2)])
+                                   [('Walking content', 0, 2),
+                                    ('Walking content', 1, 2),
+                                    ('Walking content', 2, 2)])
         # we must see child and otherchild
         self.assertTrue(lines[('child\n', 'child')] > 0)
         self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
         # we dont care if we got more than that.
 
         # test all lines
-        lines = iter_with_versions(None, [('Walking content.', 0, 5),
-                                          ('Walking content.', 1, 5),
-                                          ('Walking content.', 2, 5),
-                                          ('Walking content.', 3, 5),
-                                          ('Walking content.', 4, 5),
-                                          ('Walking content.', 5, 5)])
+        lines = iter_with_versions(None, [('Walking content', 0, 5),
+                                          ('Walking content', 1, 5),
+                                          ('Walking content', 2, 5),
+                                          ('Walking content', 3, 5),
+                                          ('Walking content', 4, 5),
+                                          ('Walking content', 5, 5)])
         # all lines must be seen at least once
         self.assertTrue(lines[('base\n', 'base')] > 0)
         self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
@@ -1493,10 +1510,32 @@
         """Each parameterised test can be constructed on a transport."""
         files = self.get_versionedfiles()
 
-    def get_diamond_files(self, files, trailing_eol=True, left_only=False):
+    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
+        nokeys=False):
         return get_diamond_files(files, self.key_length,
             trailing_eol=trailing_eol, nograph=not self.graph,
-            left_only=left_only)
+            left_only=left_only, nokeys=nokeys)
+
+    def test_add_lines_nostoresha(self):
+        """When nostore_sha is supplied using old content raises."""
+        vf = self.get_versionedfiles()
+        empty_text = ('a', [])
+        sample_text_nl = ('b', ["foo\n", "bar\n"])
+        sample_text_no_nl = ('c', ["foo\n", "bar"])
+        shas = []
+        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
+            sha, _, _ = vf.add_lines(self.get_simple_key(version), [], lines)
+            shas.append(sha)
+        # we now have a copy of all the lines in the vf.
+        for sha, (version, lines) in zip(
+            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
+            new_key = self.get_simple_key(version + "2")
+            self.assertRaises(errors.ExistingContent,
+                vf.add_lines, new_key, [], lines,
+                nostore_sha=sha)
+            # and no new version should have been added.
+            record = vf.get_record_stream([new_key], 'unordered', True).next()
+            self.assertEqual('absent', record.storage_kind)
 
     def test_add_lines_return(self):
         files = self.get_versionedfiles()
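The new test_add_lines_nostoresha pins down the nostore_sha contract: when the text being added hashes to nostore_sha, the store must raise ExistingContent instead of storing a duplicate, and nothing new may appear in the store. A hedged sketch of that contract against a toy in-memory store (TinyStore is illustrative, not bzrlib's implementation; Python 2 strings to match the file):

from hashlib import sha1

class ExistingContent(Exception):
    """Raised instead of storing a text the caller already holds."""

class TinyStore(object):
    # Toy stand-in for a VersionedFiles-like store.
    def __init__(self):
        self._texts = {}

    def add_lines(self, key, parents, lines, nostore_sha=None):
        text = ''.join(lines)
        digest = sha1(text).hexdigest()
        if nostore_sha is not None and digest == nostore_sha:
            # Same content the caller already has: refuse to store it.
            raise ExistingContent(key)
        self._texts[key] = lines
        # Mirrors the 3-tuple shape the test unpacks; the third element
        # is a placeholder in this sketch.
        return digest, len(text), None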
@@ -1529,6 +1568,60 @@
                 ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                 results)
 
+    def test_add_lines_no_key_generates_chk_key(self):
+        files = self.get_versionedfiles()
+        # save code by using the stock data insertion helper.
+        adds = self.get_diamond_files(files, nokeys=True)
+        results = []
+        # We can only validate the first 2 elements returned from add_lines.
+        for add in adds:
+            self.assertEqual(3, len(add))
+            results.append(add[:2])
+        if self.key_length == 1:
+            self.assertEqual([
+                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
+                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
+                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
+                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
+                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
+                results)
+            # Check the added items got CHK keys.
+            self.assertEqual(set([
+                ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
+                ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
+                ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
+                ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
+                ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
+                ]),
+                files.keys())
+        elif self.key_length == 2:
+            self.assertEqual([
+                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
+                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
+                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
+                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
+                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
+                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
+                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
+                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
+                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
+                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
+                results)
+            # Check the added items got CHK keys.
+            self.assertEqual(set([
+                ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
+                ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
+                ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
+                ('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
+                ('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
+                ('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
+                ('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
+                ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
+                ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
+                ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
+                ]),
+                files.keys())
+
     def test_empty_lines(self):
         """Empty files can be stored."""
         f = self.get_versionedfiles()
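The expected values in the new CHK test make the key scheme visible: when the caller passes no key (nokeys=True), the store names each text after its own sha1, prefixed with 'sha1:', keeping any prefix element for 2-length keys. A small sketch of that derivation (chk_key_sketch is a hypothetical helper, Python 2 strings to match the file):

from hashlib import sha1

def chk_key_sketch(prefix, lines):
    # Content-addressed key of the shape the assertions above expect,
    # e.g. the 7-byte 'origin\n' text pairs with key
    # ('sha1:00e364d2...',) for key_length 1, or
    # ('FileA', 'sha1:00e364d2...') for key_length 2.
    digest = sha1(''.join(lines)).hexdigest()
    return prefix + ('sha1:' + digest,)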
@@ -1569,15 +1662,17 @@
              'knit-ft', 'knit-delta', 'chunked', 'fulltext',
              'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
              'knit-delta-gz',
-             'knit-delta-closure', 'knit-delta-closure-ref'])
+             'knit-delta-closure', 'knit-delta-closure-ref',
+             'groupcompress-block', 'groupcompress-block-ref'])
 
     def capture_stream(self, f, entries, on_seen, parents):
         """Capture a stream for testing."""
         for factory in entries:
             on_seen(factory.key)
             self.assertValidStorageKind(factory.storage_kind)
-            self.assertEqual(f.get_sha1s([factory.key])[factory.key],
-                factory.sha1)
+            if factory.sha1 is not None:
+                self.assertEqual(f.get_sha1s([factory.key])[factory.key],
+                    factory.sha1)
             self.assertEqual(parents[factory.key], factory.parents)
             self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                 str)
@@ -1620,6 +1715,26 @@
                 }
         return keys, sort_order
 
+    def get_keys_and_groupcompress_sort_order(self):
+        """Get diamond test keys list, and their groupcompress sort ordering."""
+        if self.key_length == 1:
+            keys = [('merged',), ('left',), ('right',), ('base',)]
+            sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2}
+        else:
+            keys = [
+                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
+                ('FileA', 'base'),
+                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
+                ('FileB', 'base'),
+                ]
+            sort_order = {
+                ('FileA', 'merged'):0, ('FileA', 'left'):1, ('FileA', 'right'):1,
+                ('FileA', 'base'):2,
+                ('FileB', 'merged'):3, ('FileB', 'left'):4, ('FileB', 'right'):4,
+                ('FileB', 'base'):5,
+                }
+        return keys, sort_order
+
     def test_get_record_stream_interface_ordered(self):
         """each item in a stream has to provide a regular interface."""
         files = self.get_versionedfiles()
@@ -1653,6 +1768,17 @@
 
         self.assertStreamOrder(sort_order, seen, keys)
 
+    def test_get_record_stream_interface_groupcompress(self):
+        """each item in a stream has to provide a regular interface."""
+        files = self.get_versionedfiles()
+        self.get_diamond_files(files)
+        keys, sort_order = self.get_keys_and_groupcompress_sort_order()
+        parent_map = files.get_parent_map(keys)
+        entries = files.get_record_stream(keys, 'groupcompress', False)
+        seen = []
+        self.capture_stream(files, entries, seen.append, parent_map)
+        self.assertStreamOrder(sort_order, seen, keys)
+
     def assertStreamOrder(self, sort_order, seen, keys):
         self.assertEqual(len(set(seen)), len(keys))
         if self.key_length == 1:
@@ -1689,8 +1815,9 @@
         for factory in entries:
             seen.add(factory.key)
             self.assertValidStorageKind(factory.storage_kind)
-            self.assertEqual(files.get_sha1s([factory.key])[factory.key],
-                factory.sha1)
+            if factory.sha1 is not None:
+                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
+                                 factory.sha1)
             self.assertEqual(parent_map[factory.key], factory.parents)
             # currently no stream emits mpdiff
             self.assertRaises(errors.UnavailableRepresentation,
@@ -1894,8 +2021,9 @@
                 self.assertEqual(None, factory.parents)
             else:
                 self.assertValidStorageKind(factory.storage_kind)
-                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
-                    factory.sha1)
+                if factory.sha1 is not None:
+                    sha1 = files.get_sha1s([factory.key])[factory.key]
+                    self.assertEqual(sha1, factory.sha1)
                 self.assertEqual(parents[factory.key], factory.parents)
                 self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                     str)
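Three hunks above relax the sha1 assertions for the same reason: some storage kinds (the new groupcompress-block records, for instance) may not carry a per-record sha1, so it is only cross-checked when present. A sketch of the consumption pattern the tests settle on (check_stream_sketch is an illustrative name; files is any VersionedFiles-like object with get_sha1s):

def check_stream_sketch(files, stream, parent_map):
    # Consume a get_record_stream result, tolerating records that do
    # not carry their own sha1, exactly as the updated assertions do.
    for record in stream:
        if record.storage_kind == 'absent':
            raise KeyError(record.key)
        if record.sha1 is not None:
            expected = files.get_sha1s([record.key])[record.key]
            assert record.sha1 == expected
        assert parent_map[record.key] == record.parents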
@@ -2257,9 +2385,9 @@
             return lines
         lines = iter_with_keys(
             [self.get_simple_key('child'), self.get_simple_key('otherchild')],
-            [('Walking content.', 0, 2),
-             ('Walking content.', 1, 2),
-             ('Walking content.', 2, 2)])
+            [('Walking content', 0, 2),
+             ('Walking content', 1, 2),
+             ('Walking content', 2, 2)])
         # we must see child and otherchild
         self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
         self.assertTrue(
@@ -2268,12 +2396,12 @@
 
         # test all lines
         lines = iter_with_keys(files.keys(),
-            [('Walking content.', 0, 5),
-             ('Walking content.', 1, 5),
-             ('Walking content.', 2, 5),
-             ('Walking content.', 3, 5),
-             ('Walking content.', 4, 5),
-             ('Walking content.', 5, 5)])
+            [('Walking content', 0, 5),
+             ('Walking content', 1, 5),
+             ('Walking content', 2, 5),
+             ('Walking content', 3, 5),
+             ('Walking content', 4, 5),
+             ('Walking content', 5, 5)])
         # all lines must be seen at least once
         self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
         self.assertTrue(