~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/per_versionedfile.py

  • Committer: Patch Queue Manager
  • Date: 2016-04-21 04:10:52 UTC
  • mfrom: (6616.1.1 fix-en-user-guide)
  • Revision ID: pqm@pqm.ubuntu.com-20160421041052-clcye7ns1qcl2n7w
(richard-wilbur) Ensure build of English user guide always uses English text
 even when user's locale specifies a different language. (Jelmer Vernooij)

=== modified file 'bzrlib/tests/per_versionedfile.py'
@@ -1,4 +1,4 @@
-# Copyright (C) 2006-2010 Canonical Ltd
+# Copyright (C) 2006-2012, 2016 Canonical Ltd
 #
 # Authors:
 #   Johan Rydberg <jrydberg@gnu.org>
@@ -21,6 +21,7 @@
 # TODO: might be nice to create a versionedfile with some type of corruption
 # considered typical and check that it can be detected/corrected.
 
+from gzip import GzipFile
 from itertools import chain, izip
 from StringIO import StringIO
 
@@ -31,35 +32,26 @@
     knit as _mod_knit,
     osutils,
     progress,
+    transport,
     ui,
     )
 from bzrlib.errors import (
                            RevisionNotPresent,
                            RevisionAlreadyPresent,
-                           WeaveParentMismatch
                            )
 from bzrlib.knit import (
     cleanup_pack_knit,
     make_file_factory,
     make_pack_factory,
-    KnitAnnotateFactory,
-    KnitPlainFactory,
     )
 from bzrlib.tests import (
     TestCase,
     TestCaseWithMemoryTransport,
     TestNotApplicable,
     TestSkipped,
-    condition_isinstance,
-    split_suite_by_condition,
-    multiply_tests,
     )
 from bzrlib.tests.http_utils import TestCaseWithWebserver
-from bzrlib.trace import mutter
-from bzrlib.transport import get_transport
 from bzrlib.transport.memory import MemoryTransport
-from bzrlib.tsort import topo_sort
-from bzrlib.tuned_gzip import GzipFile
 import bzrlib.versionedfile as versionedfile
 from bzrlib.versionedfile import (
     ConstantMapper,
@@ -69,101 +61,11 @@
     make_versioned_files_factory,
     )
 from bzrlib.weave import WeaveFile
-from bzrlib.weavefile import read_weave, write_weave
-
-
-def load_tests(standard_tests, module, loader):
-    """Parameterize VersionedFiles tests for different implementations."""
-    to_adapt, result = split_suite_by_condition(
-        standard_tests, condition_isinstance(TestVersionedFiles))
-    # We want to be sure of behaviour for:
-    # weaves prefix layout (weave texts)
-    # individually named weaves (weave inventories)
-    # annotated knits - prefix|hash|hash-escape layout, we test the third only
-    #                   as it is the most complex mapper.
-    # individually named knits
-    # individual no-graph knits in packs (signatures)
-    # individual graph knits in packs (inventories)
-    # individual graph nocompression knits in packs (revisions)
-    # plain text knits in packs (texts)
-    len_one_scenarios = [
-        ('weave-named', {
-            'cleanup':None,
-            'factory':make_versioned_files_factory(WeaveFile,
-                ConstantMapper('inventory')),
-            'graph':True,
-            'key_length':1,
-            'support_partial_insertion': False,
-            }),
-        ('named-knit', {
-            'cleanup':None,
-            'factory':make_file_factory(False, ConstantMapper('revisions')),
-            'graph':True,
-            'key_length':1,
-            'support_partial_insertion': False,
-            }),
-        ('named-nograph-nodelta-knit-pack', {
-            'cleanup':cleanup_pack_knit,
-            'factory':make_pack_factory(False, False, 1),
-            'graph':False,
-            'key_length':1,
-            'support_partial_insertion': False,
-            }),
-        ('named-graph-knit-pack', {
-            'cleanup':cleanup_pack_knit,
-            'factory':make_pack_factory(True, True, 1),
-            'graph':True,
-            'key_length':1,
-            'support_partial_insertion': True,
-            }),
-        ('named-graph-nodelta-knit-pack', {
-            'cleanup':cleanup_pack_knit,
-            'factory':make_pack_factory(True, False, 1),
-            'graph':True,
-            'key_length':1,
-            'support_partial_insertion': False,
-            }),
-        ('groupcompress-nograph', {
-            'cleanup':groupcompress.cleanup_pack_group,
-            'factory':groupcompress.make_pack_factory(False, False, 1),
-            'graph': False,
-            'key_length':1,
-            'support_partial_insertion':False,
-            }),
-        ]
-    len_two_scenarios = [
-        ('weave-prefix', {
-            'cleanup':None,
-            'factory':make_versioned_files_factory(WeaveFile,
-                PrefixMapper()),
-            'graph':True,
-            'key_length':2,
-            'support_partial_insertion': False,
-            }),
-        ('annotated-knit-escape', {
-            'cleanup':None,
-            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
-            'graph':True,
-            'key_length':2,
-            'support_partial_insertion': False,
-            }),
-        ('plain-knit-pack', {
-            'cleanup':cleanup_pack_knit,
-            'factory':make_pack_factory(True, True, 2),
-            'graph':True,
-            'key_length':2,
-            'support_partial_insertion': True,
-            }),
-        ('groupcompress', {
-            'cleanup':groupcompress.cleanup_pack_group,
-            'factory':groupcompress.make_pack_factory(True, False, 1),
-            'graph': True,
-            'key_length':1,
-            'support_partial_insertion':False,
-            }),
-        ]
-    scenarios = len_one_scenarios + len_two_scenarios
-    return multiply_tests(to_adapt, scenarios, result)
+from bzrlib.weavefile import write_weave
+from bzrlib.tests.scenarios import load_tests_apply_scenarios
+
+
+load_tests = load_tests_apply_scenarios
 
 
 def get_diamond_vf(f, trailing_eol=True, left_only=False):
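
Note (not part of the diff): the hunk above is the core of this change. The hand-written load_tests() that multiplied TestVersionedFiles across scenarios is replaced by the generic helper from bzrlib.tests.scenarios, with the scenario list moving onto the test class itself (see the later hunk in TestVersionedFiles). As a rough sketch only, with made-up class and attribute names, the pattern works like this: load_tests_apply_scenarios copies every test once per entry in the class-level scenarios list and applies that entry's dict as instance attributes.

    from bzrlib.tests import TestCase
    from bzrlib.tests.scenarios import load_tests_apply_scenarios

    load_tests = load_tests_apply_scenarios


    class TestSizes(TestCase):
        # Hypothetical scenarios, for illustration only: each (name, dict)
        # pair yields one copy of every test method, with the dict's keys
        # set as attributes on the test instance.

        scenarios = [
            ('small', {'size': 1}),
            ('large', {'size': 100}),
            ]

        def test_size_is_positive(self):
            # Runs once per scenario; self.size comes from the scenario dict.
            self.assertTrue(self.size > 0)
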
@@ -280,9 +182,9 @@
             versions = f.versions()
             self.assertTrue('r0' in versions)
             self.assertTrue('r1' in versions)
-            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
-            self.assertEquals(f.get_text('r0'), 'a\nb\n')
-            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
+            self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
+            self.assertEqual(f.get_text('r0'), 'a\nb\n')
+            self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
             self.assertEqual(2, len(f))
             self.assertEqual(2, f.num_versions())
 
@@ -314,16 +216,16 @@
             self.assertTrue('r0' in versions)
             self.assertTrue('r1' in versions)
             self.assertTrue('r2' in versions)
-            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
-            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
-            self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
+            self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
+            self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
+            self.assertEqual(f.get_lines('r2'), ['c\n', 'd\n'])
             self.assertEqual(3, f.num_versions())
             origins = f.annotate('r1')
-            self.assertEquals(origins[0][0], 'r0')
-            self.assertEquals(origins[1][0], 'r1')
+            self.assertEqual(origins[0][0], 'r0')
+            self.assertEqual(origins[1][0], 'r1')
             origins = f.annotate('r2')
-            self.assertEquals(origins[0][0], 'r1')
-            self.assertEquals(origins[1][0], 'r2')
+            self.assertEqual(origins[0][0], 'r1')
+            self.assertEqual(origins[1][0], 'r2')
 
         verify_file(f)
         f = self.reopen_file()
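
Aside (not part of the diff): the repeated assertEquals -> assertEqual substitutions in these hunks are purely mechanical. In Python's unittest, assertEquals is a deprecated alias that forwards to assertEqual, so the rename changes no behaviour; a minimal standalone illustration:

    import unittest


    class AliasDemo(unittest.TestCase):

        def test_alias(self):
            # Both names compare with ==; assertEqual is simply the
            # non-deprecated spelling that the diff standardises on.
            self.assertEqual(2 + 2, 4)


    if __name__ == '__main__':
        unittest.main()
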
@@ -693,8 +595,8 @@
         f.add_lines('r0', [], ['a\n', 'b\n'])
         f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
         origins = f.annotate('r1')
-        self.assertEquals(origins[0][0], 'r1')
-        self.assertEquals(origins[1][0], 'r0')
+        self.assertEqual(origins[0][0], 'r1')
+        self.assertEqual(origins[1][0], 'r0')
 
         self.assertRaises(RevisionNotPresent,
             f.annotate, 'foo')
@@ -844,15 +746,15 @@
                                  ['base', 'a_ghost'],
                                  ['line\n', 'line_b\n', 'line_c\n'])
         origins = vf.annotate('references_ghost')
-        self.assertEquals(('base', 'line\n'), origins[0])
-        self.assertEquals(('base', 'line_b\n'), origins[1])
-        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
+        self.assertEqual(('base', 'line\n'), origins[0])
+        self.assertEqual(('base', 'line_b\n'), origins[1])
+        self.assertEqual(('references_ghost', 'line_c\n'), origins[2])
 
     def test_readonly_mode(self):
-        transport = get_transport(self.get_url('.'))
+        t = self.get_transport()
         factory = self.get_factory()
-        vf = factory('id', transport, 0777, create=True, access_mode='w')
-        vf = factory('id', transport, access_mode='r')
+        vf = factory('id', t, 0777, create=True, access_mode='w')
+        vf = factory('id', t, access_mode='r')
         self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
         self.assertRaises(errors.ReadOnlyError,
                           vf.add_lines_with_ghosts,
@@ -880,12 +782,14 @@
 class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
 
     def get_file(self, name='foo'):
-        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
-            get_scope=self.get_transaction)
+        return WeaveFile(name, self.get_transport(),
+                         create=True,
+                         get_scope=self.get_transaction)
 
     def get_file_corrupted_text(self):
-        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
-            get_scope=self.get_transaction)
+        w = WeaveFile('foo', self.get_transport(),
+                      create=True,
+                      get_scope=self.get_transaction)
         w.add_lines('v1', [], ['hello\n'])
         w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
 
@@ -919,14 +823,15 @@
         return w
 
     def reopen_file(self, name='foo', create=False):
-        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
-            get_scope=self.get_transaction)
+        return WeaveFile(name, self.get_transport(),
+                         create=create,
+                         get_scope=self.get_transaction)
 
     def test_no_implicit_create(self):
         self.assertRaises(errors.NoSuchFile,
                           WeaveFile,
                           'foo',
-                          get_transport(self.get_url('.')),
+                          self.get_transport(),
                           get_scope=self.get_transaction)
 
     def get_factory(self):
@@ -936,7 +841,7 @@
 class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):
 
     def setUp(self):
-        TestCaseWithMemoryTransport.setUp(self)
+        super(TestPlanMergeVersionedFile, self).setUp()
         mapper = PrefixMapper()
         factory = make_file_factory(True, mapper)
         self.vf1 = factory(self.get_transport('root-1'))
@@ -999,13 +904,20 @@
         # we should be able to read from http with a versioned file.
         vf = self.get_file()
         # try an empty file access
-        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
+        readonly_vf = self.get_factory()('foo',
+            transport.get_transport_from_url(self.get_readonly_url('.')))
         self.assertEqual([], readonly_vf.versions())
+
+    def test_readonly_http_works_with_feeling(self):
+        # we should be able to read from http with a versioned file.
+        vf = self.get_file()
         # now with feeling.
         vf.add_lines('1', [], ['a\n'])
         vf.add_lines('2', ['1'], ['b\n', 'a\n'])
-        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
+        readonly_vf = self.get_factory()('foo',
+            transport.get_transport_from_url(self.get_readonly_url('.')))
         self.assertEqual(['1', '2'], vf.versions())
+        self.assertEqual(['1', '2'], readonly_vf.versions())
         for version in readonly_vf.versions():
             readonly_vf.get_lines(version)
 
@@ -1013,8 +925,9 @@
 class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
 
     def get_file(self):
-        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
-            get_scope=self.get_transaction)
+        return WeaveFile('foo', self.get_transport(),
+                         create=True,
+                         get_scope=self.get_transaction)
 
     def get_factory(self):
         return WeaveFile
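
Aside (illustrative only, using APIs that appear in this diff): the hunks above drop the old module-level bzrlib.transport.get_transport(self.get_url('.')) idiom in favour of the test-case helper self.get_transport() or, where only a URL is available, transport.get_transport_from_url(). A minimal sketch of the latter against a throwaway in-memory transport (assuming a writable transport; error handling omitted):

    from bzrlib import transport

    # 'memory:///' resolves to an in-memory scratch transport.
    t = transport.get_transport_from_url('memory:///')
    t.put_bytes('example.txt', 'hello world\n')
    print t.get_bytes('example.txt')
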
@@ -1264,7 +1177,8 @@
 class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
 
     def get_file(self, name='foo'):
-        return WeaveFile(name, get_transport(self.get_url('.')), create=True)
+        return WeaveFile(name, self.get_transport(),
+                         create=True)
 
     def log_contents(self, w):
         self.log('weave is:')
@@ -1463,6 +1377,95 @@
 class TestVersionedFiles(TestCaseWithMemoryTransport):
     """Tests for the multiple-file variant of VersionedFile."""
 
+    # We want to be sure of behaviour for:
+    # weaves prefix layout (weave texts)
+    # individually named weaves (weave inventories)
+    # annotated knits - prefix|hash|hash-escape layout, we test the third only
+    #                   as it is the most complex mapper.
+    # individually named knits
+    # individual no-graph knits in packs (signatures)
+    # individual graph knits in packs (inventories)
+    # individual graph nocompression knits in packs (revisions)
+    # plain text knits in packs (texts)
+    len_one_scenarios = [
+        ('weave-named', {
+            'cleanup':None,
+            'factory':make_versioned_files_factory(WeaveFile,
+                ConstantMapper('inventory')),
+            'graph':True,
+            'key_length':1,
+            'support_partial_insertion': False,
+            }),
+        ('named-knit', {
+            'cleanup':None,
+            'factory':make_file_factory(False, ConstantMapper('revisions')),
+            'graph':True,
+            'key_length':1,
+            'support_partial_insertion': False,
+            }),
+        ('named-nograph-nodelta-knit-pack', {
+            'cleanup':cleanup_pack_knit,
+            'factory':make_pack_factory(False, False, 1),
+            'graph':False,
+            'key_length':1,
+            'support_partial_insertion': False,
+            }),
+        ('named-graph-knit-pack', {
+            'cleanup':cleanup_pack_knit,
+            'factory':make_pack_factory(True, True, 1),
+            'graph':True,
+            'key_length':1,
+            'support_partial_insertion': True,
+            }),
+        ('named-graph-nodelta-knit-pack', {
+            'cleanup':cleanup_pack_knit,
+            'factory':make_pack_factory(True, False, 1),
+            'graph':True,
+            'key_length':1,
+            'support_partial_insertion': False,
+            }),
+        ('groupcompress-nograph', {
+            'cleanup':groupcompress.cleanup_pack_group,
+            'factory':groupcompress.make_pack_factory(False, False, 1),
+            'graph': False,
+            'key_length':1,
+            'support_partial_insertion':False,
+            }),
+        ]
+    len_two_scenarios = [
+        ('weave-prefix', {
+            'cleanup':None,
+            'factory':make_versioned_files_factory(WeaveFile,
+                PrefixMapper()),
+            'graph':True,
+            'key_length':2,
+            'support_partial_insertion': False,
+            }),
+        ('annotated-knit-escape', {
+            'cleanup':None,
+            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
+            'graph':True,
+            'key_length':2,
+            'support_partial_insertion': False,
+            }),
+        ('plain-knit-pack', {
+            'cleanup':cleanup_pack_knit,
+            'factory':make_pack_factory(True, True, 2),
+            'graph':True,
+            'key_length':2,
+            'support_partial_insertion': True,
+            }),
+        ('groupcompress', {
+            'cleanup':groupcompress.cleanup_pack_group,
+            'factory':groupcompress.make_pack_factory(True, False, 1),
+            'graph': True,
+            'key_length':1,
+            'support_partial_insertion':False,
+            }),
+        ]
+
+    scenarios = len_one_scenarios + len_two_scenarios
+
     def get_versionedfiles(self, relpath='files'):
         transport = self.get_transport(relpath)
         if relpath != '.':
@@ -1479,6 +1482,18 @@
         else:
             return ('FileA',) + (suffix,)
 
+    def test_add_fallback_implies_without_fallbacks(self):
+        f = self.get_versionedfiles('files')
+        if getattr(f, 'add_fallback_versioned_files', None) is None:
+            raise TestNotApplicable("%s doesn't support fallbacks"
+                                    % (f.__class__.__name__,))
+        g = self.get_versionedfiles('fallback')
+        key_a = self.get_simple_key('a')
+        g.add_lines(key_a, [], ['\n'])
+        f.add_fallback_versioned_files(g)
+        self.assertTrue(key_a in f.get_parent_map([key_a]))
+        self.assertFalse(key_a in f.without_fallbacks().get_parent_map([key_a]))
+
     def test_add_lines(self):
         f = self.get_versionedfiles()
         key0 = self.get_simple_key('r0')
@@ -2731,7 +2746,7 @@
         return ret
 
     def setUp(self):
-        TestCase.setUp(self)
+        super(VirtualVersionedFilesTests, self).setUp()
         self._lines = {}
         self._parent_map = {}
         self.texts = VirtualVersionedFiles(self._get_parent_map,
@@ -2753,36 +2768,36 @@
                           [])
 
     def test_get_sha1s_nonexistent(self):
-        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))
+        self.assertEqual({}, self.texts.get_sha1s([("NONEXISTENT",)]))
 
     def test_get_sha1s(self):
         self._lines["key"] = ["dataline1", "dataline2"]
-        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
+        self.assertEqual({("key",): osutils.sha_strings(self._lines["key"])},
                            self.texts.get_sha1s([("key",)]))
 
     def test_get_parent_map(self):
         self._parent_map = {"G": ("A", "B")}
-        self.assertEquals({("G",): (("A",),("B",))},
+        self.assertEqual({("G",): (("A",),("B",))},
                           self.texts.get_parent_map([("G",), ("L",)]))
 
     def test_get_record_stream(self):
         self._lines["A"] = ["FOO", "BAR"]
         it = self.texts.get_record_stream([("A",)], "unordered", True)
         record = it.next()
-        self.assertEquals("chunked", record.storage_kind)
-        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
-        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))
+        self.assertEqual("chunked", record.storage_kind)
+        self.assertEqual("FOOBAR", record.get_bytes_as("fulltext"))
+        self.assertEqual(["FOO", "BAR"], record.get_bytes_as("chunked"))
 
     def test_get_record_stream_absent(self):
         it = self.texts.get_record_stream([("A",)], "unordered", True)
         record = it.next()
-        self.assertEquals("absent", record.storage_kind)
+        self.assertEqual("absent", record.storage_kind)
 
     def test_iter_lines_added_or_present_in_keys(self):
         self._lines["A"] = ["FOO", "BAR"]
         self._lines["B"] = ["HEY"]
         self._lines["C"] = ["Alberta"]
         it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
-        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
+        self.assertEqual(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
             sorted(list(it)))
 