from bzrlib.errors import (
    RevisionNotPresent,
    RevisionAlreadyPresent,
    )
from bzrlib.knit import (
    cleanup_pack_knit,
    make_file_factory,
    make_pack_factory,
    )
from bzrlib.tests import (
    TestCaseWithMemoryTransport,
    TestNotApplicable,
    )
from bzrlib.tests.http_utils import TestCaseWithWebserver
from bzrlib.trace import mutter
from bzrlib.transport import get_transport
from bzrlib.transport.memory import MemoryTransport
from bzrlib.tsort import topo_sort
from bzrlib.tuned_gzip import GzipFile
import bzrlib.versionedfile as versionedfile
from bzrlib.versionedfile import (
    ConstantMapper,
    HashEscapedPrefixMapper,
    make_versioned_files_factory,
    )
from bzrlib.weave import WeaveFile
from bzrlib.weavefile import read_weave, write_weave
from bzrlib.tests.scenarios import load_tests_apply_scenarios


load_tests = load_tests_apply_scenarios
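

# A minimal illustrative sketch, not part of the original suite: it shows the
# scenario mechanism that load_tests_apply_scenarios drives.  Every test in a
# class that defines a ``scenarios`` attribute (a list of (name, attributes)
# pairs) is run once per scenario, with the attribute dict applied to the test
# instance; that is how TestVersionedFiles below receives its 'factory',
# 'cleanup' and 'support_partial_insertion' parameters.  The class and
# attribute names here are hypothetical, and the class is kept inside a helper
# function so the test loader never collects it.
def _scenario_mechanism_sketch():
    """Return a throwaway example class used purely for illustration."""

    class _ExampleScenarioTest(TestCaseWithMemoryTransport):

        scenarios = [
            ('small-chunks', {'chunk_size': 1}),
            ('large-chunks', {'chunk_size': 64}),
            ]

        def test_chunk_size_was_applied(self):
            # Each multiplied copy of this test sees one scenario's attributes.
            self.assertTrue(self.chunk_size in (1, 64))

    return _ExampleScenarioTest
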
def get_diamond_vf(f, trailing_eol=True, left_only=False):

        versions = f.versions()
        self.assertTrue('r0' in versions)
        self.assertTrue('r1' in versions)
        self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
        self.assertEqual(f.get_text('r0'), 'a\nb\n')
        self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
        self.assertEqual(2, len(f))
        self.assertEqual(2, f.num_versions())

        self.assertTrue('r0' in versions)
        self.assertTrue('r1' in versions)
        self.assertTrue('r2' in versions)
        self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
        self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
        self.assertEqual(f.get_lines('r2'), ['c\n', 'd\n'])
        self.assertEqual(3, f.num_versions())
        origins = f.annotate('r1')
        self.assertEqual(origins[0][0], 'r0')
        self.assertEqual(origins[1][0], 'r1')
        origins = f.annotate('r2')
        self.assertEqual(origins[0][0], 'r1')
        self.assertEqual(origins[1][0], 'r2')

        f = self.reopen_file()
            ['base', 'a_ghost'],
            ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        self.assertEqual(('base', 'line\n'), origins[0])
        self.assertEqual(('base', 'line_b\n'), origins[1])
        self.assertEqual(('references_ghost', 'line_c\n'), origins[2])

    def test_readonly_mode(self):
        t = self.get_transport()
        factory = self.get_factory()
        vf = factory('id', t, 0777, create=True, access_mode='w')
        vf = factory('id', t, access_mode='r')
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
        self.assertRaises(errors.ReadOnlyError,
                          vf.add_lines_with_ghosts,


class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo'):
        return WeaveFile(name, self.get_transport(),
                         create=True,
                         get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        w = WeaveFile('foo', self.get_transport(),
                      create=True,
                      get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, self.get_transport(),
                         create=create,
                         get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          self.get_transport(),
                          get_scope=self.get_transaction)

    def get_factory(self):

        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo',
            transport.get_transport_from_url(self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())

    def test_readonly_http_works_with_feeling(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # now with feeling.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo',
            transport.get_transport_from_url(self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        self.assertEqual(['1', '2'], readonly_vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    # as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    len_one_scenarios = [
            'factory':make_versioned_files_factory(WeaveFile,
                ConstantMapper('inventory')),
            'support_partial_insertion': False,
            'factory':make_file_factory(False, ConstantMapper('revisions')),
            'support_partial_insertion': False,
        ('named-nograph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(False, False, 1),
            'support_partial_insertion': False,
        ('named-graph-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 1),
            'support_partial_insertion': True,
        ('named-graph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, False, 1),
            'support_partial_insertion': False,
        ('groupcompress-nograph', {
            'cleanup':groupcompress.cleanup_pack_group,
            'factory':groupcompress.make_pack_factory(False, False, 1),
            'support_partial_insertion':False,
    len_two_scenarios = [
            'factory':make_versioned_files_factory(WeaveFile,
            'support_partial_insertion': False,
        ('annotated-knit-escape', {
            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
            'support_partial_insertion': False,
        ('plain-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 2),
            'support_partial_insertion': True,
            'cleanup':groupcompress.cleanup_pack_group,
            'factory':groupcompress.make_pack_factory(True, False, 1),
            'support_partial_insertion':False,
    scenarios = len_one_scenarios + len_two_scenarios
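
    # Note (a behavioural sketch, not a comment from the original file): each
    # (name, dict) pair above is one scenario.  load_tests_apply_scenarios
    # runs every test in this class once per scenario, with the dict entries
    # set as instance attributes, so tests consult e.g. self.factory,
    # self.cleanup and self.support_partial_insertion to drive the
    # implementation under test.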

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':

        return ('FileA',) + (suffix,)

    def test_add_fallback_implies_without_fallbacks(self):
        f = self.get_versionedfiles('files')
        if getattr(f, 'add_fallback_versioned_files', None) is None:
            raise TestNotApplicable("%s doesn't support fallbacks"
                                    % (f.__class__.__name__,))
        g = self.get_versionedfiles('fallback')
        key_a = self.get_simple_key('a')
        g.add_lines(key_a, [], ['\n'])
        f.add_fallback_versioned_files(g)
        self.assertTrue(key_a in f.get_parent_map([key_a]))
        self.assertFalse(key_a in f.without_fallbacks().get_parent_map([key_a]))

    def test_add_lines(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')

    def test_get_sha1s_nonexistent(self):
        self.assertEqual({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEqual({("key",): osutils.sha_strings(self._lines["key"])},
                         self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        self._parent_map = {"G": ("A", "B")}
        self.assertEqual({("G",): (("A",), ("B",))},
                         self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEqual("chunked", record.storage_kind)
        self.assertEqual("FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEqual(["FOO", "BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEqual("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
        self.assertEqual(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
                         sorted(list(it)))
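
    # A minimal sketch of the assumed fixture (the class's setUp lies outside
    # this excerpt, so the exact wiring is an assumption): the tests above
    # behave as if ``self.texts`` were a versionedfile.VirtualVersionedFiles
    # whose parent map and lines are served from the ``self._parent_map`` and
    # ``self._lines`` dictionaries that each test populates, roughly:
    #
    #     self._lines = {}
    #     self._parent_map = {}
    #     self.texts = versionedfile.VirtualVersionedFiles(
    #         lambda keys: dict((k, self._parent_map[k])
    #                           for k in keys if k in self._parent_map),
    #         self._lines.get)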