# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.
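#
# A rough sketch of the shape such a test could take (illustrative only: the
# stored filename, the corrupted offset, and the expected error are
# assumptions, and no such test exists below):
#
#     def test_corrupt_byte_detected(self):
#         vf = self.get_file()
#         vf.add_lines('v1', [], ['hello\n'])
#         # Flip the last byte of the stored file behind the API's back.
#         t = self.get_transport()
#         data = t.get_bytes('foo.weave')
#         t.put_bytes('foo.weave', data[:-1] + chr(ord(data[-1]) ^ 0xFF))
#         self.assertRaises(errors.BzrError, vf.get_lines, 'v1')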

from gzip import GzipFile
from itertools import chain, izip
from StringIO import StringIO

from bzrlib import (
    errors,
    graph as _mod_graph,
    groupcompress,
    transport,
    )
from bzrlib.errors import (
    RevisionNotPresent,
    RevisionAlreadyPresent,
    )
from bzrlib.knit import (
    cleanup_pack_knit,
    make_file_factory,
    make_pack_factory,
    )
from bzrlib.tests import (
    TestCaseWithMemoryTransport,
    TestNotApplicable,
    )
from bzrlib.tests.http_utils import TestCaseWithWebserver
from bzrlib.transport.memory import MemoryTransport
import bzrlib.versionedfile as versionedfile
from bzrlib.versionedfile import (
    ConstantMapper,
    HashEscapedPrefixMapper,
    PrefixMapper,
    make_versioned_files_factory,
    )
from bzrlib.weave import WeaveFile
from bzrlib.weavefile import write_weave
from bzrlib.tests.scenarios import load_tests_apply_scenarios


load_tests = load_tests_apply_scenarios
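# load_tests_apply_scenarios multiplies each test in a class that defines a
# `scenarios` attribute: one test per (name, attrs) pair, with every entry of
# the attrs dict set as an attribute on that test instance.  A minimal sketch
# of the shape (illustrative only, not part of this suite):
#
#     class _ScenarioDemo(TestCaseWithMemoryTransport):
#         scenarios = [('one', {'key_length': 1}), ('two', {'key_length': 2})]
#
#         def test_key_length(self):
#             self.assertTrue(self.key_length in (1, 2))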


def get_diamond_vf(f, trailing_eol=True, left_only=False):
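    # A diamond ancestry (left and right both derived from base, then merged)
    # exercises delta chains down each side as well as merge resolution,
    # which is why so many tests below build on this fixture.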

        self.assertEqual(('references_ghost', 'line_c\n'), origins[2])

    def test_readonly_mode(self):
        t = self.get_transport()
        factory = self.get_factory()
        vf = factory('id', t, 0777, create=True, access_mode='w')
        vf = factory('id', t, access_mode='r')
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
        self.assertRaises(errors.ReadOnlyError,
                          vf.add_lines_with_ghosts,

class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo'):
        return WeaveFile(name, self.get_transport(),
                         create=True,
                         get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        w = WeaveFile('foo', self.get_transport(),
                      create=True,
                      get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, self.get_transport(),
                         create=create,
                         get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          self.get_transport(),
                          get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', transport.get_transport(
            self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())

    def test_readonly_http_works_with_feeling(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # now with feeling.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', transport.get_transport(
            self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        self.assertEqual(['1', '2'], readonly_vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)
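
    # TestCaseWithWebserver serves the test directory over HTTP, so the
    # readonly copies above read back the bytes the writable file just
    # stored, through a transport that cannot write.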

class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    #        as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    len_one_scenarios = [
        ('weave-named', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                ConstantMapper('inventory')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-knit', {
            'cleanup':None,
            'factory':make_file_factory(False, ConstantMapper('revisions')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-nograph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(False, False, 1),
            'graph':False,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-graph-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': True,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, False, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('groupcompress-nograph', {
            'cleanup':groupcompress.cleanup_pack_group,
            'factory':groupcompress.make_pack_factory(False, False, 1),
            'graph':False,
            'key_length':1,
            'support_partial_insertion':False,
            }),
        ]
    len_two_scenarios = [
        ('weave-prefix', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                PrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('annotated-knit-escape', {
            'cleanup':None,
            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('plain-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 2),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': True,
            }),
        ('groupcompress', {
            'cleanup':groupcompress.cleanup_pack_group,
            'factory':groupcompress.make_pack_factory(True, False, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion':False,
            }),
        ]

    scenarios = len_one_scenarios + len_two_scenarios

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(self.cleanup, files)
        return files

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)
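
    # Scenarios run with key_length 1 (keys like ('name',)) or 2 (keys like
    # ('FileA', 'name')), so tests build keys via get_simple_key rather than
    # hard-coding a particular shape.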

    def test_add_fallback_implies_without_fallbacks(self):
        f = self.get_versionedfiles('files')
        if getattr(f, 'add_fallback_versioned_files', None) is None:
            raise TestNotApplicable("%s doesn't support fallbacks"
                                    % (f.__class__.__name__,))
        g = self.get_versionedfiles('fallback')
        key_a = self.get_simple_key('a')
        g.add_lines(key_a, [], ['\n'])
        f.add_fallback_versioned_files(g)
        self.assertTrue(key_a in f.get_parent_map([key_a]))
        self.assertFalse(key_a in f.without_fallbacks().get_parent_map([key_a]))
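
    # Fallbacks model stacked storage: a key that lives only in the fallback
    # is reachable through the stacked object's queries, while
    # without_fallbacks() must expose only locally stored texts.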

    def test_add_lines(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')

            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_get_known_graph_ancestry(self):
        f = self.get_versionedfiles()
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        key_a = self.get_simple_key('a')
        key_b = self.get_simple_key('b')
        key_c = self.get_simple_key('c')
        f.add_lines(key_a, [], ['\n'])
        f.add_lines(key_b, [key_a], ['\n'])
        f.add_lines(key_c, [key_a, key_b], ['\n'])
        kg = f.get_known_graph_ancestry([key_c])
        self.assertIsInstance(kg, _mod_graph.KnownGraph)
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
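
    # get_known_graph_ancestry pulls the whole ancestry into an in-memory
    # KnownGraph, so ordering queries such as topo_sort run against that
    # snapshot rather than going back to the store.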

    def test_known_graph_with_fallbacks(self):
        f = self.get_versionedfiles('files')
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        if getattr(f, 'add_fallback_versioned_files', None) is None:
            raise TestNotApplicable("%s doesn't support fallbacks"
                                    % (f.__class__.__name__,))
        key_a = self.get_simple_key('a')
        key_b = self.get_simple_key('b')
        key_c = self.get_simple_key('c')
        # A only in fallback
        g = self.get_versionedfiles('fallback')
        g.add_lines(key_a, [], ['\n'])
        f.add_fallback_versioned_files(g)
        f.add_lines(key_b, [key_a], ['\n'])
        f.add_lines(key_c, [key_a, key_b], ['\n'])
        kg = f.get_known_graph_ancestry([key_c])
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))

    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        f = self.get_versionedfiles()

        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_long_parent_chain_out_of_order(self):
        """An out of order stream can either error or work."""
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        # Create a reasonably long chain of records based on each other, where
        # most will be deltas.
        source = self.get_versionedfiles('source')
        parents = ()
        keys = []
        content = [('same same %d\n' % n) for n in range(500)]
        for letter in 'abcdefghijklmnopqrstuvwxyz':
            key = ('key-' + letter,)
            if self.key_length == 2:
                key = ('prefix',) + key
            content.append('content for ' + letter + '\n')
            source.add_lines(key, parents, content)
            keys.append(key)
            parents = (key,)
        # Create a stream of these records, excluding the first record that
        # the rest ultimately depend upon, and insert it into a new vf.
        streams = []
        for key in reversed(keys):
            streams.append(source.get_record_stream([key], 'unordered', False))
        deltas = chain(*streams[:-1])
        files = self.get_versionedfiles()
        try:
            files.insert_record_stream(deltas)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            # Must only report either just the first key as a missing parent,
            # or no key as missing (for nodelta scenarios).
            missing = set(files.get_missing_compression_parent_keys())
            missing.discard(keys[0])
            self.assertEqual(set(), missing)
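
    # Delta-based stores record each text against a compression parent.  An
    # implementation advertising support_partial_insertion may buffer such a
    # stream and report the absent basis via
    # get_missing_compression_parent_keys(); others must raise
    # RevisionNotPresent rather than store deltas they cannot expand.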

    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.