# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.

from gzip import GzipFile
from itertools import chain, izip
from StringIO import StringIO
from bzrlib import (
    errors,
    graph as _mod_graph,
    groupcompress,
    ui,
    )
from bzrlib.errors import (
    RevisionNotPresent,
    RevisionAlreadyPresent,
    )
from bzrlib import knit as _mod_knit
from bzrlib.knit import (
    cleanup_pack_knit,
    make_file_factory,
    make_pack_factory,
    )
from bzrlib.tests import (
    TestCaseWithMemoryTransport,
    TestNotApplicable,
    )
from bzrlib.tests.http_utils import TestCaseWithWebserver
from bzrlib.transport.memory import MemoryTransport
import bzrlib.versionedfile as versionedfile
from bzrlib.versionedfile import (
    ConstantMapper,
    HashEscapedPrefixMapper,
    PrefixMapper,
    make_versioned_files_factory,
    )
from bzrlib.weave import WeaveFile
from bzrlib.weavefile import write_weave
from bzrlib.tests.scenarios import load_tests_apply_scenarios


load_tests = load_tests_apply_scenarios
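
# load_tests_apply_scenarios multiplies every test in this module by the
# 'scenarios' attribute of its test class (see TestVersionedFiles below):
# each (name, attributes) pair produces one test instance with those
# attributes set on it.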


def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Get a diamond graph to exercise deltas and merges.

    :param trailing_eol: If True end the last line with \n.
    """


def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
    nograph=False, nokeys=False):
    """Get a diamond graph to exercise deltas and merges for many files.

    :param trailing_eol: If True end the last line with \n.
    """
    if key_length == 1:
        prefixes = [()]
    else:
        prefixes = [('FileA',), ('FileB',)]
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    result = []
    def get_parents(suffix_list):
        if nograph:
            return ()
        else:
            result = [prefix + suffix for suffix in suffix_list]
            return result
    def get_key(suffix):
        if nokeys:
            return (None,)
        else:
            return (suffix,)
    # we loop over each key because that spreads the inserts across prefixes,
    # which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('origin'), (),
            ['origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('base'),
            get_parents([('origin',)]), ['base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('left'),
            get_parents([('base',)]),
            ['base\n', 'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('right'),
                get_parents([('base',)]),
                ['base\n', 'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('merged'),
                get_parents([('left',), ('right',)]),
                ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
    return result
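
# The loops above build, for each prefix, the diamond ancestry used
# throughout these tests:
#
#            origin
#              |
#             base
#            /    \
#         left    right
#            \    /
#            merged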

        self.assertEqual(expected, progress.updates)
        lines = iter_with_versions(['child', 'otherchild'],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_versions(None, [('Walking content', 0, 5),
            ('Walking content', 1, 5),
            ('Walking content', 2, 5),
            ('Walking content', 3, 5),
            ('Walking content', 4, 5),
            ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', 'base')] > 0)
        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    #                   as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    len_one_scenarios = [
        ('weave-named', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                ConstantMapper('inventory')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-knit', {
            'cleanup':None,
            'factory':make_file_factory(False, ConstantMapper('revisions')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-nograph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(False, False, 1),
            'graph':False,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-graph-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': True,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, False, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('groupcompress-nograph', {
            'cleanup':groupcompress.cleanup_pack_group,
            'factory':groupcompress.make_pack_factory(False, False, 1),
            'graph': False,
            'key_length':1,
            'support_partial_insertion':False,
            }),
        ]
    len_two_scenarios = [
        ('weave-prefix', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                PrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('annotated-knit-escape', {
            'cleanup':None,
            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('plain-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 2),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': True,
            }),
        ('groupcompress', {
            'cleanup':groupcompress.cleanup_pack_group,
            'factory':groupcompress.make_pack_factory(True, False, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion':False,
            }),
        ]

    scenarios = len_one_scenarios + len_two_scenarios
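
    # When the scenarios are applied, each attribute dict lands on the test
    # instance, so the tests below can rely on self.factory, self.cleanup,
    # self.graph, self.key_length and self.support_partial_insertion.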

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(self.cleanup, files)
        return files

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)
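
    # e.g. get_simple_key('a') -> ('a',) under key_length 1 scenarios, and
    # ('FileA', 'a') under key_length 2 scenarios.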

    def test_add_fallback_implies_without_fallbacks(self):
        f = self.get_versionedfiles('files')
        if getattr(f, 'add_fallback_versioned_files', None) is None:
            raise TestNotApplicable("%s doesn't support fallbacks"
                                    % (f.__class__.__name__,))
        g = self.get_versionedfiles('fallback')
        key_a = self.get_simple_key('a')
        g.add_lines(key_a, [], ['\n'])
        f.add_fallback_versioned_files(g)
        self.assertTrue(key_a in f.get_parent_map([key_a]))
        self.assertFalse(key_a in f.without_fallbacks().get_parent_map([key_a]))

    def test_add_lines(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f.add_lines(key0, [], ['a\n', 'b\n'])
        if self.graph:
            f.add_lines(key1, [key0], ['b\n', 'c\n'])
        else:
            f.add_lines(key1, [], ['b\n', 'c\n'])
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)

    def test__add_text(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f._add_text(key0, [], 'a\nb\n')
        if self.graph:
            f._add_text(key1, [key0], 'b\nc\n')
        else:
            f._add_text(key1, [], 'b\nc\n')
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))

    def test_check_no_parameters(self):
        files = self.get_versionedfiles()
        files.check()

    def test_check_progressbar_parameter(self):
        """A progress bar can be supplied because check can be a generator."""
        pb = ui.ui_factory.nested_progress_bar()
        self.addCleanup(pb.finished)
        files = self.get_versionedfiles()
        files.check(progress_bar=pb)

    def test_check_with_keys_becomes_generator(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys = files.keys()
        entries = files.check(keys=keys)
        seen = set()
        # Texts output should be fulltexts.
        self.capture_stream(files, entries, seen.add,
            files.get_parent_map(keys), require_fulltext=True)
        # All texts should be output.
        self.assertEqual(set(keys), seen)

    def test_clear_cache(self):
        files = self.get_versionedfiles()
        files.clear_cache()
1602
def test_construct(self):
1485
1603
"""Each parameterised test can be constructed on a transport."""
1486
1604
files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
        nokeys=False):
        return get_diamond_files(files, self.key_length,
            trailing_eol=trailing_eol, nograph=not self.graph,
            left_only=left_only, nokeys=nokeys)

    def _add_content_nostoresha(self, add_lines):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_versionedfiles()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            if add_lines:
                sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
                                         lines)
            else:
                sha, _, _ = vf._add_text(self.get_simple_key(version), [],
                                         ''.join(lines))
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + "2")
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, new_key, [], lines,
                nostore_sha=sha)
            self.assertRaises(errors.ExistingContent,
                vf._add_text, new_key, [], ''.join(lines),
                nostore_sha=sha)
            # and no new version should have been added.
            record = vf.get_record_stream([new_key], 'unordered', True).next()
            self.assertEqual('absent', record.storage_kind)

    def test_add_lines_nostoresha(self):
        self._add_content_nostoresha(add_lines=True)

    def test__add_text_nostoresha(self):
        self._add_content_nostoresha(add_lines=False)

    def test_add_lines_return(self):
        files = self.get_versionedfiles()

    def test_add_lines_no_key_generates_chk_key(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files, nokeys=True)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
                ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
                ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
                ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
                ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
                ]),
                files.keys())
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ]),
                files.keys())
1732
def test_empty_lines(self):
1525
1733
"""Empty files can be stored."""
1526
1734
f = self.get_versionedfiles()
1548
1756
f.get_record_stream([key_b], 'unordered', True
1549
1757
).next().get_bytes_as('fulltext'))

    def test_get_known_graph_ancestry(self):
        f = self.get_versionedfiles()
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        key_a = self.get_simple_key('a')
        key_b = self.get_simple_key('b')
        key_c = self.get_simple_key('c')
        f.add_lines(key_a, [], ['\n'])
        f.add_lines(key_b, [key_a], ['\n'])
        f.add_lines(key_c, [key_a, key_b], ['\n'])
        kg = f.get_known_graph_ancestry([key_c])
        self.assertIsInstance(kg, _mod_graph.KnownGraph)
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))

    def test_known_graph_with_fallbacks(self):
        f = self.get_versionedfiles('files')
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        if getattr(f, 'add_fallback_versioned_files', None) is None:
            raise TestNotApplicable("%s doesn't support fallbacks"
                                    % (f.__class__.__name__,))
        key_a = self.get_simple_key('a')
        key_b = self.get_simple_key('b')
        key_c = self.get_simple_key('c')
        # A only in fallback
        g = self.get_versionedfiles('fallback')
        g.add_lines(key_a, [], ['\n'])
        f.add_fallback_versioned_files(g)
        f.add_lines(key_b, [key_a], ['\n'])
        f.add_lines(key_c, [key_a, key_b], ['\n'])
        kg = f.get_known_graph_ancestry([key_c])
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))

    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        f = self.get_versionedfiles()
        entries = f.get_record_stream([], 'unordered', True)
        self.assertEqual([], list(entries))

    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
             'knit-delta-gz',
             'knit-delta-closure', 'knit-delta-closure-ref',
             'groupcompress-block', 'groupcompress-block-ref'])

    def capture_stream(self, f, entries, on_seen, parents,
        require_fulltext=False):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
            if require_fulltext:
                factory.get_bytes_as('fulltext')

    def test_get_record_stream_interface(self):
        """Each item in a stream has to provide a regular interface."""
        entries = files.get_record_stream(keys, 'topological', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)

    def assertRecordHasContent(self, record, bytes):
        """Assert that record has the bytes bytes."""
        self.assertEqual(bytes, record.get_bytes_as('fulltext'))
        self.assertEqual(bytes, ''.join(record.get_bytes_as('chunked')))
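
    # Note: get_bytes_as('fulltext') returns a single string, while
    # get_bytes_as('chunked') returns an iterable of strings, which is why
    # the chunked form is joined before comparing above.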

    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
        files = self.get_versionedfiles()
        key = self.get_simple_key('foo')
        files.add_lines(key, (), ['my text\n', 'content'])
        stream = files.get_record_stream([key], 'unordered', False)
        record = stream.next()
        if record.storage_kind in ('chunked', 'fulltext'):
            # chunked and fulltext representations are for direct use not wire
            # serialisation: check they are able to be used directly. To send
            # such records over the wire translation will be needed.
            self.assertRecordHasContent(record, "my text\ncontent")
        else:
            bytes = [record.get_bytes_as(record.storage_kind)]
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
            source_record = record
            records = []
            for record in network_stream:
                records.append(record)
                self.assertEqual(source_record.storage_kind,
                    record.storage_kind)
                self.assertEqual(source_record.parents, record.parents)
                self.assertEqual(
                    source_record.get_bytes_as(source_record.storage_kind),
                    record.get_bytes_as(record.storage_kind))
            self.assertEqual(1, len(records))

    def assertStreamMetaEqual(self, records, expected, stream):
        """Assert that streams expected and stream have the same records.

        :param records: A list to collect the seen records.
        :return: A generator of the records in stream.
        """
        # We make assertions during copying to catch things early for
        # easier debugging.
        for record, ref_record in izip(stream, expected):
            records.append(record)
            self.assertEqual(ref_record.key, record.key)
            self.assertEqual(ref_record.storage_kind, record.storage_kind)
            self.assertEqual(ref_record.parents, record.parents)
            yield record

    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
        stream):
        """Convert a stream to a bytes iterator.

        :param skipped_records: A list with one element to increment when a
            record is skipped.
        :param full_texts: A dict from key->fulltext representation, for
            checking chunked or fulltext stored records.
        :param stream: A record_stream.
        :return: An iterator over the bytes of each record.
        """
        for record in stream:
            if record.storage_kind in ('chunked', 'fulltext'):
                skipped_records[0] += 1
                # check the content is correct for direct use.
                self.assertRecordHasContent(record, full_texts[record.key])
            else:
                yield record.get_bytes_as(record.storage_kind)

    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key: "my text\ncontent",
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so we can
        # check the content was carried across correctly without doing delta
        # inspection.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(2, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied, all of it must have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_native_formats_are_wire_ready_delta(self):
        # copy a delta over the wire
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Copy the basis text across so we can reconstruct the delta during
        # insertion into target.
        target_files.insert_record_stream(files.get_record_stream([key],
            'unordered', False))
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so we can
        # check the content was carried across correctly without doing delta
        # inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied, all of it must have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_wire_ready_delta_closure_included(self):
        # copy a delta over the wire with the ability to get its full text.
        files = self.get_versionedfiles()
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        # insert the stream from the network into a versioned files object so we can
        # check the content was carried across correctly without doing delta
        # inspection during check_stream.
        records = []
        for record in self.assertStreamMetaEqual(records, ref, network_stream):
            # we have to be able to get the full text out:
            self.assertRecordHasContent(record, full_texts[record.key])
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])

    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_long_parent_chain_out_of_order(self):
        """An out of order stream can either error or work."""
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        # Create a reasonably long chain of records based on each other, where
        # most will be deltas.
        source = self.get_versionedfiles('source')
        parents = ()
        keys = []
        content = [('same same %d\n' % n) for n in range(500)]
        for letter in 'abcdefghijklmnopqrstuvwxyz':
            key = ('key-' + letter,)
            if self.key_length == 2:
                key = ('prefix',) + key
            content.append('content for ' + letter + '\n')
            source.add_lines(key, parents, content)
            keys.append(key)
            parents = (key,)
        # Create a stream of these records, excluding the first record that the
        # rest ultimately depend upon, and insert it into a new vf.
        streams = []
        for key in reversed(keys):
            streams.append(source.get_record_stream([key], 'unordered', False))
        deltas = chain(*streams[:-1])
        files = self.get_versionedfiles()
        try:
            files.insert_record_stream(deltas)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            # Must only report either just the first key as a missing parent,
            # or no key as missing (for nodelta scenarios).
            missing = set(files.get_missing_compression_parent_keys())
            missing.discard(keys[0])
            self.assertEqual(set(), missing)

    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.
        """
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
            nograph=False, left_only=False)
        return source

    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis. In the meantime a record missing its basis is
        not added.
        """
        source = self.get_knit_delta_source()
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        entries = source.get_record_stream(keys, 'unordered', False)
        files = self.get_versionedfiles()
        if self.support_partial_insertion:
            self.assertEqual([],
                list(files.get_missing_compression_parent_keys()))
            files.insert_record_stream(entries)
            missing_bases = files.get_missing_compression_parent_keys()
            self.assertEqual(set([self.get_simple_key('left')]),
                set(missing_bases))
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
        else:
            self.assertRaises(
                errors.RevisionNotPresent, files.insert_record_stream, entries)
            files.check()

    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis. That basis can be added in a second
        insert_record_stream call that does not need to repeat records present
        in the previous stream. The record(s) that required that basis are
        fully inserted once their basis is no longer missing.
        """
        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        entries = source.get_record_stream([self.get_simple_key('origin'),
            self.get_simple_key('merged')], 'unordered', False)
        files = self.get_versionedfiles()
        files.insert_record_stream(entries)
        missing_bases = files.get_missing_compression_parent_keys()
        self.assertEqual(set([self.get_simple_key('left')]),
            set(missing_bases))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        merged_key = self.get_simple_key('merged')
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        # Add the full delta closure of the missing records
        missing_entries = source.get_record_stream(
            missing_bases, 'unordered', True)
        files.insert_record_stream(missing_entries)
        # Now 'merged' is fully inserted (and a commit would succeed).
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())

        self.assertEqual({}, files.get_parent_map([]))

    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equal set of the lines added by
        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
            [('Walking content', 0, 5),
             ('Walking content', 1, 5),
             ('Walking content', 2, 5),
             ('Walking content', 3, 5),
             ('Walking content', 4, 5),
             ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(