        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.
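
        # InstrumentedProgress just records each update() call it receives,
        # so the test can make assertions about the reported progress later.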

        class InstrumentedProgress(progress.ProgressTask):

            def __init__(self):
                progress.ProgressTask.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(
            self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())

    def test_readonly_http_works_with_feeling(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # now with feeling.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(
            self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        self.assertEqual(['1', '2'], readonly_vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)
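
    # Both tests reopen the same directory over the readonly HTTP transport,
    # so the readonly copy must expose exactly the versions that were written
    # through the writable versioned file.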

        transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(self.cleanup, files)
        return files

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)
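
    # With key_length == 1 the key is a plain 1-tuple such as ('r0',); with
    # longer keys the suffix gets a 'FileA' prefix, e.g. ('FileA', 'r0').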

    def test_add_lines(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f.add_lines(key0, [], ['a\n', 'b\n'])
        if self.graph:
            f.add_lines(key1, [key0], ['b\n', 'c\n'])
        else:
            f.add_lines(key1, [], ['b\n', 'c\n'])
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
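
    # The final True asks get_record_stream for the delta closure, so every
    # record can be expanded with get_bytes_as('fulltext'); 'unordered' means
    # records may arrive in any order, hence the sort before comparing.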

    def test__add_text(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f._add_text(key0, [], 'a\nb\n')
        if self.graph:
            f._add_text(key1, [key0], 'b\nc\n')
        else:
            f._add_text(key1, [], 'b\nc\n')
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)

        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))

    def test_check_no_parameters(self):
        files = self.get_versionedfiles()

    def test_check_progressbar_parameter(self):
        """A progress bar can be supplied because check can be a generator."""
        pb = ui.ui_factory.nested_progress_bar()
        self.addCleanup(pb.finished)
        files = self.get_versionedfiles()
        files.check(progress_bar=pb)

    def test_check_with_keys_becomes_generator(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys = files.keys()
        entries = files.check(keys=keys)
        seen = set()
        # Texts output should be fulltexts.
        self.capture_stream(files, entries, seen.add,
            files.get_parent_map(keys), require_fulltext=True)
        # All texts should be output.
        self.assertEqual(set(keys), seen)
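
    # check(keys=...) is expected to hand the checked texts back as a record
    # stream; capture_stream() consumes it, and require_fulltext insists that
    # each record can be expanded to a fulltext.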

    def test_clear_cache(self):
        files = self.get_versionedfiles()
        files.clear_cache()

    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
        nokeys=False):
        return get_diamond_files(files, self.key_length,
            trailing_eol=trailing_eol, nograph=not self.graph,
            left_only=left_only, nokeys=nokeys)

    def _add_content_nostoresha(self, add_lines):
        """When nostore_sha is supplied, adding old content raises ExistingContent."""
        vf = self.get_versionedfiles()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            if add_lines:
                sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
                                         lines)
            else:
                sha, _, _ = vf._add_text(self.get_simple_key(version), [],
                                         ''.join(lines))
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + "2")
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, new_key, [], lines,
                nostore_sha=sha)
            self.assertRaises(errors.ExistingContent,
                vf._add_text, new_key, [], ''.join(lines),
                nostore_sha=sha)
            # and no new version should have been added.
            record = vf.get_record_stream([new_key], 'unordered', True).next()
            self.assertEqual('absent', record.storage_kind)

    def test_add_lines_nostoresha(self):
        self._add_content_nostoresha(add_lines=True)

    def test__add_text_nostoresha(self):
        self._add_content_nostoresha(add_lines=False)
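
    # nostore_sha lets add_lines/_add_text callers pass the SHA-1 of text they
    # already hold; if the new text hashes to that value the store raises
    # ExistingContent instead of storing a duplicate.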

    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.

            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_get_known_graph_ancestry(self):
        f = self.get_versionedfiles()
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        key_a = self.get_simple_key('a')
        key_b = self.get_simple_key('b')
        key_c = self.get_simple_key('c')
        # A
        # |\
        # | B
        # |/
        # C
        f.add_lines(key_a, [], ['\n'])
        f.add_lines(key_b, [key_a], ['\n'])
        f.add_lines(key_c, [key_a, key_b], ['\n'])
        kg = f.get_known_graph_ancestry([key_c])
        self.assertIsInstance(kg, _mod_graph.KnownGraph)
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
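
    # get_known_graph_ancestry() returns a KnownGraph covering the transitive
    # ancestry of the requested keys; topo_sort() lists parents before
    # children, hence A, B, C.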

    def test_known_graph_with_fallbacks(self):
        f = self.get_versionedfiles('files')
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        if getattr(f, 'add_fallback_versioned_files', None) is None:
            raise TestNotApplicable("%s doesn't support fallbacks"
                                    % (f.__class__.__name__,))
        key_a = self.get_simple_key('a')
        key_b = self.get_simple_key('b')
        key_c = self.get_simple_key('c')
        # A only in fallback
        # |\
        # | B
        # |/
        # C
        g = self.get_versionedfiles('fallback')
        g.add_lines(key_a, [], ['\n'])
        f.add_fallback_versioned_files(g)
        f.add_lines(key_b, [key_a], ['\n'])
        f.add_lines(key_c, [key_a, key_b], ['\n'])
        kg = f.get_known_graph_ancestry([key_c])
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
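
    # A fallback versioned file is consulted for keys that are missing
    # locally: key_a lives only in 'fallback', yet it still appears in the
    # ancestry graph computed through 'files'.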

    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        f = self.get_versionedfiles()

            'knit-delta-closure', 'knit-delta-closure-ref',
            'groupcompress-block', 'groupcompress-block-ref'])

    def capture_stream(self, f, entries, on_seen, parents,
        require_fulltext=False):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)

        self.capture_stream(files, entries, seen.add, parent_map)
        self.assertEqual(set(keys), seen)

    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:

    def test_get_annotator(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        origin_key = self.get_simple_key('origin')
        base_key = self.get_simple_key('base')
        left_key = self.get_simple_key('left')
        right_key = self.get_simple_key('right')
        merged_key = self.get_simple_key('merged')
        # annotator = files.get_annotator()
        # introduced full text
        origins, lines = files.get_annotator().annotate(origin_key)
        self.assertEqual([(origin_key,)], origins)
        self.assertEqual(['origin\n'], lines)
        # a delta
        origins, lines = files.get_annotator().annotate(base_key)
        self.assertEqual([(base_key,)], origins)
        # a merge
        origins, lines = files.get_annotator().annotate(merged_key)
        if self.graph:
            self.assertEqual([
                (base_key,),
                (left_key,),
                (right_key,),
                (merged_key,),
                ], origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (merged_key,),
                (merged_key,),
                (merged_key,),
                (merged_key,),
                ], origins)
        self.assertRaises(RevisionNotPresent,
            files.get_annotator().annotate, self.get_simple_key('missing-key'))
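
    # annotate() returns one origin key per line of the requested text: with a
    # graph each line is attributed to the revision that introduced it, while
    # without a graph every line is attributed to the requested key itself.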

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_long_parent_chain_out_of_order(self):
        """An out of order stream can either error or work."""
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        # Create a reasonably long chain of records based on each other, where
        # most will be deltas.
        source = self.get_versionedfiles('source')
        parents = ()
        keys = []
        content = [('same same %d\n' % n) for n in range(500)]
        for letter in 'abcdefghijklmnopqrstuvwxyz':
            key = ('key-' + letter,)
            if self.key_length == 2:
                key = ('prefix',) + key
            content.append('content for ' + letter + '\n')
            source.add_lines(key, parents, content)
            keys.append(key)
            parents = (key,)
        # Create a stream of these records, excluding the first record that
        # the rest ultimately depend upon, and insert it into a new vf.
        streams = []
        for key in reversed(keys):
            streams.append(source.get_record_stream([key], 'unordered', False))
        deltas = chain(*streams[:-1])
        files = self.get_versionedfiles()
        try:
            files.insert_record_stream(deltas)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            # Must only report either just the first key as a missing parent,
            # or no key as missing (for nodelta scenarios).
            missing = set(files.get_missing_compression_parent_keys())
            missing.discard(keys[0])
            self.assertEqual(set(), missing)
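
    # Implementations that stored the stream as deltas will be missing only
    # the compression parent that was withheld (keys[0]); fulltext-only
    # implementations may legitimately report nothing missing.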

    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.

        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.ProgressTask):

            def __init__(self):
                progress.ProgressTask.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        self.assertRaises(NotImplementedError,
                          self.texts.add_mpdiffs, [])

    def test_check_noerrors(self):
        self.texts.check()

    def test_insert_record_stream(self):
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,