1470
1468
transport.mkdir('.')
1471
1469
files = self.factory(transport)
1472
1470
if self.cleanup is not None:
1473
self.addCleanup(self.cleanup, files)
1471
self.addCleanup(lambda:self.cleanup(files))
1476
def get_simple_key(self, suffix):
    """Return a key for the object under test.

    :param suffix: The final (file-id-like) element of the key.
    :return: A 1-tuple key when the scenario uses unprefixed keys, or a
        2-tuple ('FileA', suffix) when the scenario uses prefixed keys.
    """
    if self.key_length == 1:
        # Unprefixed scenario: the suffix alone is the whole key.
        return (suffix,)
    else:
        # Prefixed scenario: prepend a fixed file prefix.
        return ('FileA',) + (suffix,)
1483
def test_add_lines(self):
    """add_lines stores texts and they become visible via keys()/streams."""
    f = self.get_versionedfiles()
    key0 = self.get_simple_key('r0')
    key1 = self.get_simple_key('r1')
    key2 = self.get_simple_key('r2')
    keyf = self.get_simple_key('foo')
    f.add_lines(key0, [], ['a\n', 'b\n'])
    if self.graph:
        # Graph-aware stores record the parent relationship.
        f.add_lines(key1, [key0], ['b\n', 'c\n'])
    else:
        # Graphless stores take no parents.
        f.add_lines(key1, [], ['b\n', 'c\n'])
    keys = f.keys()
    self.assertTrue(key0 in keys)
    self.assertTrue(key1 in keys)
    # Fulltexts retrieved from the stream must round-trip exactly.
    records = []
    for record in f.get_record_stream([key0, key1], 'unordered', True):
        records.append((record.key, record.get_bytes_as('fulltext')))
    # 'unordered' gives no ordering guarantee, so sort before comparing.
    records.sort()
    self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
1503
def test__add_text(self):
    """_add_text stores texts and they become visible via keys()/streams."""
    f = self.get_versionedfiles()
    key0 = self.get_simple_key('r0')
    key1 = self.get_simple_key('r1')
    key2 = self.get_simple_key('r2')
    keyf = self.get_simple_key('foo')
    f._add_text(key0, [], 'a\nb\n')
    if self.graph:
        # Graph-aware stores record the parent relationship.
        f._add_text(key1, [key0], 'b\nc\n')
    else:
        # Graphless stores take no parents.
        f._add_text(key1, [], 'b\nc\n')
    keys = f.keys()
    self.assertTrue(key0 in keys)
    self.assertTrue(key1 in keys)
    # Fulltexts retrieved from the stream must round-trip exactly.
    records = []
    for record in f.get_record_stream([key0, key1], 'unordered', True):
        records.append((record.key, record.get_bytes_as('fulltext')))
    # 'unordered' gives no ordering guarantee, so sort before comparing.
    records.sort()
    self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
1523
1474
def test_annotate(self):
1524
1475
files = self.get_versionedfiles()
1525
1476
self.get_diamond_files(files)
1559
1510
self.assertRaises(RevisionNotPresent,
1560
1511
files.annotate, prefix + ('missing-key',))
1562
def test_check_no_parameters(self):
1563
files = self.get_versionedfiles()
1565
def test_check_progressbar_parameter(self):
1566
"""A progress bar can be supplied because check can be a generator."""
1567
pb = ui.ui_factory.nested_progress_bar()
1568
self.addCleanup(pb.finished)
1569
files = self.get_versionedfiles()
1570
files.check(progress_bar=pb)
1572
def test_check_with_keys_becomes_generator(self):
    """check(keys=...) returns a record stream covering those keys."""
    files = self.get_versionedfiles()
    self.get_diamond_files(files)
    keys = files.keys()
    entries = files.check(keys=keys)
    seen = set()
    # Texts output should be fulltexts.
    self.capture_stream(files, entries, seen.add,
        files.get_parent_map(keys), require_fulltext=True)
    # All texts should be output.
    self.assertEqual(set(keys), seen)
1584
def test_clear_cache(self):
1585
files = self.get_versionedfiles()
1588
1513
def test_construct(self):
1589
1514
"""Each parameterised test can be constructed on a transport."""
1590
1515
files = self.get_versionedfiles()
1595
1520
trailing_eol=trailing_eol, nograph=not self.graph,
1596
1521
left_only=left_only, nokeys=nokeys)
1598
def _add_content_nostoresha(self, add_lines):
1523
def test_add_lines_nostoresha(self):
1599
1524
"""When nostore_sha is supplied using old content raises."""
1600
1525
vf = self.get_versionedfiles()
1601
1526
empty_text = ('a', [])
1603
1528
sample_text_no_nl = ('c', ["foo\n", "bar"])
1605
1530
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
1607
sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
1610
sha, _, _ = vf._add_text(self.get_simple_key(version), [],
1531
sha, _, _ = vf.add_lines(self.get_simple_key(version), [], lines)
1612
1532
shas.append(sha)
1613
1533
# we now have a copy of all the lines in the vf.
1614
1534
for sha, (version, lines) in zip(
1617
1537
self.assertRaises(errors.ExistingContent,
1618
1538
vf.add_lines, new_key, [], lines,
1619
1539
nostore_sha=sha)
1620
self.assertRaises(errors.ExistingContent,
1621
vf._add_text, new_key, [], ''.join(lines),
1623
1540
# and no new version should have been added.
1624
1541
record = vf.get_record_stream([new_key], 'unordered', True).next()
1625
1542
self.assertEqual('absent', record.storage_kind)
1627
def test_add_lines_nostoresha(self):
    """nostore_sha via add_lines raises on already-present content."""
    self._add_content_nostoresha(add_lines=True)
1630
def test__add_text_nostoresha(self):
    """nostore_sha via _add_text raises on already-present content."""
    self._add_content_nostoresha(add_lines=False)
1633
1544
def test_add_lines_return(self):
1634
1545
files = self.get_versionedfiles()
1635
1546
# save code by using the stock data insertion helper.
1742
1653
f.get_record_stream([key_b], 'unordered', True
1743
1654
).next().get_bytes_as('fulltext'))
1745
def test_get_known_graph_ancestry(self):
    """get_known_graph_ancestry returns a KnownGraph of the ancestry."""
    f = self.get_versionedfiles()
    if not self.graph:
        raise TestNotApplicable('ancestry info only relevant with graph.')
    key_a = self.get_simple_key('a')
    key_b = self.get_simple_key('b')
    key_c = self.get_simple_key('c')
    # A
    # |\
    # | B
    # |/
    # C
    f.add_lines(key_a, [], ['\n'])
    f.add_lines(key_b, [key_a], ['\n'])
    f.add_lines(key_c, [key_a, key_b], ['\n'])
    kg = f.get_known_graph_ancestry([key_c])
    self.assertIsInstance(kg, _mod_graph.KnownGraph)
    self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
1764
def test_known_graph_with_fallbacks(self):
    """get_known_graph_ancestry includes revisions from fallback stores."""
    f = self.get_versionedfiles('files')
    if not self.graph:
        raise TestNotApplicable('ancestry info only relevant with graph.')
    if getattr(f, 'add_fallback_versioned_files', None) is None:
        raise TestNotApplicable("%s doesn't support fallbacks"
                                % (f.__class__.__name__,))
    key_a = self.get_simple_key('a')
    key_b = self.get_simple_key('b')
    key_c = self.get_simple_key('c')
    # A only in fallback
    # |\
    # | B
    # |/
    # C
    g = self.get_versionedfiles('fallback')
    g.add_lines(key_a, [], ['\n'])
    f.add_fallback_versioned_files(g)
    f.add_lines(key_b, [key_a], ['\n'])
    f.add_lines(key_c, [key_a, key_b], ['\n'])
    kg = f.get_known_graph_ancestry([key_c])
    self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
1787
1656
def test_get_record_stream_empty(self):
1788
1657
"""An empty stream can be requested without error."""
1789
1658
f = self.get_versionedfiles()
1800
1669
'knit-delta-closure', 'knit-delta-closure-ref',
1801
1670
'groupcompress-block', 'groupcompress-block-ref'])
1803
def capture_stream(self, f, entries, on_seen, parents,
1804
require_fulltext=False):
1672
def capture_stream(self, f, entries, on_seen, parents):
1805
1673
"""Capture a stream for testing."""
1806
1674
for factory in entries:
1807
1675
on_seen(factory.key)
1826
1692
self.capture_stream(files, entries, seen.add, parent_map)
1827
1693
self.assertEqual(set(keys), seen)
1695
def get_simple_key(self, suffix):
    """Return a key for the object under test.

    :param suffix: The final (file-id-like) element of the key.
    :return: A 1-tuple key when the scenario uses unprefixed keys, or a
        2-tuple ('FileA', suffix) when the scenario uses prefixed keys.
    """
    if self.key_length == 1:
        # Unprefixed scenario: the suffix alone is the whole key.
        return (suffix,)
    else:
        # Prefixed scenario: prepend a fixed file prefix.
        return ('FileA',) + (suffix,)
1829
1702
def get_keys_and_sort_order(self):
1830
1703
"""Get diamond test keys list, and their sort ordering."""
1831
1704
if self.key_length == 1:
2197
def test_get_annotator(self):
2198
files = self.get_versionedfiles()
2199
self.get_diamond_files(files)
2200
origin_key = self.get_simple_key('origin')
2201
base_key = self.get_simple_key('base')
2202
left_key = self.get_simple_key('left')
2203
right_key = self.get_simple_key('right')
2204
merged_key = self.get_simple_key('merged')
2205
# annotator = files.get_annotator()
2206
# introduced full text
2207
origins, lines = files.get_annotator().annotate(origin_key)
2208
self.assertEqual([(origin_key,)], origins)
2209
self.assertEqual(['origin\n'], lines)
2211
origins, lines = files.get_annotator().annotate(base_key)
2212
self.assertEqual([(base_key,)], origins)
2214
origins, lines = files.get_annotator().annotate(merged_key)
2223
# Without a graph everything is new.
2230
self.assertRaises(RevisionNotPresent,
2231
files.get_annotator().annotate, self.get_simple_key('missing-key'))
2233
2070
def test_get_parent_map(self):
2234
2071
files = self.get_versionedfiles()
2235
2072
if self.key_length == 1:
2439
2276
self.assertIdenticalVersionedFile(source, files)
2441
def test_insert_record_stream_long_parent_chain_out_of_order(self):
2442
"""An out of order stream can either error or work."""
2444
raise TestNotApplicable('ancestry info only relevant with graph.')
2445
# Create a reasonably long chain of records based on each other, where
2446
# most will be deltas.
2447
source = self.get_versionedfiles('source')
2450
content = [('same same %d\n' % n) for n in range(500)]
2451
for letter in 'abcdefghijklmnopqrstuvwxyz':
2452
key = ('key-' + letter,)
2453
if self.key_length == 2:
2454
key = ('prefix',) + key
2455
content.append('content for ' + letter + '\n')
2456
source.add_lines(key, parents, content)
2459
# Create a stream of these records, excluding the first record that the
2460
# rest ultimately depend upon, and insert it into a new vf.
2462
for key in reversed(keys):
2463
streams.append(source.get_record_stream([key], 'unordered', False))
2464
deltas = chain(*streams[:-1])
2465
files = self.get_versionedfiles()
2467
files.insert_record_stream(deltas)
2468
except RevisionNotPresent:
2469
# Must not have corrupted the file.
2472
# Must only report either just the first key as a missing parent,
2473
# no key as missing (for nodelta scenarios).
2474
missing = set(files.get_missing_compression_parent_keys())
2475
missing.discard(keys[0])
2476
self.assertEqual(set(), missing)
2478
2278
def get_knit_delta_source(self):
2479
2279
"""Get a source that can produce a stream with knit delta records,
2480
2280
regardless of this test's scenario.
2747
2547
self.assertRaises(NotImplementedError,
2748
2548
self.texts.add_mpdiffs, [])
2750
def test_check_noerrors(self):
    """check() reports no errors on a valid store."""
    self.assertTrue(self.texts.check())
2753
2553
def test_insert_record_stream(self):
2754
2554
self.assertRaises(NotImplementedError, self.texts.insert_record_stream,