    RevisionNotPresent,
from bzrlib.index import *
from bzrlib.knit import (
    AnnotatedKnitContent,
    KnitAnnotateFactory,
    KnitContent,
    KnitGraphIndex,
    KnitSequenceMatcher,
    KnitVersionedFile,
    PlainKnitContent,
    _PackAccess,
    _StreamAccess,
    _StreamIndex,
    )
from bzrlib.osutils import split_lines
from bzrlib.tests import (
    Feature,
    TestCase,
    TestCaseWithMemoryTransport,
    TestCaseWithTransport,
    )
from bzrlib.transport import get_transport
from bzrlib.transport.memory import MemoryTransport
from bzrlib.tuned_gzip import GzipFile
from bzrlib.util import bencode
from bzrlib.weave import Weave

class _CompiledKnitFeature(Feature):

    def _probe(self):
        try:
            import bzrlib._knit_load_data_c
        except ImportError:
            return False
        return True

    def feature_name(self):
        return 'bzrlib._knit_load_data_c'

CompiledKnitFeature = _CompiledKnitFeature()
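# CompiledKnitFeature only probes whether the optional bzrlib._knit_load_data_c
# extension can be imported, so tests requiring the compiled knit index parser
# can be skipped automatically on builds where it is not available.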

class KnitContentTestsMixin(object):

    def test_constructor(self):
        content = self._make_content([])

    def test_text(self):
        content = self._make_content([])
        self.assertEqual(content.text(), [])

        content = self._make_content([("origin1", "text1"), ("origin2", "text2")])
        self.assertEqual(content.text(), ["text1", "text2"])

    def test_copy(self):
        content = self._make_content([("origin1", "text1"), ("origin2", "text2")])
        copy = content.copy()
        self.assertIsInstance(copy, content.__class__)
        self.assertEqual(copy.annotate(), content.annotate())

    def assertDerivedBlocksEqual(self, source, target, noeol=False):
        """Assert that the derived matching blocks match real output"""
        source_lines = source.splitlines(True)
        target_lines = target.splitlines(True)
        def nl(line):
            if noeol and not line.endswith('\n'):
                return line + '\n'
            else:
                return line
        source_content = self._make_content([(None, nl(l)) for l in source_lines])
        target_content = self._make_content([(None, nl(l)) for l in target_lines])
        line_delta = source_content.line_delta(target_content)
        delta_blocks = list(KnitContent.get_line_delta_blocks(line_delta,
            source_lines, target_lines))
        matcher = KnitSequenceMatcher(None, source_lines, target_lines)
        matcher_blocks = list(list(matcher.get_matching_blocks()))
        self.assertEqual(matcher_blocks, delta_blocks)

    def test_get_line_delta_blocks(self):
        self.assertDerivedBlocksEqual('a\nb\nc\n', 'q\nc\n')
        self.assertDerivedBlocksEqual(TEXT_1, TEXT_1)
        self.assertDerivedBlocksEqual(TEXT_1, TEXT_1A)
        self.assertDerivedBlocksEqual(TEXT_1, TEXT_1B)
        self.assertDerivedBlocksEqual(TEXT_1B, TEXT_1A)
        self.assertDerivedBlocksEqual(TEXT_1A, TEXT_1B)
        self.assertDerivedBlocksEqual(TEXT_1A, '')
        self.assertDerivedBlocksEqual('', TEXT_1A)
        self.assertDerivedBlocksEqual('', '')
        self.assertDerivedBlocksEqual('a\nb\nc', 'a\nb\nc\nd')

    def test_get_line_delta_blocks_noeol(self):
        """Handle historical knit deltas safely

        Some existing knit deltas don't consider the last line to differ
        when the only difference is whether it has a final newline.

        New knit deltas appear to always consider the last line to differ
        in this case.
        """
        self.assertDerivedBlocksEqual('a\nb\nc', 'a\nb\nc\nd\n', noeol=True)
        self.assertDerivedBlocksEqual('a\nb\nc\nd\n', 'a\nb\nc', noeol=True)
        self.assertDerivedBlocksEqual('a\nb\nc\n', 'a\nb\nc', noeol=True)
        self.assertDerivedBlocksEqual('a\nb\nc', 'a\nb\nc\n', noeol=True)

class TestPlainKnitContent(TestCase, KnitContentTestsMixin):

    def _make_content(self, lines):
        annotated_content = AnnotatedKnitContent(lines)
        return PlainKnitContent(annotated_content.text(), 'bogus')
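    # PlainKnitContent keeps no per-line origin information, so the 'bogus'
    # version id supplied above is what annotate() reports for every line;
    # the annotate tests below rely on that substitution.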

    def test_annotate(self):
        content = self._make_content([])
        self.assertEqual(content.annotate(), [])

        content = self._make_content([("origin1", "text1"), ("origin2", "text2")])
        self.assertEqual(content.annotate(),
            [("bogus", "text1"), ("bogus", "text2")])

    def test_annotate_iter(self):
        content = self._make_content([])
        it = content.annotate_iter()
        self.assertRaises(StopIteration, it.next)

        content = self._make_content([("bogus", "text1"), ("bogus", "text2")])
        it = content.annotate_iter()
        self.assertEqual(it.next(), ("bogus", "text1"))
        self.assertEqual(it.next(), ("bogus", "text2"))
        self.assertRaises(StopIteration, it.next)

    def test_line_delta(self):
        content1 = self._make_content([("", "a"), ("", "b")])
        content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
        self.assertEqual(content1.line_delta(content2),
            [(1, 2, 2, ["a", "c"])])

    def test_line_delta_iter(self):
        content1 = self._make_content([("", "a"), ("", "b")])
        content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
        it = content1.line_delta_iter(content2)
        self.assertEqual(it.next(), (1, 2, 2, ["a", "c"]))
        self.assertRaises(StopIteration, it.next)


class TestAnnotatedKnitContent(TestCase, KnitContentTestsMixin):

    def _make_content(self, lines):
        return AnnotatedKnitContent(lines)

    def test_annotate(self):
        content = self._make_content([])
        self.assertEqual(content.annotate(), [])

        content = self._make_content([("origin1", "text1"), ("origin2", "text2")])
        self.assertEqual(content.annotate(),
            [("origin1", "text1"), ("origin2", "text2")])

    def test_annotate_iter(self):
        content = self._make_content([])
        it = content.annotate_iter()
        self.assertRaises(StopIteration, it.next)

        content = self._make_content([("origin1", "text1"), ("origin2", "text2")])
        it = content.annotate_iter()
        self.assertEqual(it.next(), ("origin1", "text1"))
        self.assertEqual(it.next(), ("origin2", "text2"))
        self.assertRaises(StopIteration, it.next)

    def test_line_delta(self):
        content1 = self._make_content([("", "a"), ("", "b")])
        content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
        self.assertEqual(content1.line_delta(content2),
            [(1, 2, 2, [("", "a"), ("", "c")])])

    def test_line_delta_iter(self):
        content1 = self._make_content([("", "a"), ("", "b")])
        content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
        it = content1.line_delta_iter(content2)
        self.assertEqual(it.next(), (1, 2, 2, [("", "a"), ("", "c")]))
        self.assertRaises(StopIteration, it.next)
        for plan_line, expected_line in zip(plan, AB_MERGE):
            self.assertEqual(plan_line, expected_line)

    def test_get_stream_empty(self):
        """Get a data stream for an empty knit file."""
        k1 = self.make_test_knit()
        format, data_list, reader_callable = k1.get_data_stream([])
        self.assertEqual('knit-plain', format)
        self.assertEqual([], data_list)
        content = reader_callable(None)
        self.assertEqual('', content)
        self.assertIsInstance(content, str)
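        # reader_callable(None) drains whatever remains of the stream, so for
        # an empty knit it yields an empty byte string rather than None.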

    def test_get_stream_one_version(self):
        """Get a data stream for a single record out of a knit containing just
        that record.
        """
        k1 = self.make_test_knit()
        test_data = [
            ('text-a', [], TEXT_1),
            ]
        expected_data_list = [
            # version, options, length, parents
            ('text-a', ['fulltext'], 122, []),
            ]
        for version_id, parents, lines in test_data:
            k1.add_lines(version_id, parents, split_lines(lines))

        format, data_list, reader_callable = k1.get_data_stream(['text-a'])
        self.assertEqual('knit-plain', format)
        self.assertEqual(expected_data_list, data_list)
        # There's only one record in the knit, so the content should be the
        # entire knit data file's contents.
        self.assertEqual(k1.transport.get_bytes(k1._data._access._filename),
                         reader_callable(None))

    def test_get_stream_get_one_version_of_many(self):
        """Get a data stream for just one version out of a knit containing many
        versions.
        """
        k1 = self.make_test_knit()
        # Insert the same data as test_knit_join, as they seem to cover a range
        # of cases (no parents, one parent, multiple parents).
        test_data = [
            ('text-a', [], TEXT_1),
            ('text-b', ['text-a'], TEXT_1),
            ('text-c', [], TEXT_1),
            ('text-d', ['text-c'], TEXT_1),
            ('text-m', ['text-b', 'text-d'], TEXT_1),
            ]
        expected_data_list = [
            # version, options, length, parents
            ('text-m', ['line-delta'], 84, ['text-b', 'text-d']),
            ]
        for version_id, parents, lines in test_data:
            k1.add_lines(version_id, parents, split_lines(lines))

        format, data_list, reader_callable = k1.get_data_stream(['text-m'])
        self.assertEqual('knit-plain', format)
        self.assertEqual(expected_data_list, data_list)
        self.assertRecordContentEqual(k1, 'text-m', reader_callable(None))

    def test_get_data_stream_unordered_index(self):
        """Get a data stream when the knit index reports versions out of order.

        https://bugs.launchpad.net/bzr/+bug/164637
        """
        k1 = self.make_test_knit()
        test_data = [
            ('text-a', [], TEXT_1),
            ('text-b', ['text-a'], TEXT_1),
            ('text-c', [], TEXT_1),
            ('text-d', ['text-c'], TEXT_1),
            ('text-m', ['text-b', 'text-d'], TEXT_1),
            ]
        for version_id, parents, lines in test_data:
            k1.add_lines(version_id, parents, split_lines(lines))
        # monkey-patch versions method to return out of order, as if coming
        # from multiple independently indexed packs
        original_versions = k1.versions
        k1.versions = lambda: reversed(original_versions())
        expected_data_list = [
            ('text-a', ['fulltext'], 122, []),
            ('text-b', ['line-delta'], 84, ['text-a'])]
        # now check the fulltext is first and the delta second
        format, data_list, _ = k1.get_data_stream(['text-a', 'text-b'])
        self.assertEqual('knit-plain', format)
        self.assertEqual(expected_data_list, data_list)
        # and that's true if we ask for them in the opposite order too
        format, data_list, _ = k1.get_data_stream(['text-b', 'text-a'])
        self.assertEqual(expected_data_list, data_list)
        # also try requesting more versions
        format, data_list, _ = k1.get_data_stream([
            'text-m', 'text-b', 'text-a'])
        self.assertEqual([
            ('text-a', ['fulltext'], 122, []),
            ('text-b', ['line-delta'], 84, ['text-a']),
            ('text-m', ['line-delta'], 84, ['text-b', 'text-d']),
            ], data_list)

    def test_get_stream_ghost_parent(self):
        """Get a data stream for a version with a ghost parent."""
        k1 = self.make_test_knit()
        k1.add_lines('text-a', [], split_lines(TEXT_1))
        k1.add_lines_with_ghosts('text-b', ['text-a', 'text-ghost'],
                                 split_lines(TEXT_1))
        expected_data_list = [
            # version, options, length, parents
            ('text-b', ['line-delta'], 84, ['text-a', 'text-ghost']),
            ]

        format, data_list, reader_callable = k1.get_data_stream(['text-b'])
        self.assertEqual('knit-plain', format)
        self.assertEqual(expected_data_list, data_list)
        self.assertRecordContentEqual(k1, 'text-b', reader_callable(None))

    def test_get_stream_get_multiple_records(self):
        """Get a stream for multiple records of a knit."""
        k1 = self.make_test_knit()
        # Insert the same data as test_knit_join, as they seem to cover a range
        # of cases (no parents, one parent, multiple parents).
        test_data = [
            ('text-a', [], TEXT_1),
            ('text-b', ['text-a'], TEXT_1),
            ('text-c', [], TEXT_1),
            ('text-d', ['text-c'], TEXT_1),
            ('text-m', ['text-b', 'text-d'], TEXT_1),
            ]
        for version_id, parents, lines in test_data:
            k1.add_lines(version_id, parents, split_lines(lines))

        # This test is actually a bit strict as the order in which they're
        # returned is not defined. This matches the current (deterministic)
        # behaviour.
        expected_data_list = [
            # version, options, length, parents
            ('text-d', ['line-delta'], 84, ['text-c']),
            ('text-b', ['line-delta'], 84, ['text-a']),
            ]
        # Note that even though we request the revision IDs in a particular
        # order, the data stream may return them in any order it likes. In this
        # case, they'll be in the order they were inserted into the knit.
        format, data_list, reader_callable = k1.get_data_stream(
            ['text-d', 'text-b'])
        self.assertEqual('knit-plain', format)
        self.assertEqual(expected_data_list, data_list)
        # must match order they're returned
        self.assertRecordContentEqual(k1, 'text-d', reader_callable(84))
        self.assertRecordContentEqual(k1, 'text-b', reader_callable(84))
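        # 84 is the record length reported in expected_data_list, so each call
        # above consumes exactly one line-delta record from the stream.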
        self.assertEqual('', reader_callable(None),
                         "There should be no more bytes left to read.")

    def test_get_stream_all(self):
        """Get a data stream for all the records in a knit.

        This exercises fulltext records, line-delta records, records with
        various numbers of parents, and reading multiple records out of the
        callable. These cases ought to all be exercised individually by the
        other test_get_stream_* tests; this test is basically just paranoia.
        """
        k1 = self.make_test_knit()
        # Insert the same data as test_knit_join, as they seem to cover a range
        # of cases (no parents, one parent, multiple parents).
        test_data = [
            ('text-a', [], TEXT_1),
            ('text-b', ['text-a'], TEXT_1),
            ('text-c', [], TEXT_1),
            ('text-d', ['text-c'], TEXT_1),
            ('text-m', ['text-b', 'text-d'], TEXT_1),
            ]
        for version_id, parents, lines in test_data:
            k1.add_lines(version_id, parents, split_lines(lines))

        # This test is actually a bit strict as the order in which they're
        # returned is not defined. This matches the current (deterministic)
        # behaviour.
        expected_data_list = [
            # version, options, length, parents
            ('text-a', ['fulltext'], 122, []),
            ('text-b', ['line-delta'], 84, ['text-a']),
            ('text-m', ['line-delta'], 84, ['text-b', 'text-d']),
            ('text-c', ['fulltext'], 121, []),
            ('text-d', ['line-delta'], 84, ['text-c']),
            ]
        format, data_list, reader_callable = k1.get_data_stream(
            ['text-a', 'text-b', 'text-c', 'text-d', 'text-m'])
        self.assertEqual('knit-plain', format)
        self.assertEqual(expected_data_list, data_list)
        for version_id, options, length, parents in expected_data_list:
            bytes = reader_callable(length)
            self.assertRecordContentEqual(k1, version_id, bytes)

    def assertKnitFilesEqual(self, knit1, knit2):
        """Assert that the contents of the index and data files of two knits are
        equal.
        """
        self.assertEqual(
            knit1.transport.get_bytes(knit1._data._access._filename),
            knit2.transport.get_bytes(knit2._data._access._filename))
        self.assertEqual(
            knit1.transport.get_bytes(knit1._index._filename),
            knit2.transport.get_bytes(knit2._index._filename))

    def assertKnitValuesEqual(self, left, right):
        """Assert that the texts, annotations and graph of left and right are
        the same.
        """
        self.assertEqual(set(left.versions()), set(right.versions()))
        for version in left.versions():
            self.assertEqual(left.get_parents_with_ghosts(version),
                right.get_parents_with_ghosts(version))
            self.assertEqual(left.get_lines(version),
                right.get_lines(version))
            self.assertEqual(left.annotate(version),
                right.annotate(version))

    def test_insert_data_stream_empty(self):
        """Inserting a data stream with no records should not put any data into
        the knit.
        """
        k1 = self.make_test_knit()
        k1.insert_data_stream(
            (k1.get_format_signature(), [], lambda ignored: ''))
        self.assertEqual('', k1.transport.get_bytes(k1._data._access._filename),
                         "The .knit should be completely empty.")
        self.assertEqual(k1._index.HEADER,
                         k1.transport.get_bytes(k1._index._filename),
                         "The .kndx should have nothing apart from the header.")

    def test_insert_data_stream_one_record(self):
        """Inserting a data stream with one record from a knit with one record
        results in byte-identical files.
        """
        source = self.make_test_knit(name='source')
        source.add_lines('text-a', [], split_lines(TEXT_1))
        data_stream = source.get_data_stream(['text-a'])
        target = self.make_test_knit(name='target')
        target.insert_data_stream(data_stream)
        self.assertKnitFilesEqual(source, target)

    def test_insert_data_stream_annotated_unannotated(self):
        """Inserting an annotated datastream to an unannotated knit works."""
        # case one - full texts.
        source = self.make_test_knit(name='source', annotate=True)
        target = self.make_test_knit(name='target', annotate=False)
        source.add_lines('text-a', [], split_lines(TEXT_1))
        target.insert_data_stream(source.get_data_stream(['text-a']))
        self.assertKnitValuesEqual(source, target)
        # case two - deltas.
        source.add_lines('text-b', ['text-a'], split_lines(TEXT_2))
        target.insert_data_stream(source.get_data_stream(['text-b']))
        self.assertKnitValuesEqual(source, target)

    def test_insert_data_stream_unannotated_annotated(self):
        """Inserting an unannotated datastream to an annotated knit works."""
        # case one - full texts.
        source = self.make_test_knit(name='source', annotate=False)
        target = self.make_test_knit(name='target', annotate=True)
        source.add_lines('text-a', [], split_lines(TEXT_1))
        target.insert_data_stream(source.get_data_stream(['text-a']))
        self.assertKnitValuesEqual(source, target)
        # case two - deltas.
        source.add_lines('text-b', ['text-a'], split_lines(TEXT_2))
        target.insert_data_stream(source.get_data_stream(['text-b']))
        self.assertKnitValuesEqual(source, target)

    def test_insert_data_stream_records_already_present(self):
        """Insert a data stream where some records are already present in the
        target, and some not. Only the new records are inserted.
        """
        source = self.make_test_knit(name='source')
        target = self.make_test_knit(name='target')
        # Insert 'text-a' into both source and target
        source.add_lines('text-a', [], split_lines(TEXT_1))
        target.insert_data_stream(source.get_data_stream(['text-a']))
        # Insert 'text-b' into just the source.
        source.add_lines('text-b', ['text-a'], split_lines(TEXT_1))
        # Get a data stream of both text-a and text-b, and insert it.
        data_stream = source.get_data_stream(['text-a', 'text-b'])
        target.insert_data_stream(data_stream)
        # The source and target will now be identical. This means the text-a
        # record was not added a second time.
        self.assertKnitFilesEqual(source, target)

    def test_insert_data_stream_multiple_records(self):
        """Inserting a data stream of all records from a knit with multiple
        records results in byte-identical files.
        """
        source = self.make_test_knit(name='source')
        source.add_lines('text-a', [], split_lines(TEXT_1))
        source.add_lines('text-b', ['text-a'], split_lines(TEXT_1))
        source.add_lines('text-c', [], split_lines(TEXT_1))
        data_stream = source.get_data_stream(['text-a', 'text-b', 'text-c'])

        target = self.make_test_knit(name='target')
        target.insert_data_stream(data_stream)

        self.assertKnitFilesEqual(source, target)

    def test_insert_data_stream_ghost_parent(self):
        """Insert a data stream with a record that has a ghost parent."""
        # Make a knit with a record, text-a, that has a ghost parent.
        source = self.make_test_knit(name='source')
        source.add_lines_with_ghosts('text-a', ['text-ghost'],
                                     split_lines(TEXT_1))
        data_stream = source.get_data_stream(['text-a'])

        target = self.make_test_knit(name='target')
        target.insert_data_stream(data_stream)

        self.assertKnitFilesEqual(source, target)

        # The target knit object is in a consistent state, i.e. the record we
        # just added is immediately visible.
        self.assertTrue(target.has_version('text-a'))
        self.assertTrue(target.has_ghost('text-ghost'))
        self.assertEqual(split_lines(TEXT_1), target.get_lines('text-a'))

    def test_insert_data_stream_inconsistent_version_lines(self):
        """Inserting a data stream which has different content for a version_id
        than already exists in the knit will raise KnitCorrupt.
        """
        source = self.make_test_knit(name='source')
        target = self.make_test_knit(name='target')
        # Insert a different 'text-a' into both source and target
        source.add_lines('text-a', [], split_lines(TEXT_1))
        target.add_lines('text-a', [], split_lines(TEXT_2))
        # Insert a data stream with conflicting content into the target
        data_stream = source.get_data_stream(['text-a'])
        self.assertRaises(
            errors.KnitCorrupt, target.insert_data_stream, data_stream)

    def test_insert_data_stream_inconsistent_version_parents(self):
        """Inserting a data stream which has different parents for a version_id
        than already exists in the knit will raise KnitCorrupt.
        """
        source = self.make_test_knit(name='source')
        target = self.make_test_knit(name='target')
        # Insert a different 'text-a' into both source and target. They differ
        # only by the parents list, the content is the same.
        source.add_lines_with_ghosts('text-a', [], split_lines(TEXT_1))
        target.add_lines_with_ghosts('text-a', ['a-ghost'], split_lines(TEXT_1))
        # Insert a data stream with conflicting content into the target
        data_stream = source.get_data_stream(['text-a'])
        self.assertRaises(
            errors.KnitCorrupt, target.insert_data_stream, data_stream)

    def test_insert_data_stream_unknown_format(self):
        """A data stream in a different format to the target knit cannot be
        inserted.

        It will raise KnitDataStreamUnknown because the fallback code will fail
        to make a knit. In future we may need KnitDataStreamIncompatible again,
        for more exotic cases.
        """
        data_stream = ('fake-format-signature', [], lambda _: '')
        target = self.make_test_knit(name='target')
        self.assertRaises(
            errors.KnitDataStreamUnknown,
            target.insert_data_stream, data_stream)

    #  * test that a stream of "already present version, then new version"
    #    inserts correctly.

    def assertMadeStreamKnit(self, source_knit, versions, target_knit):
        """Assert that a knit made from a stream is as expected."""
        a_stream = source_knit.get_data_stream(versions)
        expected_data = a_stream[2](None)
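        # Reading the callable above drains the stream, so a second, fresh
        # stream is fetched below to actually construct the knit from.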
        a_stream = source_knit.get_data_stream(versions)
        a_knit = target_knit._knit_from_datastream(a_stream)
        self.assertEqual(source_knit.factory.__class__,
                         a_knit.factory.__class__)
        self.assertIsInstance(a_knit._data._access, _StreamAccess)
        self.assertIsInstance(a_knit._index, _StreamIndex)
        self.assertEqual(a_knit._index.data_list, a_stream[1])
        self.assertEqual(a_knit._data._access.data, expected_data)
        self.assertEqual(a_knit.filename, target_knit.filename)
        self.assertEqual(a_knit.transport, target_knit.transport)
        self.assertEqual(a_knit._index, a_knit._data._access.stream_index)
        self.assertEqual(target_knit, a_knit._data._access.backing_knit)
        self.assertIsInstance(a_knit._data._access.orig_factory,
            source_knit.factory.__class__)

    def test__knit_from_data_stream_empty(self):
        """Create a knit object from a datastream."""
        annotated = self.make_test_knit(name='source', annotate=True)
        plain = self.make_test_knit(name='target', annotate=False)
        # case 1: annotated source
        self.assertMadeStreamKnit(annotated, [], annotated)
        self.assertMadeStreamKnit(annotated, [], plain)
        # case 2: plain source
        self.assertMadeStreamKnit(plain, [], annotated)
        self.assertMadeStreamKnit(plain, [], plain)

    def test__knit_from_data_stream_unknown_format(self):
        annotated = self.make_test_knit(name='source', annotate=True)
        self.assertRaises(errors.KnitDataStreamUnknown,
            annotated._knit_from_datastream, ("unknown", None, None))

Banana cup cakes:

        t.put_bytes('test.kndx', '# not really a knit header\n\n')
        self.assertRaises(KnitHeaderError, self.make_test_knit)


class TestGraphIndexKnit(KnitTests):
    """Tests for knits using a GraphIndex rather than a KnitIndex."""

    def make_g_index(self, name, ref_lists=0, nodes=[]):
        builder = GraphIndexBuilder(ref_lists)
        for node, references, value in nodes:
            builder.add_node(node, references, value)
        stream = builder.finish()
        trans = self.get_transport()
        size = trans.put_file(name, stream)
        return GraphIndex(trans, name, size)

    def two_graph_index(self, deltas=False, catch_adds=False):
        """Build a two-graph index.

        :param deltas: If true, use underlying indices with two node-ref
            lists and 'parent' set to a delta-compressed against tail.
        """
        # build a complex graph across several indices.
        if deltas:
            # delta compression in the index
            index1 = self.make_g_index('1', 2, [
                (('tip', ), 'N0 100', ([('parent', )], [], )),
                (('tail', ), '', ([], []))])
            index2 = self.make_g_index('2', 2, [
                (('parent', ), ' 100 78', ([('tail', ), ('ghost', )], [('tail', )])),
                (('separate', ), '', ([], []))])
        else:
            # just blob location and graph in the index.
            index1 = self.make_g_index('1', 1, [
                (('tip', ), 'N0 100', ([('parent', )], )),
                (('tail', ), '', ([], ))])
            index2 = self.make_g_index('2', 1, [
                (('parent', ), ' 100 78', ([('tail', ), ('ghost', )], )),
                (('separate', ), '', ([], ))])
        combined_index = CombinedGraphIndex([index1, index2])
        if catch_adds:
            self.combined_index = combined_index
            self.caught_entries = []
            add_callback = self.catch_add
        else:
            add_callback = None
        return KnitGraphIndex(combined_index, deltas=deltas,
            add_callback=add_callback)
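    # The node values above encode knit record details: a leading 'N' marks a
    # no-eol fulltext, and the two numbers are the byte offset and length, so
    # 'tip' is stored at 0-100 and 'parent' at 100-178.  The tests below check
    # that KnitGraphIndex decodes exactly this information.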

    def test_get_graph(self):
        index = self.two_graph_index()
        self.assertEqual(set([
            ('tip', ('parent', )),
            ('tail', ()),
            ('parent', ('tail', 'ghost')),
            ('separate', ()),
            ]), set(index.get_graph()))

    def test_get_ancestry(self):
        # get_ancestry is defined as eliding ghosts, not erroring.
        index = self.two_graph_index()
        self.assertEqual([], index.get_ancestry([]))
        self.assertEqual(['separate'], index.get_ancestry(['separate']))
        self.assertEqual(['tail'], index.get_ancestry(['tail']))
        self.assertEqual(['tail', 'parent'], index.get_ancestry(['parent']))
        self.assertEqual(['tail', 'parent', 'tip'], index.get_ancestry(['tip']))
        self.assertTrue(index.get_ancestry(['tip', 'separate']) in
            (['tail', 'parent', 'tip', 'separate'],
             ['separate', 'tail', 'parent', 'tip'],
            ))
        # and without topo_sort
        self.assertEqual(set(['separate']),
            set(index.get_ancestry(['separate'], topo_sorted=False)))
        self.assertEqual(set(['tail']),
            set(index.get_ancestry(['tail'], topo_sorted=False)))
        self.assertEqual(set(['tail', 'parent']),
            set(index.get_ancestry(['parent'], topo_sorted=False)))
        self.assertEqual(set(['tail', 'parent', 'tip']),
            set(index.get_ancestry(['tip'], topo_sorted=False)))
        self.assertEqual(set(['separate', 'tail', 'parent', 'tip']),
            set(index.get_ancestry(['tip', 'separate'])))
        # asking for a ghost makes it go boom.
        self.assertRaises(errors.RevisionNotPresent, index.get_ancestry, ['ghost'])

    def test_get_ancestry_with_ghosts(self):
        index = self.two_graph_index()
        self.assertEqual([], index.get_ancestry_with_ghosts([]))
        self.assertEqual(['separate'], index.get_ancestry_with_ghosts(['separate']))
        self.assertEqual(['tail'], index.get_ancestry_with_ghosts(['tail']))
        self.assertTrue(index.get_ancestry_with_ghosts(['parent']) in
            (['tail', 'ghost', 'parent'],
             ['ghost', 'tail', 'parent'],
            ))
        self.assertTrue(index.get_ancestry_with_ghosts(['tip']) in
            (['tail', 'ghost', 'parent', 'tip'],
             ['ghost', 'tail', 'parent', 'tip'],
            ))
        self.assertTrue(index.get_ancestry_with_ghosts(['tip', 'separate']) in
            (['tail', 'ghost', 'parent', 'tip', 'separate'],
             ['ghost', 'tail', 'parent', 'tip', 'separate'],
             ['separate', 'tail', 'ghost', 'parent', 'tip'],
             ['separate', 'ghost', 'tail', 'parent', 'tip'],
            ))
        # asking for a ghost makes it go boom.
        self.assertRaises(errors.RevisionNotPresent, index.get_ancestry_with_ghosts, ['ghost'])

    def test_num_versions(self):
        index = self.two_graph_index()
        self.assertEqual(4, index.num_versions())

    def test_get_versions(self):
        index = self.two_graph_index()
        self.assertEqual(set(['tail', 'tip', 'parent', 'separate']),
            set(index.get_versions()))

    def test_has_version(self):
        index = self.two_graph_index()
        self.assertTrue(index.has_version('tail'))
        self.assertFalse(index.has_version('ghost'))

    def test_get_position(self):
        index = self.two_graph_index()
        self.assertEqual((index._graph_index._indices[0], 0, 100), index.get_position('tip'))
        self.assertEqual((index._graph_index._indices[1], 100, 78), index.get_position('parent'))

    def test_get_method_deltas(self):
        index = self.two_graph_index(deltas=True)
        self.assertEqual('fulltext', index.get_method('tip'))
        self.assertEqual('line-delta', index.get_method('parent'))

    def test_get_method_no_deltas(self):
        # check that the parent-history lookup is ignored with deltas=False.
        index = self.two_graph_index(deltas=False)
        self.assertEqual('fulltext', index.get_method('tip'))
        self.assertEqual('fulltext', index.get_method('parent'))

    def test_get_options_deltas(self):
        index = self.two_graph_index(deltas=True)
        self.assertEqual(['fulltext', 'no-eol'], index.get_options('tip'))
        self.assertEqual(['line-delta'], index.get_options('parent'))

    def test_get_options_no_deltas(self):
        # check that the parent-history lookup is ignored with deltas=False.
        index = self.two_graph_index(deltas=False)
        self.assertEqual(['fulltext', 'no-eol'], index.get_options('tip'))
        self.assertEqual(['fulltext'], index.get_options('parent'))

    def test_get_parents(self):
        # get_parents ignores ghosts
        index = self.two_graph_index()
        self.assertEqual(('tail', ), index.get_parents('parent'))
        # and errors on ghosts.
        self.assertRaises(errors.RevisionNotPresent,
            index.get_parents, 'ghost')

    def test_get_parents_with_ghosts(self):
        index = self.two_graph_index()
        self.assertEqual(('tail', 'ghost'), index.get_parents_with_ghosts('parent'))
        # and errors on ghosts.
        self.assertRaises(errors.RevisionNotPresent,
            index.get_parents_with_ghosts, 'ghost')

    def test_check_versions_present(self):
        # ghosts should not be considered present
        index = self.two_graph_index()
        self.assertRaises(RevisionNotPresent, index.check_versions_present,
            ['ghost'])
        self.assertRaises(RevisionNotPresent, index.check_versions_present,
            ['tail', 'ghost'])
        index.check_versions_present(['tail', 'separate'])

    def catch_add(self, entries):
        self.caught_entries.append(entries)

    def test_add_no_callback_errors(self):
        index = self.two_graph_index()
        self.assertRaises(errors.ReadOnlyError, index.add_version,
            'new', 'fulltext,no-eol', (None, 50, 60), ['separate'])

    def test_add_version_smoke(self):
        index = self.two_graph_index(catch_adds=True)
        index.add_version('new', 'fulltext,no-eol', (None, 50, 60), ['separate'])
        self.assertEqual([[(('new', ), 'N50 60', ((('separate',),),))]],
            self.caught_entries)

    def test_add_version_delta_not_delta_index(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_version,
            'new', 'no-eol,line-delta', (None, 0, 100), ['parent'])
        self.assertEqual([], self.caught_entries)

    def test_add_version_same_dup(self):
        index = self.two_graph_index(catch_adds=True)
        # options can be spelt two different ways
        index.add_version('tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])
        index.add_version('tip', 'no-eol,fulltext', (None, 0, 100), ['parent'])
        # but neither should have added data.
        self.assertEqual([[], []], self.caught_entries)

    def test_add_version_different_dup(self):
        index = self.two_graph_index(deltas=True, catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_version,
            'tip', 'no-eol,line-delta', (None, 0, 100), ['parent'])
        self.assertRaises(errors.KnitCorrupt, index.add_version,
            'tip', 'line-delta,no-eol', (None, 0, 100), ['parent'])
        self.assertRaises(errors.KnitCorrupt, index.add_version,
            'tip', 'fulltext', (None, 0, 100), ['parent'])

        self.assertRaises(errors.KnitCorrupt, index.add_version,
            'tip', 'fulltext,no-eol', (None, 50, 100), ['parent'])
        self.assertRaises(errors.KnitCorrupt, index.add_version,
            'tip', 'fulltext,no-eol', (None, 0, 1000), ['parent'])

        self.assertRaises(errors.KnitCorrupt, index.add_version,
            'tip', 'fulltext,no-eol', (None, 0, 100), [])
        self.assertEqual([], self.caught_entries)

    def test_add_versions_nodeltas(self):
        index = self.two_graph_index(catch_adds=True)
        index.add_versions([
                ('new', 'fulltext,no-eol', (None, 50, 60), ['separate']),
                ('new2', 'fulltext', (None, 0, 6), ['new']),
                ])
        self.assertEqual([(('new', ), 'N50 60', ((('separate',),),)),
            (('new2', ), ' 0 6', ((('new',),),))],
            sorted(self.caught_entries[0]))
        self.assertEqual(1, len(self.caught_entries))

    def test_add_versions_deltas(self):
        index = self.two_graph_index(deltas=True, catch_adds=True)
        index.add_versions([
                ('new', 'fulltext,no-eol', (None, 50, 60), ['separate']),
                ('new2', 'line-delta', (None, 0, 6), ['new']),
                ])
        self.assertEqual([(('new', ), 'N50 60', ((('separate',),), ())),
            (('new2', ), ' 0 6', ((('new',),), (('new',),), ))],
            sorted(self.caught_entries[0]))
        self.assertEqual(1, len(self.caught_entries))

    def test_add_versions_delta_not_delta_index(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_versions,
            [('new', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
        self.assertEqual([], self.caught_entries)

    def test_add_versions_random_id_accepted(self):
        index = self.two_graph_index(catch_adds=True)
        index.add_versions([], random_id=True)

    def test_add_versions_same_dup(self):
        index = self.two_graph_index(catch_adds=True)
        # options can be spelt two different ways
        index.add_versions([('tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])])
        index.add_versions([('tip', 'no-eol,fulltext', (None, 0, 100), ['parent'])])
        # but neither should have added data.
        self.assertEqual([[], []], self.caught_entries)

    def test_add_versions_different_dup(self):
        index = self.two_graph_index(deltas=True, catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_versions,
            [('tip', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
        self.assertRaises(errors.KnitCorrupt, index.add_versions,
            [('tip', 'line-delta,no-eol', (None, 0, 100), ['parent'])])
        self.assertRaises(errors.KnitCorrupt, index.add_versions,
            [('tip', 'fulltext', (None, 0, 100), ['parent'])])

        self.assertRaises(errors.KnitCorrupt, index.add_versions,
            [('tip', 'fulltext,no-eol', (None, 50, 100), ['parent'])])
        self.assertRaises(errors.KnitCorrupt, index.add_versions,
            [('tip', 'fulltext,no-eol', (None, 0, 1000), ['parent'])])

        self.assertRaises(errors.KnitCorrupt, index.add_versions,
            [('tip', 'fulltext,no-eol', (None, 0, 100), [])])
        # change options in the second record
        self.assertRaises(errors.KnitCorrupt, index.add_versions,
            [('tip', 'fulltext,no-eol', (None, 0, 100), ['parent']),
             ('tip', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
        self.assertEqual([], self.caught_entries)

    def test_iter_parents(self):
        index1 = self.make_g_index('1', 1, [
            (('r0', ), 'N0 100', ([], )),
            (('r1', ), '', ([('r0', )], ))])
        index2 = self.make_g_index('2', 1, [
            (('r2', ), 'N0 100', ([('r1', ), ('r0', )], )),
            ])
        combined_index = CombinedGraphIndex([index1, index2])
        index = KnitGraphIndex(combined_index)
        # cases: each sample data individually:
        self.assertEqual(set([('r0', ())]),
            set(index.iter_parents(['r0'])))
        self.assertEqual(set([('r1', ('r0', ))]),
            set(index.iter_parents(['r1'])))
        self.assertEqual(set([('r2', ('r1', 'r0'))]),
            set(index.iter_parents(['r2'])))
        # no nodes returned for a missing node
        self.assertEqual(set(),
            set(index.iter_parents(['missing'])))
        # 1 node returned with missing nodes skipped
        self.assertEqual(set([('r1', ('r0', ))]),
            set(index.iter_parents(['ghost1', 'r1', 'ghost'])))
        self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
            set(index.iter_parents(['r0', 'r1'])))
        # 2 nodes returned, missing skipped
        self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
            set(index.iter_parents(['a', 'r0', 'b', 'r1', 'c'])))


class TestNoParentsGraphIndexKnit(KnitTests):
    """Tests for knits using KnitGraphIndex with no parents."""

    def make_g_index(self, name, ref_lists=0, nodes=[]):
        builder = GraphIndexBuilder(ref_lists)
        for node, references in nodes:
            builder.add_node(node, references)
        stream = builder.finish()
        trans = self.get_transport()
        size = trans.put_file(name, stream)
        return GraphIndex(trans, name, size)

    def test_parents_deltas_incompatible(self):
        index = CombinedGraphIndex([])
        self.assertRaises(errors.KnitError, KnitGraphIndex, index,
            deltas=True, parents=False)

    def two_graph_index(self, catch_adds=False):
        """Build a two-graph index.

        :param deltas: If true, use underlying indices with two node-ref
            lists and 'parent' set to a delta-compressed against tail.
        """
        # put several versions in the index.
        index1 = self.make_g_index('1', 0, [
            (('tip', ), 'N0 100'),
            (('tail', ), '')])
        index2 = self.make_g_index('2', 0, [
            (('parent', ), ' 100 78'),
            (('separate', ), '')])
        combined_index = CombinedGraphIndex([index1, index2])
        if catch_adds:
            self.combined_index = combined_index
            self.caught_entries = []
            add_callback = self.catch_add
        else:
            add_callback = None
        return KnitGraphIndex(combined_index, parents=False,
            add_callback=add_callback)

    def test_get_graph(self):
        index = self.two_graph_index()
        self.assertEqual(set([
            ('tip', ()),
            ('tail', ()),
            ('parent', ()),
            ('separate', ()),
            ]), set(index.get_graph()))

    def test_get_ancestry(self):
        # with no parents, ancestry is always just the key.
        index = self.two_graph_index()
        self.assertEqual([], index.get_ancestry([]))
        self.assertEqual(['separate'], index.get_ancestry(['separate']))
        self.assertEqual(['tail'], index.get_ancestry(['tail']))
        self.assertEqual(['parent'], index.get_ancestry(['parent']))
        self.assertEqual(['tip'], index.get_ancestry(['tip']))
        self.assertTrue(index.get_ancestry(['tip', 'separate']) in
            (['tip', 'separate'],
             ['separate', 'tip'],
            ))
        # asking for a ghost makes it go boom.
        self.assertRaises(errors.RevisionNotPresent, index.get_ancestry, ['ghost'])

    def test_get_ancestry_with_ghosts(self):
        index = self.two_graph_index()
        self.assertEqual([], index.get_ancestry_with_ghosts([]))
        self.assertEqual(['separate'], index.get_ancestry_with_ghosts(['separate']))
        self.assertEqual(['tail'], index.get_ancestry_with_ghosts(['tail']))
        self.assertEqual(['parent'], index.get_ancestry_with_ghosts(['parent']))
        self.assertEqual(['tip'], index.get_ancestry_with_ghosts(['tip']))
        self.assertTrue(index.get_ancestry_with_ghosts(['tip', 'separate']) in
            (['tip', 'separate'],
             ['separate', 'tip'],
            ))
        # asking for a ghost makes it go boom.
        self.assertRaises(errors.RevisionNotPresent, index.get_ancestry_with_ghosts, ['ghost'])

    def test_num_versions(self):
        index = self.two_graph_index()
        self.assertEqual(4, index.num_versions())

    def test_get_versions(self):
        index = self.two_graph_index()
        self.assertEqual(set(['tail', 'tip', 'parent', 'separate']),
            set(index.get_versions()))

    def test_has_version(self):
        index = self.two_graph_index()
        self.assertTrue(index.has_version('tail'))
        self.assertFalse(index.has_version('ghost'))

    def test_get_position(self):
        index = self.two_graph_index()
        self.assertEqual((index._graph_index._indices[0], 0, 100), index.get_position('tip'))
        self.assertEqual((index._graph_index._indices[1], 100, 78), index.get_position('parent'))

    def test_get_method(self):
        index = self.two_graph_index()
        self.assertEqual('fulltext', index.get_method('tip'))
        self.assertEqual('fulltext', index.get_method('parent'))

    def test_get_options(self):
        index = self.two_graph_index()
        self.assertEqual(['fulltext', 'no-eol'], index.get_options('tip'))
        self.assertEqual(['fulltext'], index.get_options('parent'))

    def test_get_parents(self):
        index = self.two_graph_index()
        self.assertEqual((), index.get_parents('parent'))
        # and errors on ghosts.
        self.assertRaises(errors.RevisionNotPresent,
            index.get_parents, 'ghost')

    def test_get_parents_with_ghosts(self):
        index = self.two_graph_index()
        self.assertEqual((), index.get_parents_with_ghosts('parent'))
        # and errors on ghosts.
        self.assertRaises(errors.RevisionNotPresent,
            index.get_parents_with_ghosts, 'ghost')

    def test_check_versions_present(self):
        index = self.two_graph_index()
        self.assertRaises(RevisionNotPresent, index.check_versions_present,
            ['missing'])
        self.assertRaises(RevisionNotPresent, index.check_versions_present,
            ['tail', 'missing'])
        index.check_versions_present(['tail', 'separate'])

    def catch_add(self, entries):
        self.caught_entries.append(entries)

    def test_add_no_callback_errors(self):
        index = self.two_graph_index()
        self.assertRaises(errors.ReadOnlyError, index.add_version,
            'new', 'fulltext,no-eol', (None, 50, 60), ['separate'])

    def test_add_version_smoke(self):
        index = self.two_graph_index(catch_adds=True)
        index.add_version('new', 'fulltext,no-eol', (None, 50, 60), [])
        self.assertEqual([[(('new', ), 'N50 60')]],
            self.caught_entries)

    def test_add_version_delta_not_delta_index(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_version,
            'new', 'no-eol,line-delta', (None, 0, 100), [])
        self.assertEqual([], self.caught_entries)

    def test_add_version_same_dup(self):
        index = self.two_graph_index(catch_adds=True)
        # options can be spelt two different ways
        index.add_version('tip', 'fulltext,no-eol', (None, 0, 100), [])
        index.add_version('tip', 'no-eol,fulltext', (None, 0, 100), [])
        # but neither should have added data.
        self.assertEqual([[], []], self.caught_entries)

    def test_add_version_different_dup(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_version,
            'tip', 'no-eol,line-delta', (None, 0, 100), [])
        self.assertRaises(errors.KnitCorrupt, index.add_version,
            'tip', 'line-delta,no-eol', (None, 0, 100), [])
        self.assertRaises(errors.KnitCorrupt, index.add_version,
            'tip', 'fulltext', (None, 0, 100), [])

        self.assertRaises(errors.KnitCorrupt, index.add_version,
            'tip', 'fulltext,no-eol', (None, 50, 100), [])
        self.assertRaises(errors.KnitCorrupt, index.add_version,
            'tip', 'fulltext,no-eol', (None, 0, 1000), [])

        self.assertRaises(errors.KnitCorrupt, index.add_version,
            'tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])
        self.assertEqual([], self.caught_entries)

    def test_add_versions(self):
        index = self.two_graph_index(catch_adds=True)
        index.add_versions([
                ('new', 'fulltext,no-eol', (None, 50, 60), []),
                ('new2', 'fulltext', (None, 0, 6), []),
                ])
        self.assertEqual([(('new', ), 'N50 60'), (('new2', ), ' 0 6')],
            sorted(self.caught_entries[0]))
        self.assertEqual(1, len(self.caught_entries))

    def test_add_versions_delta_not_delta_index(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_versions,
            [('new', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
        self.assertEqual([], self.caught_entries)

    def test_add_versions_parents_not_parents_index(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_versions,
            [('new', 'no-eol,fulltext', (None, 0, 100), ['parent'])])
        self.assertEqual([], self.caught_entries)

    def test_add_versions_random_id_accepted(self):
        index = self.two_graph_index(catch_adds=True)
        index.add_versions([], random_id=True)

    def test_add_versions_same_dup(self):
        index = self.two_graph_index(catch_adds=True)
        # options can be spelt two different ways
        index.add_versions([('tip', 'fulltext,no-eol', (None, 0, 100), [])])
        index.add_versions([('tip', 'no-eol,fulltext', (None, 0, 100), [])])
        # but neither should have added data.
        self.assertEqual([[], []], self.caught_entries)

    def test_add_versions_different_dup(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_versions,
            [('tip', 'no-eol,line-delta', (None, 0, 100), [])])
        self.assertRaises(errors.KnitCorrupt, index.add_versions,
            [('tip', 'line-delta,no-eol', (None, 0, 100), [])])
        self.assertRaises(errors.KnitCorrupt, index.add_versions,
            [('tip', 'fulltext', (None, 0, 100), [])])

        self.assertRaises(errors.KnitCorrupt, index.add_versions,
            [('tip', 'fulltext,no-eol', (None, 50, 100), [])])
        self.assertRaises(errors.KnitCorrupt, index.add_versions,
            [('tip', 'fulltext,no-eol', (None, 0, 1000), [])])

        self.assertRaises(errors.KnitCorrupt, index.add_versions,
            [('tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])])
        # change options in the second record
        self.assertRaises(errors.KnitCorrupt, index.add_versions,
            [('tip', 'fulltext,no-eol', (None, 0, 100), []),
             ('tip', 'no-eol,line-delta', (None, 0, 100), [])])
        self.assertEqual([], self.caught_entries)

    def test_iter_parents(self):
        index = self.two_graph_index()
        self.assertEqual(set([
            ('tip', ()), ('tail', ()), ('parent', ()), ('separate', ())
            ]),
            set(index.iter_parents(['tip', 'tail', 'ghost', 'parent', 'separate'])))
        self.assertEqual(set([('tip', ())]),
            set(index.iter_parents(['tip'])))
        self.assertEqual(set(),
            set(index.iter_parents([])))


class TestPackKnits(KnitTests):
    """Tests that use a _PackAccess and KnitGraphIndex."""

    def test_get_data_stream_packs_ignores_pack_overhead(self):
        # Packs have an encoding overhead that should not be included in the
        # 'size' field of a data stream, because it is not returned by the
        # raw_reading functions - it is why index_memo's are opaque, and
        # get_data_stream was abusing this.
        packname = 'test.pack'
        transport = self.get_transport()
        def write_data(bytes):
            transport.append_bytes(packname, bytes)
        writer = pack.ContainerWriter(write_data)
        writer.begin()
        index = InMemoryGraphIndex(2)
        knit_index = KnitGraphIndex(index, add_callback=index.add_nodes,
            deltas=True)
        indices = {index:(transport, packname)}
        access = _PackAccess(indices, writer=(writer, index))
        k = KnitVersionedFile('test', get_transport('.'),
            delta=True, create=True, index=knit_index, access_method=access)
        # insert something into the knit
        k.add_lines('text-1', [], ["foo\n"])
        # get a data stream for it
        stream = k.get_data_stream(['text-1'])
        # if the stream has been incorrectly assembled, we will get a short read
        # reading from the stream (as streams have no trailer)
        expected_length = stream[1][0][2]
        # we use -1 to do the read, so that if a trailer is added this test
        # will fail and we'll adjust it to handle that case correctly, rather
        # than allowing an over-read that is bogus.
        self.assertEqual(expected_length, len(stream[2](-1)))


class Test_StreamIndex(KnitTests):

    def get_index(self, knit, stream):
        """Get a _StreamIndex from knit and stream."""
        return knit._knit_from_datastream(stream)._index

    def assertIndexVersions(self, knit, versions):
        """Check that the _StreamIndex versions are those of the stream."""
        index = self.get_index(knit, knit.get_data_stream(versions))
        self.assertEqual(set(index.get_versions()), set(versions))
        # check we didn't get duplicates
        self.assertEqual(len(index.get_versions()), len(versions))

    def assertIndexAncestry(self, knit, ancestry_versions, versions, result):
        """Check the result of a get_ancestry call on knit."""
        index = self.get_index(knit, knit.get_data_stream(versions))
        self.assertEqual(set(result),
            set(index.get_ancestry(ancestry_versions, False)))

    def assertIterParents(self, knit, versions, parent_versions, result):
        """Check the result of an iter_parents call on knit."""
        index = self.get_index(knit, knit.get_data_stream(versions))
        self.assertEqual(result, index.iter_parents(parent_versions))

    def assertGetMethod(self, knit, versions, version, result):
        index = self.get_index(knit, knit.get_data_stream(versions))
        self.assertEqual(result, index.get_method(version))

    def assertGetOptions(self, knit, version, options):
        index = self.get_index(knit, knit.get_data_stream(version))
        self.assertEqual(options, index.get_options(version))

    def assertGetPosition(self, knit, versions, version, result):
        index = self.get_index(knit, knit.get_data_stream(versions))
        if result[1] is None:
            result = (result[0], index, result[2], result[3])
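            # A None in the expected tuple stands for the stream's own index
            # object, which the caller cannot name up front, so substitute it
            # in before comparing.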
        self.assertEqual(result, index.get_position(version))

    def assertGetParentsWithGhosts(self, knit, versions, version, parents):
        index = self.get_index(knit, knit.get_data_stream(versions))
        self.assertEqual(parents, index.get_parents_with_ghosts(version))

    def make_knit_with_4_versions_2_dags(self):
        knit = self.make_test_knit()
        knit.add_lines('a', [], ["foo"])
        knit.add_lines('b', [], [])
        knit.add_lines('c', ['b', 'a'], [])
        knit.add_lines_with_ghosts('d', ['e', 'f'], [])
        return knit

    def test_versions(self):
        """The versions of a StreamIndex are those of the datastream."""
        knit = self.make_knit_with_4_versions_2_dags()
        # ask for most permutations, which catches bugs like falling back to the
        # target knit, or showing ghosts, etc.
        self.assertIndexVersions(knit, [])
        self.assertIndexVersions(knit, ['a'])
        self.assertIndexVersions(knit, ['b'])
        self.assertIndexVersions(knit, ['c'])
        self.assertIndexVersions(knit, ['d'])
        self.assertIndexVersions(knit, ['a', 'b'])
        self.assertIndexVersions(knit, ['b', 'c'])
        self.assertIndexVersions(knit, ['a', 'c'])
        self.assertIndexVersions(knit, ['a', 'b', 'c'])
        self.assertIndexVersions(knit, ['a', 'b', 'c', 'd'])

    def test_construct(self):
        """Constructing a StreamIndex generates index data."""
        data_list = [('text-a', ['fulltext'], 127, []),
            ('text-b', ['option'], 128, ['text-c'])]
        index = _StreamIndex(data_list)
        self.assertEqual({'text-a':(['fulltext'], (0, 127), []),
            'text-b':(['option'], (127, 127 + 128), ['text-c'])},
            index._by_version)
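        # The byte ranges are cumulative offsets into the stream: the second
        # record starts where the first one (length 127) ends.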

    def test_get_ancestry(self):
        knit = self.make_knit_with_4_versions_2_dags()
        self.assertIndexAncestry(knit, ['a'], ['a'], ['a'])
        self.assertIndexAncestry(knit, ['b'], ['b'], ['b'])
        self.assertIndexAncestry(knit, ['c'], ['c'], ['c'])
        self.assertIndexAncestry(knit, ['c'], ['a', 'b', 'c'],
            set(['a', 'b', 'c']))
        self.assertIndexAncestry(knit, ['c', 'd'], ['a', 'b', 'c', 'd'],
            set(['a', 'b', 'c', 'd']))

    def test_get_method(self):
        knit = self.make_knit_with_4_versions_2_dags()
        self.assertGetMethod(knit, ['a'], 'a', 'fulltext')
        self.assertGetMethod(knit, ['c'], 'c', 'line-delta')
        # get_method on a basis that is not in the datastream (but in the
        # backing knit) returns 'fulltext', because that's what we'll create as
        # we thunk across.
        self.assertGetMethod(knit, ['c'], 'b', 'fulltext')

    def test_iter_parents(self):
        knit = self.make_knit_with_4_versions_2_dags()
        self.assertIterParents(knit, ['a'], ['a'], [('a', [])])
        self.assertIterParents(knit, ['a', 'b'], ['a', 'b'],
            [('a', []), ('b', [])])
        self.assertIterParents(knit, ['a', 'b', 'c'], ['a', 'b', 'c'],
            [('a', []), ('b', []), ('c', ['b', 'a'])])
        self.assertIterParents(knit, ['a', 'b', 'c', 'd'],
            ['a', 'b', 'c', 'd'],
            [('a', []), ('b', []), ('c', ['b', 'a']), ('d', ['e', 'f'])])
        self.assertIterParents(knit, ['c'], ['a', 'b', 'c'],
            [('c', ['b', 'a'])])

    def test_get_options(self):
        knit = self.make_knit_with_4_versions_2_dags()
        self.assertGetOptions(knit, 'a', ['no-eol', 'fulltext'])
        self.assertGetOptions(knit, 'c', ['line-delta'])

    def test_get_parents_with_ghosts(self):
        knit = self.make_knit_with_4_versions_2_dags()
        self.assertGetParentsWithGhosts(knit, ['a'], 'a', [])
        self.assertGetParentsWithGhosts(knit, ['c'], 'c', ['b', 'a'])
        self.assertGetParentsWithGhosts(knit, ['d'], 'd', ['e', 'f'])

    def test_get_position(self):
        knit = self.make_knit_with_4_versions_2_dags()
        # get_position returns (thunk_flag, index(can be None), start, end) for
        # _StreamAccess to use.
        self.assertGetPosition(knit, ['a'], 'a', (False, None, 0, 78))
        self.assertGetPosition(knit, ['a', 'c'], 'c', (False, None, 78, 156))
        # get_position on a text that is not in the datastream (but in the
        # backing knit) returns (True, 'versionid', None, None) - and then the
        # access object can construct the relevant data as needed.
        self.assertGetPosition(knit, ['a', 'c'], 'b', (True, 'b', None, None))
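        # The offsets above are positions within the stream itself: record 'a'
        # occupies bytes 0-78 and 'c' follows immediately at 78-156.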


class Test_StreamAccess(KnitTests):

    def get_index_access(self, knit, stream):
        """Get a _StreamAccess from knit and stream."""
        knit = knit._knit_from_datastream(stream)
        return knit._index, knit._data._access

    def assertGetRawRecords(self, knit, versions):
        index, access = self.get_index_access(knit,
            knit.get_data_stream(versions))
        # check that every version asked for can be obtained from the resulting
        # access object.
        memos = []
        for version in versions:
            memos.append(knit._index.get_position(version))
        original = {}
        for version, data in zip(
            versions, knit._data._access.get_raw_records(memos)):
            original[version] = data
        memos = []
        for version in versions:
            memos.append(index.get_position(version))
        streamed = {}
        for version, data in zip(versions, access.get_raw_records(memos)):
            streamed[version] = data
        self.assertEqual(original, streamed)
        for version in versions:
            data = list(access.get_raw_records(
                [index.get_position(version)]))[0]
            self.assertEqual(original[version], data)

    def make_knit_with_two_versions(self):
        knit = self.make_test_knit()
        knit.add_lines('a', [], ["foo"])
        knit.add_lines('b', [], ["bar"])
        return knit

    def test_get_raw_records(self):
        knit = self.make_knit_with_two_versions()
        self.assertGetRawRecords(knit, ['a', 'b'])
        self.assertGetRawRecords(knit, ['a'])
        self.assertGetRawRecords(knit, ['b'])

    def test_get_raw_record_from_backing_knit(self):
        # the thunk layer should create an artificial A on-demand when needed.
        source_knit = self.make_test_knit(name='plain', annotate=False)
        target_knit = self.make_test_knit(name='annotated', annotate=True)
        source_knit.add_lines("A", [], ["Foo\n"])
        # Give the target A, so we can try to thunk across to it.
        target_knit.join(source_knit)
        index, access = self.get_index_access(target_knit,
            source_knit.get_data_stream([]))
        raw_data = list(access.get_raw_records([(True, "A", None, None)]))[0]
        df = GzipFile(mode='rb', fileobj=StringIO(raw_data))
        self.assertEqual(
            'version A 1 5d36b88bb697a2d778f024048bafabd443d74503\n'
            'Foo\n'
            'end A\n',
            df.read())

    def test_asking_for_thunk_stream_is_not_plain_errors(self):
        knit = self.make_test_knit(name='annotated', annotate=True)
        knit.add_lines("A", [], ["Foo\n"])
        index, access = self.get_index_access(knit,
            knit.get_data_stream([]))
        self.assertRaises(errors.KnitCorrupt,
            list, access.get_raw_records([(True, "A", None, None)]))