118
265
f = self.reopen_file(create=True)
121
def test_get_record_stream_empty(self):
122
"""get_record_stream is a replacement for get_data_stream."""
124
entries = f.get_record_stream([], 'unordered', False)
125
self.assertEqual([], list(entries))
127
def assertValidStorageKind(self, storage_kind):
128
"""Assert that storage_kind is a valid storage_kind."""
129
self.assertSubset([storage_kind],
130
['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
131
'knit-ft', 'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
132
'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz'])
134
def capture_stream(self, f, entries, on_seen, parents):
135
"""Capture a stream for testing."""
136
for factory in entries:
138
self.assertValidStorageKind(factory.storage_kind)
139
self.assertEqual(f.get_sha1s([factory.key[0]])[0], factory.sha1)
140
self.assertEqual(parents[factory.key[0]], factory.parents)
141
self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
144
def test_get_record_stream_interface(self):
145
"""Each item in a stream has to provide a regular interface."""
146
f, parents = get_diamond_vf(self.get_file())
147
entries = f.get_record_stream(['merged', 'left', 'right', 'base'],
150
self.capture_stream(f, entries, seen.add, parents)
151
self.assertEqual(set([('base',), ('left',), ('right',), ('merged',)]),
154
def test_get_record_stream_interface_ordered(self):
155
"""Each item in a stream has to provide a regular interface."""
156
f, parents = get_diamond_vf(self.get_file())
157
entries = f.get_record_stream(['merged', 'left', 'right', 'base'],
158
'topological', False)
160
self.capture_stream(f, entries, seen.append, parents)
161
self.assertSubset([tuple(seen)],
163
(('base',), ('left',), ('right',), ('merged',)),
164
(('base',), ('right',), ('left',), ('merged',)),
167
def test_get_record_stream_interface_ordered_with_delta_closure(self):
168
"""Each item in a stream has to provide a regular interface."""
169
f, parents = get_diamond_vf(self.get_file())
170
entries = f.get_record_stream(['merged', 'left', 'right', 'base'],
173
for factory in entries:
174
seen.append(factory.key)
175
self.assertValidStorageKind(factory.storage_kind)
176
self.assertEqual(f.get_sha1s([factory.key[0]])[0], factory.sha1)
177
self.assertEqual(parents[factory.key[0]], factory.parents)
178
self.assertEqual(f.get_text(factory.key[0]),
179
factory.get_bytes_as('fulltext'))
180
self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
182
self.assertSubset([tuple(seen)],
184
(('base',), ('left',), ('right',), ('merged',)),
185
(('base',), ('right',), ('left',), ('merged',)),
188
def test_get_record_stream_unknown_storage_kind_raises(self):
189
"""Asking for a storage kind that the stream cannot supply raises."""
190
f, parents = get_diamond_vf(self.get_file())
191
entries = f.get_record_stream(['merged', 'left', 'right', 'base'],
193
# We track the contents because we should be able to try, fail a
194
# particular kind and then ask for one that works and continue.
196
for factory in entries:
197
seen.add(factory.key)
198
self.assertValidStorageKind(factory.storage_kind)
199
self.assertEqual(f.get_sha1s([factory.key[0]])[0], factory.sha1)
200
self.assertEqual(parents[factory.key[0]], factory.parents)
201
# currently no stream emits mpdiff
202
self.assertRaises(errors.UnavailableRepresentation,
203
factory.get_bytes_as, 'mpdiff')
204
self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
206
self.assertEqual(set([('base',), ('left',), ('right',), ('merged',)]),
209
def test_get_record_stream_missing_records_are_absent(self):
210
f, parents = get_diamond_vf(self.get_file())
211
entries = f.get_record_stream(['merged', 'left', 'right', 'or', 'base'],
213
self.assertAbsentRecord(f, parents, entries)
214
entries = f.get_record_stream(['merged', 'left', 'right', 'or', 'base'],
215
'topological', False)
216
self.assertAbsentRecord(f, parents, entries)
218
def assertAbsentRecord(self, f, parents, entries):
219
"""Helper for test_get_record_stream_missing_records_are_absent."""
221
for factory in entries:
222
seen.add(factory.key)
223
if factory.key == ('or',):
224
self.assertEqual('absent', factory.storage_kind)
225
self.assertEqual(None, factory.sha1)
226
self.assertEqual(None, factory.parents)
228
self.assertValidStorageKind(factory.storage_kind)
229
self.assertEqual(f.get_sha1s([factory.key[0]])[0], factory.sha1)
230
self.assertEqual(parents[factory.key[0]], factory.parents)
231
self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
234
set([('base',), ('left',), ('right',), ('merged',), ('or',)]),
237
def test_filter_absent_records(self):
238
"""Requested missing records can be filter trivially."""
239
f, parents = get_diamond_vf(self.get_file())
240
entries = f.get_record_stream(['merged', 'left', 'right', 'extra', 'base'],
243
self.capture_stream(f, versionedfile.filter_absent(entries), seen.add,
245
self.assertEqual(set([('base',), ('left',), ('right',), ('merged',)]),
248
def test_insert_record_stream_empty(self):
249
"""Inserting an empty record stream should work."""
252
f.insert_record_stream([])
254
def assertIdenticalVersionedFile(self, left, right):
255
"""Assert that left and right have the same contents."""
256
self.assertEqual(set(left.versions()), set(right.versions()))
257
self.assertEqual(left.get_parent_map(left.versions()),
258
right.get_parent_map(right.versions()))
259
for v in left.versions():
260
self.assertEqual(left.get_text(v), right.get_text(v))
262
def test_insert_record_stream_fulltexts(self):
263
"""Any file should accept a stream of fulltexts."""
265
weave_vf = WeaveFile('source', get_transport(self.get_url('.')),
266
create=True, get_scope=self.get_transaction)
267
source, _ = get_diamond_vf(weave_vf)
268
stream = source.get_record_stream(source.versions(), 'topological',
270
f.insert_record_stream(stream)
271
self.assertIdenticalVersionedFile(f, source)
273
def test_insert_record_stream_fulltexts_noeol(self):
274
"""Any file should accept a stream of fulltexts."""
276
weave_vf = WeaveFile('source', get_transport(self.get_url('.')),
277
create=True, get_scope=self.get_transaction)
278
source, _ = get_diamond_vf(weave_vf, trailing_eol=False)
279
stream = source.get_record_stream(source.versions(), 'topological',
281
f.insert_record_stream(stream)
282
self.assertIdenticalVersionedFile(f, source)
284
def test_insert_record_stream_annotated_knits(self):
285
"""Any file should accept a stream from plain knits."""
287
source = make_file_knit('source', get_transport(self.get_url('.')),
289
get_diamond_vf(source)
290
stream = source.get_record_stream(source.versions(), 'topological',
292
f.insert_record_stream(stream)
293
self.assertIdenticalVersionedFile(f, source)
295
def test_insert_record_stream_annotated_knits_noeol(self):
296
"""Any file should accept a stream from plain knits."""
298
source = make_file_knit('source', get_transport(self.get_url('.')),
300
get_diamond_vf(source, trailing_eol=False)
301
stream = source.get_record_stream(source.versions(), 'topological',
303
f.insert_record_stream(stream)
304
self.assertIdenticalVersionedFile(f, source)
306
def test_insert_record_stream_plain_knits(self):
307
"""Any file should accept a stream from plain knits."""
309
source = make_file_knit('source', get_transport(self.get_url('.')),
310
create=True, factory=KnitPlainFactory())
311
get_diamond_vf(source)
312
stream = source.get_record_stream(source.versions(), 'topological',
314
f.insert_record_stream(stream)
315
self.assertIdenticalVersionedFile(f, source)
317
def test_insert_record_stream_plain_knits_noeol(self):
318
"""Any file should accept a stream from plain knits."""
320
source = make_file_knit('source', get_transport(self.get_url('.')),
321
create=True, factory=KnitPlainFactory())
322
get_diamond_vf(source, trailing_eol=False)
323
stream = source.get_record_stream(source.versions(), 'topological',
325
f.insert_record_stream(stream)
326
self.assertIdenticalVersionedFile(f, source)
328
def test_insert_record_stream_existing_keys(self):
329
"""Inserting keys already in a file should not error."""
331
source = make_file_knit('source', get_transport(self.get_url('.')),
332
create=True, factory=KnitPlainFactory())
333
get_diamond_vf(source)
334
# insert some keys into f.
335
get_diamond_vf(f, left_only=True)
336
stream = source.get_record_stream(source.versions(), 'topological',
338
f.insert_record_stream(stream)
339
self.assertIdenticalVersionedFile(f, source)
341
def test_insert_record_stream_missing_keys(self):
342
"""Inserting a stream with absent keys should raise an error."""
344
source = make_file_knit('source', get_transport(self.get_url('.')),
345
create=True, factory=KnitPlainFactory())
346
stream = source.get_record_stream(['missing'], 'topological',
348
self.assertRaises(errors.RevisionNotPresent, f.insert_record_stream,
351
def test_insert_record_stream_out_of_order(self):
352
"""An out of order stream can either error or work."""
353
f, parents = get_diamond_vf(self.get_file())
354
origin_entries = f.get_record_stream(['origin'], 'unordered', False)
355
end_entries = f.get_record_stream(['merged', 'left'],
356
'topological', False)
357
start_entries = f.get_record_stream(['right', 'base'],
358
'topological', False)
359
entries = chain(origin_entries, end_entries, start_entries)
360
target = self.get_file('target')
362
target.insert_record_stream(entries)
363
except RevisionNotPresent:
364
# Must not have corrupted the file.
367
self.assertIdenticalVersionedFile(f, target)
369
def test_insert_record_stream_delta_missing_basis_no_corruption(self):
370
"""Insertion where a needed basis is not included aborts safely."""
371
# Annotated source - deltas can be used in any knit.
372
source = make_file_knit('source', get_transport(self.get_url('.')),
374
get_diamond_vf(source)
375
entries = source.get_record_stream(['origin', 'merged'], 'unordered', False)
377
self.assertRaises(RevisionNotPresent, f.insert_record_stream, entries)
379
self.assertFalse(f.has_version('merged'))
381
268
def test_adds_with_parent_texts(self):
382
269
f = self.get_file()
383
270
parent_texts = {}
1019
902
return WeaveFile
1022
class TestKnit(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
1024
def get_file(self, name='foo', create=True):
1025
return make_file_knit(name, get_transport(self.get_url('.')),
1026
delta=True, create=True, get_scope=self.get_transaction)
1028
def get_factory(self):
1029
return make_file_knit
1031
def get_file_corrupted_text(self):
1032
knit = self.get_file()
1033
knit.add_lines('v1', [], ['hello\n'])
1034
knit.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
1037
def reopen_file(self, name='foo', create=False):
1038
return self.get_file(name, create)
1040
def test_detection(self):
1041
knit = self.get_file()
1044
def test_no_implicit_create(self):
1045
self.assertRaises(errors.NoSuchFile, self.get_factory(), 'foo',
1046
get_transport(self.get_url('.')))
1049
class TestPlaintextKnit(TestKnit):
1050
"""Test a knit with no cached annotations"""
1052
def get_file(self, name='foo', create=True):
1053
return make_file_knit(name, get_transport(self.get_url('.')),
1054
delta=True, create=create, get_scope=self.get_transaction,
1055
factory=_mod_knit.KnitPlainFactory())
1058
905
class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):
1060
907
def setUp(self):
1061
908
TestCaseWithMemoryTransport.setUp(self)
1062
self.vf1 = make_file_knit('root', self.get_transport(), create=True)
1063
self.vf2 = make_file_knit('root', self.get_transport(), create=True)
1064
self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root',
1065
[self.vf1, self.vf2])
909
mapper = PrefixMapper()
910
factory = make_file_factory(True, mapper)
911
self.vf1 = factory(self.get_transport('root-1'))
912
self.vf2 = factory(self.get_transport('root-2'))
913
self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
914
self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])
1067
916
def test_add_lines(self):
1068
self.plan_merge_vf.add_lines('a:', [], [])
1069
self.assertRaises(ValueError, self.plan_merge_vf.add_lines, 'a', [],
1071
self.assertRaises(ValueError, self.plan_merge_vf.add_lines, 'a:', None,
1073
self.assertRaises(ValueError, self.plan_merge_vf.add_lines, 'a:', [],
1076
def test_ancestry(self):
1077
self.vf1.add_lines('A', [], [])
1078
self.vf1.add_lines('B', ['A'], [])
1079
self.plan_merge_vf.add_lines('C:', ['B'], [])
1080
self.plan_merge_vf.add_lines('D:', ['C:'], [])
1081
self.assertEqual(set(['A', 'B', 'C:', 'D:']),
1082
self.plan_merge_vf.get_ancestry('D:', topo_sorted=False))
917
self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
918
self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
919
('root', 'a'), [], [])
920
self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
921
('root', 'a:'), None, [])
922
self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
923
('root', 'a:'), [], None)
1084
925
def setup_abcde(self):
1085
self.vf1.add_lines('A', [], ['a'])
1086
self.vf1.add_lines('B', ['A'], ['b'])
1087
self.vf2.add_lines('C', [], ['c'])
1088
self.vf2.add_lines('D', ['C'], ['d'])
1089
self.plan_merge_vf.add_lines('E:', ['B', 'D'], ['e'])
1091
def test_ancestry_uses_all_versionedfiles(self):
1093
self.assertEqual(set(['A', 'B', 'C', 'D', 'E:']),
1094
self.plan_merge_vf.get_ancestry('E:', topo_sorted=False))
1096
def test_ancestry_raises_revision_not_present(self):
1097
error = self.assertRaises(errors.RevisionNotPresent,
1098
self.plan_merge_vf.get_ancestry, 'E:', False)
1099
self.assertContainsRe(str(error), '{E:} not present in "root"')
926
self.vf1.add_lines(('root', 'A'), [], ['a'])
927
self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
928
self.vf2.add_lines(('root', 'C'), [], ['c'])
929
self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
930
self.plan_merge_vf.add_lines(('root', 'E:'),
931
[('root', 'B'), ('root', 'D')], ['e'])
1101
933
def test_get_parents(self):
1102
934
self.setup_abcde()
1103
self.assertEqual({'B':('A',)}, self.plan_merge_vf.get_parent_map(['B']))
1104
self.assertEqual({'D':('C',)}, self.plan_merge_vf.get_parent_map(['D']))
1105
self.assertEqual({'E:':('B', 'D')},
1106
self.plan_merge_vf.get_parent_map(['E:']))
1107
self.assertEqual({}, self.plan_merge_vf.get_parent_map(['F']))
935
self.assertEqual({('root', 'B'):(('root', 'A'),)},
936
self.plan_merge_vf.get_parent_map([('root', 'B')]))
937
self.assertEqual({('root', 'D'):(('root', 'C'),)},
938
self.plan_merge_vf.get_parent_map([('root', 'D')]))
939
self.assertEqual({('root', 'E:'):(('root', 'B'),('root', 'D'))},
940
self.plan_merge_vf.get_parent_map([('root', 'E:')]))
942
self.plan_merge_vf.get_parent_map([('root', 'F')]))
1108
943
self.assertEqual({
1112
}, self.plan_merge_vf.get_parent_map(['B', 'D', 'E:', 'F']))
944
('root', 'B'):(('root', 'A'),),
945
('root', 'D'):(('root', 'C'),),
946
('root', 'E:'):(('root', 'B'),('root', 'D')),
948
self.plan_merge_vf.get_parent_map(
949
[('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))
1114
def test_get_lines(self):
951
def test_get_record_stream(self):
1115
952
self.setup_abcde()
1116
self.assertEqual(['a'], self.plan_merge_vf.get_lines('A'))
1117
self.assertEqual(['c'], self.plan_merge_vf.get_lines('C'))
1118
self.assertEqual(['e'], self.plan_merge_vf.get_lines('E:'))
1119
error = self.assertRaises(errors.RevisionNotPresent,
1120
self.plan_merge_vf.get_lines, 'F')
1121
self.assertContainsRe(str(error), '{F} not present in "root"')
1124
class InterString(versionedfile.InterVersionedFile):
1125
"""An inter-versionedfile optimised code path for strings.
1127
This is for use during testing where we use strings as versionedfiles
1128
so that none of the default regsitered interversionedfile classes will
1129
match - which lets us test the match logic.
1133
def is_compatible(source, target):
1134
"""InterString is compatible with strings-as-versionedfiles."""
1135
return isinstance(source, str) and isinstance(target, str)
1138
# TODO this and the InterRepository core logic should be consolidatable
1139
# if we make the registry a separate class though we still need to
1140
# test the behaviour in the active registry to catch failure-to-handle-
1142
class TestInterVersionedFile(TestCaseWithMemoryTransport):
1144
def test_get_default_inter_versionedfile(self):
1145
# test that the InterVersionedFile.get(a, b) probes
1146
# for a class where is_compatible(a, b) returns
1147
# true and returns a default interversionedfile otherwise.
1148
# This also tests that the default registered optimised interversionedfile
1149
# classes do not barf inappropriately when a surprising versionedfile type
1150
# is handed to them.
1151
dummy_a = "VersionedFile 1."
1152
dummy_b = "VersionedFile 2."
1153
self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)
1155
def assertGetsDefaultInterVersionedFile(self, a, b):
1156
"""Asserts that InterVersionedFile.get(a, b) -> the default."""
1157
inter = versionedfile.InterVersionedFile.get(a, b)
1158
self.assertEqual(versionedfile.InterVersionedFile,
1160
self.assertEqual(a, inter.source)
1161
self.assertEqual(b, inter.target)
1163
def test_register_inter_versionedfile_class(self):
1164
# test that a optimised code path provider - a
1165
# InterVersionedFile subclass can be registered and unregistered
1166
# and that it is correctly selected when given a versionedfile
1167
# pair that it returns true on for the is_compatible static method
1169
dummy_a = "VersionedFile 1."
1170
dummy_b = "VersionedFile 2."
1171
versionedfile.InterVersionedFile.register_optimiser(InterString)
1173
# we should get the default for something InterString returns False
1175
self.assertFalse(InterString.is_compatible(dummy_a, None))
1176
self.assertGetsDefaultInterVersionedFile(dummy_a, None)
1177
# and we should get an InterString for a pair it 'likes'
1178
self.assertTrue(InterString.is_compatible(dummy_a, dummy_b))
1179
inter = versionedfile.InterVersionedFile.get(dummy_a, dummy_b)
1180
self.assertEqual(InterString, inter.__class__)
1181
self.assertEqual(dummy_a, inter.source)
1182
self.assertEqual(dummy_b, inter.target)
1184
versionedfile.InterVersionedFile.unregister_optimiser(InterString)
1185
# now we should get the default InterVersionedFile object again.
1186
self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)
953
def get_record(suffix):
954
return self.plan_merge_vf.get_record_stream(
955
[('root', suffix)], 'unordered', True).next()
956
self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
957
self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
958
self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
959
self.assertEqual('absent', get_record('F').storage_kind)
1189
962
class TestReadonlyHttpMixin(object):
1618
1376
This is used for -> weaves, and for -> annotated knits.
1620
1378
# we need a full text, and a delta
1621
f, parents = get_diamond_vf(self.get_knit(annotated=False),
1379
f = self.get_knit(annotated=False)
1380
get_diamond_files(f, 1, trailing_eol=False)
1623
1381
# Reconstructing a full text requires a backing versioned file, and it
1624
1382
# must have the base lines requested from it.
1625
logged_vf = versionedfile.RecordingVersionedFileDecorator(f)
1383
logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
1626
1384
ft_data, delta_data = self.helpGetBytes(f,
1627
1385
_mod_knit.FTPlainToFullText(None),
1628
1386
_mod_knit.DeltaPlainToFullText(logged_vf))
1629
1387
self.assertEqual('origin', ft_data)
1630
1388
self.assertEqual('base\nleft\nright\nmerged', delta_data)
1631
self.assertEqual([('get_lines', 'left')], logged_vf.calls)
1389
self.assertEqual([('get_record_stream', [('left',)], 'unordered',
1390
True)], logged_vf.calls)
1393
class TestKeyMapper(TestCaseWithMemoryTransport):
1394
"""Tests for various key mapping logic."""
1396
def test_identity_mapper(self):
1397
mapper = versionedfile.ConstantMapper("inventory")
1398
self.assertEqual("inventory", mapper.map(('foo@ar',)))
1399
self.assertEqual("inventory", mapper.map(('quux',)))
1401
def test_prefix_mapper(self):
1403
mapper = versionedfile.PrefixMapper()
1404
self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
1405
self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
1406
self.assertEqual(('file-id',), mapper.unmap("file-id"))
1407
self.assertEqual(('new-id',), mapper.unmap("new-id"))
1409
def test_hash_prefix_mapper(self):
1410
#format6: hash + plain
1411
mapper = versionedfile.HashPrefixMapper()
1412
self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
1413
self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
1414
self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
1415
self.assertEqual(('new-id',), mapper.unmap("45/new-id"))
1417
def test_hash_escaped_mapper(self):
1418
#knit1: hash + escaped
1419
mapper = versionedfile.HashEscapedPrefixMapper()
1420
self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
1421
self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
1423
self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
1425
self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
1426
self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
1429
class TestVersionedFiles(TestCaseWithMemoryTransport):
1430
"""Tests for the multiple-file variant of VersionedFile."""
1432
def get_versionedfiles(self, relpath='files'):
1433
transport = self.get_transport(relpath)
1435
transport.mkdir('.')
1436
files = self.factory(transport)
1437
if self.cleanup is not None:
1438
self.addCleanup(lambda:self.cleanup(files))
1441
def test_annotate(self):
1442
files = self.get_versionedfiles()
1443
self.get_diamond_files(files)
1444
if self.key_length == 1:
1448
# introduced full text
1449
origins = files.annotate(prefix + ('origin',))
1451
(prefix + ('origin',), 'origin\n')],
1454
origins = files.annotate(prefix + ('base',))
1456
(prefix + ('base',), 'base\n')],
1459
origins = files.annotate(prefix + ('merged',))
1462
(prefix + ('base',), 'base\n'),
1463
(prefix + ('left',), 'left\n'),
1464
(prefix + ('right',), 'right\n'),
1465
(prefix + ('merged',), 'merged\n')
1469
# Without a graph everything is new.
1471
(prefix + ('merged',), 'base\n'),
1472
(prefix + ('merged',), 'left\n'),
1473
(prefix + ('merged',), 'right\n'),
1474
(prefix + ('merged',), 'merged\n')
1477
self.assertRaises(RevisionNotPresent,
1478
files.annotate, prefix + ('missing-key',))
1480
def test_construct(self):
1481
"""Each parameterised test can be constructed on a transport."""
1482
files = self.get_versionedfiles()
1484
def get_diamond_files(self, files, trailing_eol=True, left_only=False):
1485
return get_diamond_files(files, self.key_length,
1486
trailing_eol=trailing_eol, nograph=not self.graph,
1487
left_only=left_only)
1489
def test_add_lines_return(self):
1490
files = self.get_versionedfiles()
1491
# save code by using the stock data insertion helper.
1492
adds = self.get_diamond_files(files)
1494
# We can only validate the first 2 elements returned from add_lines.
1496
self.assertEqual(3, len(add))
1497
results.append(add[:2])
1498
if self.key_length == 1:
1500
('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1501
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1502
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1503
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1504
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1506
elif self.key_length == 2:
1508
('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1509
('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1510
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1511
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1512
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1513
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1514
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1515
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1516
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
1517
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1520
def test_empty_lines(self):
1521
"""Empty files can be stored."""
1522
f = self.get_versionedfiles()
1523
key_a = self.get_simple_key('a')
1524
f.add_lines(key_a, [], [])
1525
self.assertEqual('',
1526
f.get_record_stream([key_a], 'unordered', True
1527
).next().get_bytes_as('fulltext'))
1528
key_b = self.get_simple_key('b')
1529
f.add_lines(key_b, self.get_parents([key_a]), [])
1530
self.assertEqual('',
1531
f.get_record_stream([key_b], 'unordered', True
1532
).next().get_bytes_as('fulltext'))
1534
def test_newline_only(self):
1535
f = self.get_versionedfiles()
1536
key_a = self.get_simple_key('a')
1537
f.add_lines(key_a, [], ['\n'])
1538
self.assertEqual('\n',
1539
f.get_record_stream([key_a], 'unordered', True
1540
).next().get_bytes_as('fulltext'))
1541
key_b = self.get_simple_key('b')
1542
f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
1543
self.assertEqual('\n',
1544
f.get_record_stream([key_b], 'unordered', True
1545
).next().get_bytes_as('fulltext'))
1547
def test_get_record_stream_empty(self):
1548
"""An empty stream can be requested without error."""
1549
f = self.get_versionedfiles()
1550
entries = f.get_record_stream([], 'unordered', False)
1551
self.assertEqual([], list(entries))
1553
def assertValidStorageKind(self, storage_kind):
1554
"""Assert that storage_kind is a valid storage_kind."""
1555
self.assertSubset([storage_kind],
1556
['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
1557
'knit-ft', 'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
1558
'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz'])
1560
def capture_stream(self, f, entries, on_seen, parents):
1561
"""Capture a stream for testing."""
1562
for factory in entries:
1563
on_seen(factory.key)
1564
self.assertValidStorageKind(factory.storage_kind)
1565
self.assertEqual(f.get_sha1s([factory.key])[0], factory.sha1)
1566
self.assertEqual(parents[factory.key], factory.parents)
1567
self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1570
def test_get_record_stream_interface(self):
1571
"""each item in a stream has to provide a regular interface."""
1572
files = self.get_versionedfiles()
1573
self.get_diamond_files(files)
1574
keys, _ = self.get_keys_and_sort_order()
1575
parent_map = files.get_parent_map(keys)
1576
entries = files.get_record_stream(keys, 'unordered', False)
1578
self.capture_stream(files, entries, seen.add, parent_map)
1579
self.assertEqual(set(keys), seen)
1581
def get_simple_key(self, suffix):
1582
"""Return a key for the object under test."""
1583
if self.key_length == 1:
1586
return ('FileA',) + (suffix,)
1588
def get_keys_and_sort_order(self):
1589
"""Get diamond test keys list, and their sort ordering."""
1590
if self.key_length == 1:
1591
keys = [('merged',), ('left',), ('right',), ('base',)]
1592
sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
1595
('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
1597
('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
1601
('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
1602
('FileA', 'base'):0,
1603
('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
1604
('FileB', 'base'):0,
1606
return keys, sort_order
1608
def test_get_record_stream_interface_ordered(self):
1609
"""each item in a stream has to provide a regular interface."""
1610
files = self.get_versionedfiles()
1611
self.get_diamond_files(files)
1612
keys, sort_order = self.get_keys_and_sort_order()
1613
parent_map = files.get_parent_map(keys)
1614
entries = files.get_record_stream(keys, 'topological', False)
1616
self.capture_stream(files, entries, seen.append, parent_map)
1617
self.assertStreamOrder(sort_order, seen, keys)
1619
def test_get_record_stream_interface_ordered_with_delta_closure(self):
1620
"""each item must be accessible as a fulltext."""
1621
files = self.get_versionedfiles()
1622
self.get_diamond_files(files)
1623
keys, sort_order = self.get_keys_and_sort_order()
1624
parent_map = files.get_parent_map(keys)
1625
entries = files.get_record_stream(keys, 'topological', True)
1627
for factory in entries:
1628
seen.append(factory.key)
1629
self.assertValidStorageKind(factory.storage_kind)
1630
self.assertSubset([factory.sha1], [None, files.get_sha1s([factory.key])[0]])
1631
self.assertEqual(parent_map[factory.key], factory.parents)
1632
# self.assertEqual(files.get_text(factory.key),
1633
self.assertIsInstance(factory.get_bytes_as('fulltext'), str)
1634
self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1636
self.assertStreamOrder(sort_order, seen, keys)
1638
def assertStreamOrder(self, sort_order, seen, keys):
1639
self.assertEqual(len(set(seen)), len(keys))
1640
if self.key_length == 1:
1643
lows = {('FileA',):0, ('FileB',):0}
1645
self.assertEqual(set(keys), set(seen))
1648
sort_pos = sort_order[key]
1649
self.assertTrue(sort_pos >= lows[key[:-1]],
1650
"Out of order in sorted stream: %r, %r" % (key, seen))
1651
lows[key[:-1]] = sort_pos
1653
def test_get_record_stream_unknown_storage_kind_raises(self):
1654
"""Asking for a storage kind that the stream cannot supply raises."""
1655
files = self.get_versionedfiles()
1656
self.get_diamond_files(files)
1657
if self.key_length == 1:
1658
keys = [('merged',), ('left',), ('right',), ('base',)]
1661
('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
1663
('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
1666
parent_map = files.get_parent_map(keys)
1667
entries = files.get_record_stream(keys, 'unordered', False)
1668
# We track the contents because we should be able to try, fail a
1669
# particular kind and then ask for one that works and continue.
1671
for factory in entries:
1672
seen.add(factory.key)
1673
self.assertValidStorageKind(factory.storage_kind)
1674
self.assertEqual(files.get_sha1s([factory.key])[0], factory.sha1)
1675
self.assertEqual(parent_map[factory.key], factory.parents)
1676
# currently no stream emits mpdiff
1677
self.assertRaises(errors.UnavailableRepresentation,
1678
factory.get_bytes_as, 'mpdiff')
1679
self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1681
self.assertEqual(set(keys), seen)
1683
def test_get_record_stream_missing_records_are_absent(self):
1684
files = self.get_versionedfiles()
1685
self.get_diamond_files(files)
1686
if self.key_length == 1:
1687
keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
1690
('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
1691
('FileA', 'absent'), ('FileA', 'base'),
1692
('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
1693
('FileB', 'absent'), ('FileB', 'base'),
1694
('absent', 'absent'),
1696
parent_map = files.get_parent_map(keys)
1697
entries = files.get_record_stream(keys, 'unordered', False)
1698
self.assertAbsentRecord(files, keys, parent_map, entries)
1699
entries = files.get_record_stream(keys, 'topological', False)
1700
self.assertAbsentRecord(files, keys, parent_map, entries)
1702
def assertAbsentRecord(self, files, keys, parents, entries):
1703
"""Helper for test_get_record_stream_missing_records_are_absent."""
1705
for factory in entries:
1706
seen.add(factory.key)
1707
if factory.key[-1] == 'absent':
1708
self.assertEqual('absent', factory.storage_kind)
1709
self.assertEqual(None, factory.sha1)
1710
self.assertEqual(None, factory.parents)
1712
self.assertValidStorageKind(factory.storage_kind)
1713
self.assertEqual(files.get_sha1s([factory.key])[0], factory.sha1)
1714
self.assertEqual(parents[factory.key], factory.parents)
1715
self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1717
self.assertEqual(set(keys), seen)
1719
def test_filter_absent_records(self):
    """Requested missing records can be filter trivially."""
    files = self.get_versionedfiles()
    self.get_diamond_files(files)
    keys, _ = self.get_keys_and_sort_order()
    parent_map = files.get_parent_map(keys)
    # Add an absent record in the middle of the present keys. (We don't ask
    # for just absent keys to ensure that content before and after the
    # absent keys is still delivered).
    present_keys = list(keys)
    if self.key_length == 1:
        keys.insert(2, ('extra',))
    else:
        keys.insert(2, ('extra', 'extra'))
    entries = files.get_record_stream(keys, 'unordered', False)
    seen = set()
    # filter_absent must drop the 'extra' record and pass the rest through.
    self.capture_stream(files, versionedfile.filter_absent(entries), seen.add,
        parent_map)
    self.assertEqual(set(present_keys), seen)
def get_mapper(self):
    """Get a mapper suitable for the key length of the test interface."""
    if self.key_length == 1:
        return ConstantMapper('source')
    else:
        return HashEscapedPrefixMapper()
def get_parents(self, parents):
    """Get parents, taking self.graph into consideration."""
    # NOTE(review): the body was lost in extraction; reconstructed from the
    # VersionedFiles contract (graphless stores report parents as None) —
    # confirm against version control.
    if self.graph:
        return parents
    else:
        return None
def test_get_parent_map(self):
    """get_parent_map answers immediately, for many keys, ignoring absent."""
    files = self.get_versionedfiles()
    if self.key_length == 1:
        parent_details = [
            (('r0',), self.get_parents(())),
            (('r1',), self.get_parents((('r0',),))),
            (('r2',), self.get_parents(())),
            (('r3',), self.get_parents(())),
            (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
            ]
    else:
        parent_details = [
            (('FileA', 'r0'), self.get_parents(())),
            (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
            (('FileA', 'r2'), self.get_parents(())),
            (('FileA', 'r3'), self.get_parents(())),
            (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
            ]
    for key, parents in parent_details:
        files.add_lines(key, parents, [])
        # immediately after adding it should be queryable.
        self.assertEqual({key:parents}, files.get_parent_map([key]))
    # We can ask for an empty set
    self.assertEqual({}, files.get_parent_map([]))
    # We can ask for many keys
    all_parents = dict(parent_details)
    self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
    # Absent keys are just not included in the result.
    keys = all_parents.keys()
    if self.key_length == 1:
        keys.insert(1, ('missing',))
    else:
        keys.insert(1, ('missing', 'missing'))
    # Absent keys are just ignored
    self.assertEqual(all_parents, files.get_parent_map(keys))
def test_get_sha1s(self):
    """get_sha1s returns the sha1 of each requested key's fulltext."""
    files = self.get_versionedfiles()
    self.get_diamond_files(files)
    if self.key_length == 1:
        keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
    else:
        # ask for shas from different prefixes.
        keys = [
            ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
            ('FileA', 'merged'), ('FileB', 'right'),
            ]
    # Expected sha1s of the diamond fixture texts, in request order.
    self.assertEqual([
        '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
        '00e364d235126be43292ab09cb4686cf703ddc17',
        'a8478686da38e370e32e42e8a0c220e33ee9132f',
        'ed8bce375198ea62444dc71952b22cfc2b09226d',
        '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
        ],
        files.get_sha1s(keys))
def test_insert_record_stream_empty(self):
    """Inserting an empty record stream should work."""
    files = self.get_versionedfiles()
    files.insert_record_stream([])
def assertIdenticalVersionedFile(self, expected, actual):
    """Assert that left and right have the same contents."""
    self.assertEqual(set(actual.keys()), set(expected.keys()))
    actual_parents = actual.get_parent_map(actual.keys())
    if self.graph:
        self.assertEqual(actual_parents, expected.get_parent_map(expected.keys()))
    else:
        # Graphless stores must report every key's parents as None.
        for key, parents in actual_parents.items():
            self.assertEqual(None, parents)
    for key in actual.keys():
        actual_text = actual.get_record_stream(
            [key], 'unordered', True).next().get_bytes_as('fulltext')
        expected_text = expected.get_record_stream(
            [key], 'unordered', True).next().get_bytes_as('fulltext')
        self.assertEqual(actual_text, expected_text)
def test_insert_record_stream_fulltexts(self):
    """Any file should accept a stream of fulltexts."""
    files = self.get_versionedfiles()
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    # weaves always output fulltexts.
    source = make_versioned_files_factory(WeaveFile, mapper)(
        source_transport)
    # Use default (eol-terminated) texts here; the _noeol variant of this
    # test covers trailing_eol=False, so they must not be duplicates.
    self.get_diamond_files(source)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
def test_insert_record_stream_fulltexts_noeol(self):
    """Any file should accept a stream of fulltexts."""
    files = self.get_versionedfiles()
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    # weaves always output fulltexts.
    source = make_versioned_files_factory(WeaveFile, mapper)(
        source_transport)
    # Texts without a trailing eol exercise the no-eol code paths.
    self.get_diamond_files(source, trailing_eol=False)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
def test_insert_record_stream_annotated_knits(self):
    """Any file should accept a stream from plain knits."""
    files = self.get_versionedfiles()
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    # True -> annotated knit source.
    source = make_file_factory(True, mapper)(source_transport)
    self.get_diamond_files(source)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
def test_insert_record_stream_annotated_knits_noeol(self):
    """Any file should accept a stream from plain knits."""
    files = self.get_versionedfiles()
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    # True -> annotated knit source; no-eol texts exercise eol handling.
    source = make_file_factory(True, mapper)(source_transport)
    self.get_diamond_files(source, trailing_eol=False)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
def test_insert_record_stream_plain_knits(self):
    """Any file should accept a stream from plain knits."""
    files = self.get_versionedfiles()
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    # False -> unannotated (plain) knit source.
    source = make_file_factory(False, mapper)(source_transport)
    self.get_diamond_files(source)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
def test_insert_record_stream_plain_knits_noeol(self):
    """Any file should accept a stream from plain knits."""
    files = self.get_versionedfiles()
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    # False -> unannotated (plain) knit source; no-eol texts.
    source = make_file_factory(False, mapper)(source_transport)
    self.get_diamond_files(source, trailing_eol=False)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
def test_insert_record_stream_existing_keys(self):
    """Inserting keys already in a file should not error."""
    files = self.get_versionedfiles()
    source = self.get_versionedfiles('source')
    self.get_diamond_files(source)
    # insert some keys into f.
    self.get_diamond_files(files, left_only=True)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
def test_insert_record_stream_missing_keys(self):
    """Inserting a stream with absent keys should raise an error."""
    files = self.get_versionedfiles()
    source = self.get_versionedfiles('source')
    # The source is empty, so every record in the stream is absent.
    stream = source.get_record_stream([('missing',) * self.key_length],
        'topological', False)
    self.assertRaises(errors.RevisionNotPresent, files.insert_record_stream,
        stream)
def test_insert_record_stream_out_of_order(self):
    """An out of order stream can either error or work."""
    files = self.get_versionedfiles()
    source = self.get_versionedfiles('source')
    self.get_diamond_files(source)
    if self.key_length == 1:
        origin_keys = [('origin',)]
        end_keys = [('merged',), ('left',)]
        start_keys = [('right',), ('base',)]
    else:
        origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
        end_keys = [('FileA', 'merged',), ('FileA', 'left',),
            ('FileB', 'merged',), ('FileB', 'left',)]
        start_keys = [('FileA', 'right',), ('FileA', 'base',),
            ('FileB', 'right',), ('FileB', 'base',)]
    # Deliver the graph tips before their bases to make the stream
    # out of order.
    origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
    end_entries = source.get_record_stream(end_keys, 'topological', False)
    start_entries = source.get_record_stream(start_keys, 'topological', False)
    entries = chain(origin_entries, end_entries, start_entries)
    try:
        files.insert_record_stream(entries)
    except RevisionNotPresent:
        # Must not have corrupted the file.
        files.check()
    else:
        self.assertIdenticalVersionedFile(source, files)
def test_insert_record_stream_delta_missing_basis_no_corruption(self):
    """Insertion where a needed basis is not included aborts safely."""
    # We use a knit always here to be sure we are getting a binary delta.
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    source = make_file_factory(False, mapper)(source_transport)
    self.get_diamond_files(source)
    # 'merged' is a delta whose basis ('left'/'right') is not requested.
    entries = source.get_record_stream(['origin', 'merged'], 'unordered', False)
    files = self.get_versionedfiles()
    self.assertRaises(RevisionNotPresent, files.insert_record_stream,
        entries)
    # The failed insert must leave a consistent, empty file behind.
    files.check()
    self.assertEqual({}, files.get_parent_map([]))
def test_iter_lines_added_or_present_in_keys(self):
    """iter_lines_added_or_present_in_keys yields at least the added lines."""
    # test that we get at least an equalset of the lines added by
    # versions in the store.
    # the ordering here is to make a tree so that dumb searches have
    # more changes to muck up.

    class InstrumentedProgress(progress.DummyProgress):
        # Records every update() call so the test can check progress
        # reporting.

        def __init__(self):
            progress.DummyProgress.__init__(self)
            self.updates = []

        def update(self, msg=None, current=None, total=None):
            self.updates.append((msg, current, total))

    files = self.get_versionedfiles()
    # add a base to get included
    files.add_lines(self.get_simple_key('base'), (), ['base\n'])
    # add a ancestor to be included on one side
    files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
    # add a ancestor to be included on the other side
    files.add_lines(self.get_simple_key('rancestor'),
        self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
    # add a child of rancestor with no eofile-nl
    files.add_lines(self.get_simple_key('child'),
        self.get_parents([self.get_simple_key('rancestor')]),
        ['base\n', 'child\n'])
    # add a child of lancestor and base to join the two roots
    files.add_lines(self.get_simple_key('otherchild'),
        self.get_parents([self.get_simple_key('lancestor'),
            self.get_simple_key('base')]),
        ['base\n', 'lancestor\n', 'otherchild\n'])

    def iter_with_keys(keys, expected):
        # now we need to see what lines are returned, and how often.
        lines = {}
        progress = InstrumentedProgress()
        # iterate over the lines
        for line in files.iter_lines_added_or_present_in_keys(keys,
            pb=progress):
            lines.setdefault(line, 0)
            lines[line] += 1
        if [] != progress.updates:
            self.assertEqual(expected, progress.updates)
        return lines

    lines = iter_with_keys(
        [self.get_simple_key('child'), self.get_simple_key('otherchild')],
        [('Walking content.', 0, 2),
         ('Walking content.', 1, 2),
         ('Walking content.', 2, 2)])
    # we must see child and otherchild
    self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
    self.assertTrue(
        lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
    # we dont care if we got more than that.

    # test all lines
    lines = iter_with_keys(files.keys(),
        [('Walking content.', 0, 5),
         ('Walking content.', 1, 5),
         ('Walking content.', 2, 5),
         ('Walking content.', 3, 5),
         ('Walking content.', 4, 5),
         ('Walking content.', 5, 5)])
    # all lines must be seen at least once
    self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
    self.assertTrue(
        lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
    self.assertTrue(
        lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
    self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
    self.assertTrue(
        lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
def test_make_mpdiffs(self):
    """Round-trip every text through make_mpdiffs/add_mpdiffs."""
    from bzrlib import multiparent
    files = self.get_versionedfiles('source')
    # add texts that should trip the knit maximum delta chain threshold
    # as well as doing parallel chains of data in knits.
    # this is done by two chains of 25 insertions
    files.add_lines(self.get_simple_key('base'), [], ['line\n'])
    files.add_lines(self.get_simple_key('noeol'),
        self.get_parents([self.get_simple_key('base')]), ['line'])
    # detailed eol tests:
    # shared last line with parent no-eol
    files.add_lines(self.get_simple_key('noeolsecond'),
        self.get_parents([self.get_simple_key('noeol')]),
            ['line\n', 'line'])
    # differing last line with parent, both no-eol
    files.add_lines(self.get_simple_key('noeolnotshared'),
        self.get_parents([self.get_simple_key('noeolsecond')]),
            ['line\n', 'phone'])
    # add eol following a noneol parent, change content
    files.add_lines(self.get_simple_key('eol'),
        self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
    # add eol following a noneol parent, no change content
    files.add_lines(self.get_simple_key('eolline'),
        self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
    # noeol with no parents:
    files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
    # noeol preceeding its leftmost parent in the output:
    # this is done by making it a merge of two parents with no common
    # anestry: noeolbase and noeol with the
    # later-inserted parent the leftmost.
    files.add_lines(self.get_simple_key('eolbeforefirstparent'),
        self.get_parents([self.get_simple_key('noeolbase'),
            self.get_simple_key('noeol')]),
        ['line'])
    # two identical eol texts
    files.add_lines(self.get_simple_key('noeoldup'),
        self.get_parents([self.get_simple_key('noeol')]), ['line'])
    next_parent = self.get_simple_key('base')
    text_name = 'chain1-'
    text = ['line\n']
    # NOTE(review): sha1s is not referenced below; kept from the original
    # source — confirm against version control before removing.
    sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
             1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
             2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
             3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
             4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
             5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
             6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
             7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
             8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
             9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
             10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
             11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
             12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
             13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
             14:'2c4b1736566b8ca6051e668de68650686a3922f2',
             15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
             16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
             17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
             18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
             19:'1ebed371807ba5935958ad0884595126e8c4e823',
             20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
             21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
             22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
             23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
             24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
             25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
             }
    for depth in range(26):
        new_version = self.get_simple_key(text_name + '%s' % depth)
        text = text + ['line\n']
        files.add_lines(new_version, self.get_parents([next_parent]), text)
        next_parent = new_version
    next_parent = self.get_simple_key('base')
    text_name = 'chain2-'
    text = ['line\n']
    for depth in range(26):
        new_version = self.get_simple_key(text_name + '%s' % depth)
        text = text + ['line\n']
        files.add_lines(new_version, self.get_parents([next_parent]), text)
        next_parent = new_version
    target = self.get_versionedfiles('target')
    # Replay every text as an mpdiff and check the fulltexts round-trip.
    for key in multiparent.topo_iter_keys(files, files.keys()):
        mpdiff = files.make_mpdiffs([key])[0]
        parents = files.get_parent_map([key])[key] or []
        target.add_mpdiffs(
            [(key, parents, files.get_sha1s([key])[0], mpdiff)])
        self.assertEqualDiff(
            files.get_record_stream([key], 'unordered',
                True).next().get_bytes_as('fulltext'),
            target.get_record_stream([key], 'unordered',
                True).next().get_bytes_as('fulltext'))
def test_keys(self):
    """keys() reflects additions immediately."""
    # While use is discouraged, versions() is still needed by aspects of
    # bzr.
    files = self.get_versionedfiles()
    self.assertEqual(set(), set(files.keys()))
    if self.key_length == 1:
        key = ('foo',)
    else:
        key = ('foo', 'bar',)
    files.add_lines(key, (), [])
    self.assertEqual(set([key]), set(files.keys()))