        f = self.reopen_file(create=True)

    def test_get_record_stream_empty(self):
        """get_record_stream is a replacement for get_data_stream."""
        f = self.get_file()
        entries = f.get_record_stream([], 'unordered', False)
        self.assertEqual([], list(entries))
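
    # The record stream tests below use the diamond fixture supplied by
    # get_diamond_vf: a standalone 'origin' text plus a diamond graph in
    # which 'base' is the common ancestor of 'left' and 'right', and
    # 'merged' joins the two sides.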

    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
             'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz'])

    def capture_stream(self, f, entries, on_seen, parents):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertEqual(f.get_sha1s([factory.key[0]])[0], factory.sha1)
            self.assertEqual(parents[factory.key[0]], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)

    def test_get_record_stream_interface(self):
        """Each item in a stream has to provide a regular interface."""
        f, parents = get_diamond_vf(self.get_file())
        entries = f.get_record_stream(['merged', 'left', 'right', 'base'],
            'unordered', False)
        seen = set()
        self.capture_stream(f, entries, seen.add, parents)
        self.assertEqual(set([('base',), ('left',), ('right',), ('merged',)]),
            seen)

    def test_get_record_stream_interface_ordered(self):
        """Each item in a stream has to provide a regular interface."""
        f, parents = get_diamond_vf(self.get_file())
        entries = f.get_record_stream(['merged', 'left', 'right', 'base'],
            'topological', False)
        seen = []
        self.capture_stream(f, entries, seen.append, parents)
        self.assertSubset([tuple(seen)],
            (
             (('base',), ('left',), ('right',), ('merged',)),
             (('base',), ('right',), ('left',), ('merged',)),
            ))

    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """Each item in a stream has to provide a regular interface."""
        f, parents = get_diamond_vf(self.get_file())
        entries = f.get_record_stream(['merged', 'left', 'right', 'base'],
            'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertEqual(f.get_sha1s([factory.key[0]])[0], factory.sha1)
            self.assertEqual(parents[factory.key[0]], factory.parents)
            self.assertEqual(f.get_text(factory.key[0]),
                factory.get_bytes_as('fulltext'))
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertSubset([tuple(seen)],
            (
             (('base',), ('left',), ('right',), ('merged',)),
             (('base',), ('right',), ('left',), ('merged',)),
            ))

    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        f, parents = get_diamond_vf(self.get_file())
        entries = f.get_record_stream(['merged', 'left', 'right', 'base'],
            'unordered', False)
        # We track the contents because we should be able to try, fail a
        # particular kind and then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertEqual(f.get_sha1s([factory.key[0]])[0], factory.sha1)
            self.assertEqual(parents[factory.key[0]], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set([('base',), ('left',), ('right',), ('merged',)]),
            seen)

    def test_get_record_stream_missing_records_are_absent(self):
        f, parents = get_diamond_vf(self.get_file())
        entries = f.get_record_stream(['merged', 'left', 'right', 'or', 'base'],
            'unordered', False)
        self.assertAbsentRecord(f, parents, entries)
        entries = f.get_record_stream(['merged', 'left', 'right', 'or', 'base'],
            'topological', False)
        self.assertAbsentRecord(f, parents, entries)

    def assertAbsentRecord(self, f, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key == ('or',):
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                self.assertEqual(f.get_sha1s([factory.key[0]])[0], factory.sha1)
                self.assertEqual(parents[factory.key[0]], factory.parents)
                self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                    str)
        self.assertEqual(
            set([('base',), ('left',), ('right',), ('merged',), ('or',)]),
            seen)

    def test_filter_absent_records(self):
        """Requested missing records can be filtered trivially."""
        f, parents = get_diamond_vf(self.get_file())
        entries = f.get_record_stream(['merged', 'left', 'right', 'extra', 'base'],
            'unordered', False)
        seen = set()
        self.capture_stream(f, versionedfile.filter_absent(entries), seen.add,
            parents)
        self.assertEqual(set([('base',), ('left',), ('right',), ('merged',)]),
            seen)

    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        f = self.get_file()
        f.insert_record_stream([])

    def assertIdenticalVersionedFile(self, left, right):
        """Assert that left and right have the same contents."""
        self.assertEqual(set(left.versions()), set(right.versions()))
        self.assertEqual(left.get_parent_map(left.versions()),
            right.get_parent_map(right.versions()))
        for v in left.versions():
            self.assertEqual(left.get_text(v), right.get_text(v))
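
    # The insert_record_stream tests below all follow the shape of a fetch:
    # populate a source file, pull a record stream out of it, insert the
    # stream into the file under test, and check both ends are identical.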

    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        f = self.get_file()
        weave_vf = WeaveFile('source', get_transport(self.get_url('.')),
            create=True, get_scope=self.get_transaction)
        source, _ = get_diamond_vf(weave_vf)
        stream = source.get_record_stream(source.versions(), 'topological',
            False)
        f.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(f, source)

    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts."""
        f = self.get_file()
        weave_vf = WeaveFile('source', get_transport(self.get_url('.')),
            create=True, get_scope=self.get_transaction)
        source, _ = get_diamond_vf(weave_vf, trailing_eol=False)
        stream = source.get_record_stream(source.versions(), 'topological',
            False)
        f.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(f, source)

    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from annotated knits."""
        f = self.get_file()
        source = make_file_knit('source', get_transport(self.get_url('.')),
            create=True)
        get_diamond_vf(source)
        stream = source.get_record_stream(source.versions(), 'topological',
            False)
        f.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(f, source)

    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a stream from annotated knits."""
        f = self.get_file()
        source = make_file_knit('source', get_transport(self.get_url('.')),
            create=True)
        get_diamond_vf(source, trailing_eol=False)
        stream = source.get_record_stream(source.versions(), 'topological',
            False)
        f.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(f, source)

    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        f = self.get_file()
        source = make_file_knit('source', get_transport(self.get_url('.')),
            create=True, factory=KnitPlainFactory())
        get_diamond_vf(source)
        stream = source.get_record_stream(source.versions(), 'topological',
            False)
        f.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(f, source)

    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a stream from plain knits."""
        f = self.get_file()
        source = make_file_knit('source', get_transport(self.get_url('.')),
            create=True, factory=KnitPlainFactory())
        get_diamond_vf(source, trailing_eol=False)
        stream = source.get_record_stream(source.versions(), 'topological',
            False)
        f.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(f, source)

    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        f = self.get_file()
        source = make_file_knit('source', get_transport(self.get_url('.')),
            create=True, factory=KnitPlainFactory())
        get_diamond_vf(source)
        # insert some keys into f.
        get_diamond_vf(f, left_only=True)
        stream = source.get_record_stream(source.versions(), 'topological',
            False)
        f.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(f, source)

    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        f = self.get_file()
        source = make_file_knit('source', get_transport(self.get_url('.')),
            create=True, factory=KnitPlainFactory())
        stream = source.get_record_stream(['missing'], 'topological',
            False)
        self.assertRaises(errors.RevisionNotPresent, f.insert_record_stream,
            stream)

    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        f, parents = get_diamond_vf(self.get_file())
        origin_entries = f.get_record_stream(['origin'], 'unordered', False)
        end_entries = f.get_record_stream(['merged', 'left'],
            'topological', False)
        start_entries = f.get_record_stream(['right', 'base'],
            'topological', False)
        entries = chain(origin_entries, end_entries, start_entries)
        target = self.get_file('target')
        try:
            target.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            target.check()
        else:
            self.assertIdenticalVersionedFile(f, target)
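
        # Either outcome is acceptable: an implementation may buffer and
        # reorder the records, or raise RevisionNotPresent on seeing a record
        # whose parents have not yet arrived, provided the file stays intact.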

    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included aborts safely."""
        # Annotated source - deltas can be used in any knit.
        source = make_file_knit('source', get_transport(self.get_url('.')),
            create=True)
        get_diamond_vf(source)
        entries = source.get_record_stream(['origin', 'merged'], 'unordered', False)
        f = self.get_file()
        self.assertRaises(RevisionNotPresent, f.insert_record_stream, entries)
        f.check()
        self.assertFalse(f.has_version('merged'))

    def test_adds_with_parent_texts(self):
        f = self.get_file()
        parent_texts = {}


class TestKnit(TestCaseWithMemoryTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo', create=True):
        return make_file_knit(name, get_transport(self.get_url('.')),
            delta=True, create=create, get_scope=self.get_transaction)

    def get_factory(self):
        return make_file_knit

    def get_file_corrupted_text(self):
        knit = self.get_file()
        knit.add_lines('v1', [], ['hello\n'])
        knit.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
        return knit

    def reopen_file(self, name='foo', create=False):
        return self.get_file(name, create)

    def test_detection(self):
        knit = self.get_file()
        knit.check()

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile, self.get_factory(), 'foo',
            get_transport(self.get_url('.')))


class TestPlaintextKnit(TestKnit):
    """Test a knit with no cached annotations."""

    def get_file(self, name='foo', create=True):
        return make_file_knit(name, get_transport(self.get_url('.')),
            delta=True, create=create, get_scope=self.get_transaction,
            factory=_mod_knit.KnitPlainFactory())


class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):

    def setUp(self):
        TestCaseWithMemoryTransport.setUp(self)
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])
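
    # Keys in the plan-merge file itself end in ':' (uncommitted merge-plan
    # texts); anything else is resolved through the fallback files vf1 and
    # vf2 registered above.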

    def test_add_lines(self):
        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), [], None)

    def setup_abcde(self):
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(('root', 'E:'),
            [('root', 'B'), ('root', 'D')], ['e'])
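
    # setup_abcde builds A -> B in vf1, C -> D in vf2, and an uncommitted
    # 'E:' in the plan-merge file that merges B and D.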

    def test_get_parents(self):
        self.setup_abcde()
        self.assertEqual({('root', 'B'):(('root', 'A'),)},
            self.plan_merge_vf.get_parent_map([('root', 'B')]))
        self.assertEqual({('root', 'D'):(('root', 'C'),)},
            self.plan_merge_vf.get_parent_map([('root', 'D')]))
        self.assertEqual({('root', 'E:'):(('root', 'B'), ('root', 'D'))},
            self.plan_merge_vf.get_parent_map([('root', 'E:')]))
        self.assertEqual({},
            self.plan_merge_vf.get_parent_map([('root', 'F')]))
        self.assertEqual({
                ('root', 'B'):(('root', 'A'),),
                ('root', 'D'):(('root', 'C'),),
                ('root', 'E:'):(('root', 'B'), ('root', 'D')),
                },
            self.plan_merge_vf.get_parent_map(
                [('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()
        def get_record(suffix):
            return self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True).next()
        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record('F').storage_kind)


class InterString(versionedfile.InterVersionedFile):
    """An inter-versionedfile optimised code path for strings.

    This is for use during testing where we use strings as versionedfiles
    so that none of the default registered interversionedfile classes will
    match - which lets us test the match logic.
    """

    @staticmethod
    def is_compatible(source, target):
        """InterString is compatible with strings-as-versionedfiles."""
        return isinstance(source, str) and isinstance(target, str)


# TODO this and the InterRepository core logic should be consolidatable
# if we make the registry a separate class though we still need to
# test the behaviour in the active registry to catch failure-to-handle-
class TestInterVersionedFile(TestCaseWithMemoryTransport):

    def test_get_default_inter_versionedfile(self):
        # test that the InterVersionedFile.get(a, b) probes
        # for a class where is_compatible(a, b) returns
        # true and returns a default interversionedfile otherwise.
        # This also tests that the default registered optimised
        # interversionedfile classes do not barf inappropriately when a
        # surprising versionedfile type is handed to them.
        dummy_a = "VersionedFile 1."
        dummy_b = "VersionedFile 2."
        self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)

    def assertGetsDefaultInterVersionedFile(self, a, b):
        """Asserts that InterVersionedFile.get(a, b) -> the default."""
        inter = versionedfile.InterVersionedFile.get(a, b)
        self.assertEqual(versionedfile.InterVersionedFile,
            inter.__class__)
        self.assertEqual(a, inter.source)
        self.assertEqual(b, inter.target)

    def test_register_inter_versionedfile_class(self):
        # test that an optimised code path provider - an
        # InterVersionedFile subclass can be registered and unregistered
        # and that it is correctly selected when given a versionedfile
        # pair that it returns true on for the is_compatible static method.
        dummy_a = "VersionedFile 1."
        dummy_b = "VersionedFile 2."
        versionedfile.InterVersionedFile.register_optimiser(InterString)
        try:
            # we should get the default for something InterString returns
            # False to.
            self.assertFalse(InterString.is_compatible(dummy_a, None))
            self.assertGetsDefaultInterVersionedFile(dummy_a, None)
            # and we should get an InterString for a pair it 'likes'
            self.assertTrue(InterString.is_compatible(dummy_a, dummy_b))
            inter = versionedfile.InterVersionedFile.get(dummy_a, dummy_b)
            self.assertEqual(InterString, inter.__class__)
            self.assertEqual(dummy_a, inter.source)
            self.assertEqual(dummy_b, inter.target)
        finally:
            versionedfile.InterVersionedFile.unregister_optimiser(InterString)
        # now we should get the default InterVersionedFile object again.
        self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)
class TestReadonlyHttpMixin(object):

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)


class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))

    def test_prefix_mapper(self):
        #format5: plain
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        #format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        #knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(lambda:self.cleanup(files))
        return files

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([
            (prefix + ('origin',), 'origin\n')],
            origins)
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([
            (prefix + ('base',), 'base\n')],
            origins)
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            self.assertEqual([
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + ('merged',), 'base\n'),
                (prefix + ('merged',), 'left\n'),
                (prefix + ('merged',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))

    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False):
        return get_diamond_files(files, self.key_length,
            trailing_eol=trailing_eol, nograph=not self.graph,
            left_only=left_only)
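
    # self.key_length, self.graph and self.factory come from the test
    # parameterisation: length-1 keys exercise a single-file store, length-2
    # keys a prefixed per-file store, and graph=False covers stores that do
    # not record ancestry.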

    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
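
    # add_lines returns a 3-tuple; only the first two elements (the sha1 and
    # the text length) are stable across implementations, hence add[:2]
    # above.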

    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        self.assertEqual('',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual('',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        f = self.get_versionedfiles()
        entries = f.get_record_stream([], 'unordered', False)
        self.assertEqual([], list(entries))

    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
             'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz'])

    def capture_stream(self, f, entries, on_seen, parents):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)

    def test_get_record_stream_interface(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, entries, seen.add, parent_map)
        self.assertEqual(set(keys), seen)

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)

    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):0,
                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
                ('FileB', 'base'):0,
                }
        return keys, sort_order
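
    # sort_order maps each key to its depth in the diamond; assertStreamOrder
    # uses it to check that a topological stream never yields a key before a
    # shallower key with the same prefix.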

    def test_get_record_stream_interface_ordered(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """Each item must be accessible as a fulltext."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertSubset([factory.sha1],
                [None, files.get_sha1s([factory.key])[factory.key]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            # self.assertEqual(files.get_text(factory.key),
            #     factory.get_bytes_as('fulltext'))
            self.assertIsInstance(factory.get_bytes_as('fulltext'), str)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertStreamOrder(sort_order, seen, keys)

    def assertStreamOrder(self, sort_order, seen, keys):
        self.assertEqual(len(set(seen)), len(keys))
        if self.key_length == 1:
            lows = {():0}
        else:
            lows = {('FileA',):0, ('FileB',):0}
        if not self.graph:
            self.assertEqual(set(keys), set(seen))
        else:
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                    "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos

    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        # We track the contents because we should be able to try, fail a
        # particular kind and then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                factory.sha1)
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)

    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'absent'), ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'absent'), ('FileB', 'base'),
                ('absent', 'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
        entries = files.get_record_stream(keys, 'topological', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)

    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                    str)
        self.assertEqual(set(keys), seen)

    def test_filter_absent_records(self):
        """Requested missing records can be filtered trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't
        # ask for just absent keys to ensure that content before and after
        # the absent keys is still delivered).
        present_keys = list(keys)
        if self.key_length == 1:
            keys.insert(2, ('extra',))
        else:
            keys.insert(2, ('extra', 'extra'))
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries),
            seen.add, parent_map)
        self.assertEqual(set(present_keys), seen)

    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        if self.key_length == 1:
            return ConstantMapper('source')
        else:
            return HashEscapedPrefixMapper()
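
    # With length-1 keys everything lives in a single 'source' file; with
    # length-2 keys records are spread over one file per escaped, hashed
    # prefix.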

    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration."""
        if self.graph:
            return parents
        else:
            return ()

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key:parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = all_parents.keys()
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))

    def test_get_sha1s(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        self.assertEqual({
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))

    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        files = self.get_versionedfiles()
        files.insert_record_stream([])

    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that left and right have the same contents."""
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents,
                expected.get_parent_map(expected.keys()))
        else:
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            actual_text = actual.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            expected_text = expected.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)

    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # insert some keys into files.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        stream = source.get_record_stream([('missing',) * self.key_length],
            'topological', False)
        self.assertRaises(errors.RevisionNotPresent, files.insert_record_stream,
            stream)

    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological', False)
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included aborts safely."""
        # We use a knit always here to be sure we are getting a binary delta.
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        entries = source.get_record_stream(['origin', 'merged'], 'unordered', False)
        files = self.get_versionedfiles()
        self.assertRaises(RevisionNotPresent, files.insert_record_stream,
            entries)
        files.check()
        self.assertEqual({}, files.get_parent_map([]))

    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equalset of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])
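
        # The graph built above: base -> rancestor -> child on one side,
        # lancestor -> otherchild on the other, with otherchild also taking
        # base as a parent to join the two roots.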

        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content.', 0, 2),
             ('Walking content.', 1, 2),
             ('Walking content.', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we dont care if we got more than that.

        lines = iter_with_keys(files.keys(),
            [('Walking content.', 0, 5),
             ('Walking content.', 1, 5),
             ('Walking content.', 2, 5),
             ('Walking content.', 3, 5),
             ('Walking content.', 4, 5),
             ('Walking content.', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
            ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
            ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical no-eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        next_parent = self.get_simple_key('base')
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key('base')
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'))

    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # bzr.
        files = self.get_versionedfiles()
        self.assertEqual(set(), set(files.keys()))
        if self.key_length == 1:
            key = ('foo',)
        else:
            key = ('foo', 'bar',)
        files.add_lines(key, (), [])
        self.assertEqual(set([key]), set(files.keys()))


class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        ret = {}
        for k in keys:
            if k in self._parent_map:
                ret[k] = self._parent_map[k]
        return ret

    def setUp(self):
        TestCase.setUp(self)
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
            self._lines.get)

    def test_add_lines(self):
        self.assertRaises(NotImplementedError,
            self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        self.assertRaises(NotImplementedError,
            self.texts.add_mpdiffs, [])

    def test_check(self):
        self.assertTrue(self.texts.check())

    def test_insert_record_stream(self):
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,
            [])

    def test_get_sha1s_nonexistent(self):
        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
            self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        self._parent_map = {"G": ("A", "B")}
        self.assertEquals({("G",): (("A",),("B",))},
            self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("fulltext", record.storage_kind)
        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("absent", record.storage_kind)