        except NotImplementedError:

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")
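
    # A minimal restatement of the nostore_sha contract exercised above,
    # using only fixtures already present in this class (illustrative
    # sketch, not part of the original suite):
    def test_add_lines_nostoresha_simple(self):
        vf = self.get_file()
        sha, _, _ = vf.add_lines('v1', [], ['foo\n'])
        # Re-adding identical content under a new version id with
        # nostore_sha=sha is refused with ExistingContent.
        self.assertRaises(errors.ExistingContent,
            vf.add_lines, 'v2', [], ['foo\n'], nostore_sha=sha)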

    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines_with_ghosts, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")

    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
            # - the digest and the size of the text. For some versioned files
            #   additional data is returned in additional tuple elements.
            result = vf.add_lines(version, [], lines)
            self.assertEqual(3, len(result))
            self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                result[0:2])
        # parents should not affect the result:
        lines = sample_text_nl[1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
            vf.add_lines('d', ['b', 'c'], lines)[0:2])
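
    # The (sha1, size, opaque) shape validated above, spelled out on a
    # concrete text (illustrative sketch, not part of the original suite):
    #
    #   sha, size, _ = vf.add_lines('v', [], ['foo\n', 'bar\n'])
    #   sha == osutils.sha_strings(['foo\n', 'bar\n'])
    #   size == len('foo\n') + len('bar\n')  # == 8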

    def test_get_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')

    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Add a text with an unchanged last line with no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            prefix = 'step-%d'
            parents = []
            for step in range(length):
                version = prefix % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))
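
    # The chain-forcing trick above in miniature (illustrative restatement,
    # not part of the original suite): every step shares only the final
    # no-eol 'line' with its parent, while the growing prelude keeps the
    # delta chain extending until the store decides to snapshot:
    #
    #   step 0: ['line']
    #   step 1: ['prelude \n', 'line']
    #   step 2: ['prelude \n', 'prelude \n', 'line']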

    def test_get_texts_eol_variation(self):
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
        versions = []
        version_lines = {}
        parents = []
        for i in range(4):
            version = 'v%d' % i
            if i % 2:
                lines = sample_text_nl
            else:
                lines = sample_text_no_nl
            # left_matching blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines

        vf.get_texts(versions)
        vf.get_texts(reversed(versions))
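
    # Reading the triple used above (illustrative restatement, not part of
    # the original suite): a matching block (left_start, new_start, length)
    # of (0, 0, 1) says line 0 of the left parent matches line 0 of the new
    # text for a run of one line - for these one-line texts, an empty delta.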

    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Add a text with an unchanged last line with no eol should work."""
        from bzrlib import multiparent
        # Hand verified sha1 of the text we're adding.
        sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
        # Create a mpdiff which adds a new line before the trailing line, and
        # reuse the last line unaltered (which can cause annotation reuse).
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        vf = self.get_file('foo')
        sha1s = self._setup_for_deltas(vf)
        new_vf = self.get_file('bar')
        for version in multiparent.topo_iter(vf):
            mpdiff = vf.make_mpdiffs([version])[0]
            new_vf.add_mpdiffs([(version, vf.get_parent_map([version])[version],
                                 vf.get_sha1s([version])[version], mpdiff)])
            self.assertEqualDiff(vf.get_text(version),
                                 new_vf.get_text(version))
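
    # Shape of the records handed to add_mpdiffs above (restated from this
    # test, not an API reference):
    #
    #   (version_id, parent_ids, expected_sha1, mpdiff)
    #
    # which lets a diff produced by make_mpdiffs be replayed into another
    # versioned file and verified against the original text.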

    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])

    def test_get_delta(self):
        f = self.get_file()
        sha1s = self._setup_for_deltas(f)
        expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
                          [(0, 0, 1, [('base', 'line\n')])])
        self.assertEqual(expected_delta, f.get_delta('base'))
        next_parent = 'base'
        text_name = 'chain1-'
        for depth in range(26):
            new_version = text_name + '%s' % depth
            expected_delta = (next_parent, sha1s[depth], False,
                [(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
            self.assertEqual(expected_delta, f.get_delta(new_version))
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        for depth in range(26):
            new_version = text_name + '%s' % depth
            expected_delta = (next_parent, sha1s[depth], False,
                [(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
            self.assertEqual(expected_delta, f.get_delta(new_version))
            next_parent = new_version
        # smoke test for eol support
        expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
        self.assertEqual(['line'], f.get_lines('noeol'))
        self.assertEqual(expected_delta, f.get_delta('noeol'))
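
    # Reading the expected_delta tuples in this test (inferred from the
    # values above, not an API reference):
    #
    #   (parent_version, sha1_of_full_text, noeol_flag,
    #    [(start, end, count, [(origin_version, line), ...]), ...])
    #
    # e.g. ('base', ..., True, []) means: relative to 'base', no line
    # changes, and the text does not end in a newline.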

    def test_get_deltas(self):
        f = self.get_file()
        sha1s = self._setup_for_deltas(f)
        deltas = f.get_deltas(f.versions())
        expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
                          [(0, 0, 1, [('base', 'line\n')])])
        self.assertEqual(expected_delta, deltas['base'])
        next_parent = 'base'
        text_name = 'chain1-'
        for depth in range(26):
            new_version = text_name + '%s' % depth
            expected_delta = (next_parent, sha1s[depth], False,
                [(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
            self.assertEqual(expected_delta, deltas[new_version])
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        for depth in range(26):
            new_version = text_name + '%s' % depth
            expected_delta = (next_parent, sha1s[depth], False,
                [(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
            self.assertEqual(expected_delta, deltas[new_version])
            next_parent = new_version
        # smoke tests for eol support
        expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
        self.assertEqual(['line'], f.get_lines('noeol'))
        self.assertEqual(expected_delta, deltas['noeol'])
        # smoke tests for eol support - two noeol in a row same content
        expected_deltas = (('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
                            [(0, 1, 2, [(u'noeolsecond', 'line\n'), (u'noeolsecond', 'line\n')])]),
                           ('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
                            [(0, 0, 1, [('noeolsecond', 'line\n')]), (1, 1, 0, [])]))
        self.assertEqual(['line\n', 'line'], f.get_lines('noeolsecond'))
        self.assertTrue(deltas['noeolsecond'] in expected_deltas)
        # two no-eol in a row, different content
        expected_delta = ('noeolsecond', '8bb553a84e019ef1149db082d65f3133b195223b', True,
                          [(1, 2, 1, [(u'noeolnotshared', 'phone\n')])])
        self.assertEqual(['line\n', 'phone'], f.get_lines('noeolnotshared'))
        self.assertEqual(expected_delta, deltas['noeolnotshared'])
        # eol following a no-eol with content change
        expected_delta = ('noeol', 'a61f6fb6cfc4596e8d88c34a308d1e724caf8977', False,
                          [(0, 1, 1, [(u'eol', 'phone\n')])])
        self.assertEqual(['phone\n'], f.get_lines('eol'))
        self.assertEqual(expected_delta, deltas['eol'])
        # eol following a no-eol without content change
        expected_delta = ('noeol', '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
                          [(0, 1, 1, [(u'eolline', 'line\n')])])
        self.assertEqual(['line\n'], f.get_lines('eolline'))
        self.assertEqual(expected_delta, deltas['eolline'])
        # noeol with no parents
        expected_delta = (None, '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
                          [(0, 0, 1, [(u'noeolbase', 'line\n')])])
        self.assertEqual(['line'], f.get_lines('noeolbase'))
        self.assertEqual(expected_delta, deltas['noeolbase'])
        # noeol with two parents, in inverse insertion order
        expected_deltas = (('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
                            [(0, 1, 1, [(u'eolbeforefirstparent', 'line\n')])]),
                           ('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
                            [(0, 1, 1, [(u'eolbeforefirstparent', 'line\n')])]))
        self.assertEqual(['line'], f.get_lines('eolbeforefirstparent'))
        #self.assertTrue(deltas['eolbeforefirstparent'] in expected_deltas)

    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version('base'))
        self.assertRaises(errors.RevisionNotPresent, f.get_delta, 'base')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions

    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']


class TestContentFactoryAdaption(TestCaseWithMemoryTransport):

    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario for each lookup combination we expect to use.
        # Each is source_kind, requested_kind, adapter class
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source, requested, klass in scenarios:
            adapter_factory = versionedfile.adapter_registry.get(
                (source, requested))
            adapter = adapter_factory(None)
            self.assertIsInstance(adapter, klass)

    def get_knit(self, annotated=True):
        mapper = ConstantMapper('knit')
        transport = self.get_transport()
        return make_file_factory(annotated, mapper)(transport)

    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the adapted texts of interest for the tests."""
        # origin is a fulltext
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = entries.next()
        ft_data = ft_adapter.get_bytes(base, base.get_bytes_as(base.storage_kind))
        # merged is both a delta and has multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = entries.next()
        delta_data = delta_adapter.get_bytes(merged,
            merged.get_bytes_as(merged.storage_kind))
        return ft_data, delta_data
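
    # The adapter calling convention used in helpGetBytes, restated as a
    # sketch (record/adapter names illustrative, not part of the original
    # suite):
    #
    #   record = f.get_record_stream([key], 'unordered', False).next()
    #   raw = record.get_bytes_as(record.storage_kind)
    #   converted = adapter.get_bytes(record, raw)
    #
    # an adapter receives both the record and its serialised bytes, and
    # returns the bytes in the adapter's target representation.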

    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)


class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))

    def test_prefix_mapper(self):
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        #format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        #knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
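
    # The mapper contract exercised above, in sketch form (illustrative,
    # not part of the original suite): map() turns a key tuple into a
    # relative path derived from the key's prefix, and unmap() inverts the
    # path back to that prefix tuple:
    #
    #   mapper.map(("file-id", "revision-id"))  # -> "9b/file-id"
    #   mapper.unmap("9b/file-id")              # -> ("file-id",)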


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(lambda: self.cleanup(files))
        return files

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([
            (prefix + ('origin',), 'origin\n')],
            origins)
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([
            (prefix + ('base',), 'base\n')],
            origins)
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            self.assertEqual([
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + ('merged',), 'base\n'),
                (prefix + ('merged',), 'left\n'),
                (prefix + ('merged',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))

    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False):
        return get_diamond_files(files, self.key_length,
            trailing_eol=trailing_eol, nograph=not self.graph,
            left_only=left_only)

    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)

    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        self.assertEqual('',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual('',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        f = self.get_versionedfiles()
        entries = f.get_record_stream([], 'unordered', False)
        self.assertEqual([], list(entries))

    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
             'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz'])

    def capture_stream(self, f, entries, on_seen, parents):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)

    def test_get_record_stream_interface(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, entries, seen.add, parent_map)
        self.assertEqual(set(keys), seen)

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)

    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):0,
                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
                ('FileB', 'base'):0,
                }
        return keys, sort_order

    def test_get_record_stream_interface_ordered(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """each item must be accessible as a fulltext."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertSubset([factory.sha1],
                [None, files.get_sha1s([factory.key])[factory.key]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            # self.assertEqual(files.get_text(factory.key),
            self.assertIsInstance(factory.get_bytes_as('fulltext'), str)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertStreamOrder(sort_order, seen, keys)

    def assertStreamOrder(self, sort_order, seen, keys):
        self.assertEqual(len(set(seen)), len(keys))
        if self.key_length == 1:
            lows = {():0}
        else:
            lows = {('FileA',):0, ('FileB',):0}
        if not self.graph:
            self.assertEqual(set(keys), set(seen))
        else:
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                    "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos
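
    # The ordering contract checked here, in sketch form (illustrative
    # restatement, not part of the original suite): within each file prefix
    # a 'topological' stream must yield parents before children, so the
    # recorded sort positions (base:0, left/right:1, merged:2) never
    # decrease as the stream is consumed:
    #
    #   for key in seen:
    #       assert sort_order[key] >= lows[key[:-1]]
    #       lows[key[:-1]] = sort_order[key]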

    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        # We track the contents because we should be able to try, fail a
        # particular kind and then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                factory.sha1)
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)

    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'absent'), ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'absent'), ('FileB', 'base'),
                ('absent', 'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
        entries = files.get_record_stream(keys, 'topological', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)

    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                    str)
        self.assertEqual(set(keys), seen)
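
    # As asserted above, an 'absent' record is a stub: storage_kind is the
    # string 'absent' and both sha1 and parents are None, so a consumer can
    # detect missing keys without the stream raising mid-iteration.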

    def test_filter_absent_records(self):
        """Requested missing records can be filtered trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't ask
        # for just absent keys to ensure that content before and after the
        # absent keys is still delivered).
        present_keys = list(keys)
        if self.key_length == 1:
            keys.insert(2, ('extra',))
        else:
            keys.insert(2, ('extra', 'extra'))
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries), seen.add,
            parent_map)
        self.assertEqual(set(present_keys), seen)

    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        if self.key_length == 1:
            return ConstantMapper('source')
        else:
            return HashEscapedPrefixMapper()

    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration."""
        if self.graph:
            return parents
        else:
            return ()

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = (
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                )
        else:
            parent_details = (
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                )
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key:parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = all_parents.keys()
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))

    def test_get_sha1s(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        self.assertEqual({
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))

    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        files = self.get_versionedfiles()
        files.insert_record_stream([])

    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that left and right have the same contents."""
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents, expected.get_parent_map(expected.keys()))
        else:
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            actual_text = actual.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            expected_text = expected.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)

    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # insert some keys into f.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        stream = source.get_record_stream([('missing',) * self.key_length],
            'topological', False)
        self.assertRaises(errors.RevisionNotPresent, files.insert_record_stream,
            stream)

    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological', False)
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included aborts safely."""
        # We use a knit always here to be sure we are getting a binary delta.
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        entries = source.get_record_stream(['origin', 'merged'], 'unordered', False)
        files = self.get_versionedfiles()
        self.assertRaises(RevisionNotPresent, files.insert_record_stream,
            entries)
        files.check()
        self.assertEqual({}, files.get_parent_map([]))

    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equal set of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                    pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines

        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content.', 0, 2),
             ('Walking content.', 1, 2),
             ('Walking content.', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
            [('Walking content.', 0, 5),
             ('Walking content.', 1, 5),
             ('Walking content.', 2, 5),
             ('Walking content.', 3, 5),
             ('Walking content.', 4, 5),
             ('Walking content.', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
            ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
            ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical no-eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        next_parent = self.get_simple_key('base')
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key('base')
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext')
                )

    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # bzr.
        files = self.get_versionedfiles()
        self.assertEqual(set(), set(files.keys()))
        if self.key_length == 1:
            key = ('foo',)
        else:
            key = ('foo', 'bar',)
        files.add_lines(key, (), [])
        self.assertEqual(set([key]), set(files.keys()))


class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        ret = {}
        for k in keys:
            if k in self._parent_map:
                ret[k] = self._parent_map[k]
        return ret

    def setUp(self):
        TestCase.setUp(self)
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)

    def test_add_lines(self):
        self.assertRaises(NotImplementedError,
            self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        self.assertRaises(NotImplementedError,
            self.texts.add_mpdiffs, [])

    def test_check(self):
        self.assertTrue(self.texts.check())

    def test_insert_record_stream(self):
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,
            [])

    def test_get_sha1s_nonexistent(self):
        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
                          self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        self._parent_map = {"G": ("A", "B")}
        self.assertEquals({("G",): (("A",),("B",))},
                          self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("fulltext", record.storage_kind)
        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("absent", record.storage_kind)