        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])

        self.assertRaises(errors.ReservedId,
            vf.add_delta, 'a:', [], None, 'sha1', False, ((0, 0, 0, []),))

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied, adding old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")
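
    # Note (illustrative, not part of the original suite): nostore_sha lets a
    # caller say "if the text I am adding already has this sha1 in the store,
    # raise ExistingContent instead of storing it again", which is how
    # duplicate texts are cheaply avoided.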

    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied, adding old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines_with_ghosts, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")

    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
            # - the digest and the size of the text. For some versioned files
            #   additional data is returned in additional tuple elements.
            result = vf.add_lines(version, [], lines)
            self.assertEqual(3, len(result))
            self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                result[0:2])
        # parents should not affect the result:
        lines = sample_text_nl[1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
            vf.add_lines('d', ['b', 'c'], lines)[0:2])
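
    # Illustrative usage sketch: only the first two elements of the add_lines
    # result are portable across implementations, e.g.
    #   sha1, text_size = vf.add_lines(version, parents, lines)[0:2]
    # Any further elements are backend-specific and must not be relied on.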

    def test_get_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_delta, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')

    def test_get_delta(self):
        f = self.get_file()
        sha1s = self._setup_for_deltas(f)
        expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
            [(0, 0, 1, [('base', 'line\n')])])
        self.assertEqual(expected_delta, f.get_delta('base'))
        next_parent = 'base'
        text_name = 'chain1-'
        for depth in range(26):
            new_version = text_name + '%s' % depth
            expected_delta = (next_parent, sha1s[depth],
                False,
                [(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
            self.assertEqual(expected_delta, f.get_delta(new_version))
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        for depth in range(26):
            new_version = text_name + '%s' % depth
            expected_delta = (next_parent, sha1s[depth], False,
                [(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
            self.assertEqual(expected_delta, f.get_delta(new_version))
            next_parent = new_version
        # smoke test for eol support
        expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
        self.assertEqual(['line'], f.get_lines('noeol'))
        self.assertEqual(expected_delta, f.get_delta('noeol'))

    def test_get_deltas(self):
        f = self.get_file()
        sha1s = self._setup_for_deltas(f)
        deltas = f.get_deltas(f.versions())
        expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
            [(0, 0, 1, [('base', 'line\n')])])
        self.assertEqual(expected_delta, deltas['base'])
        next_parent = 'base'
        text_name = 'chain1-'
        for depth in range(26):
            new_version = text_name + '%s' % depth
            expected_delta = (next_parent, sha1s[depth],
                False,
                [(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
            self.assertEqual(expected_delta, deltas[new_version])
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        for depth in range(26):
            new_version = text_name + '%s' % depth
            expected_delta = (next_parent, sha1s[depth], False,
                [(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
            self.assertEqual(expected_delta, deltas[new_version])
            next_parent = new_version
        # smoke tests for eol support
        expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
        self.assertEqual(['line'], f.get_lines('noeol'))
        self.assertEqual(expected_delta, deltas['noeol'])
        # smoke tests for eol support - two noeol in a row, same content
        expected_deltas = (('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
            [(0, 1, 2, [('noeolsecond', 'line\n'), ('noeolsecond', 'line\n')])]),
            ('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
            [(0, 0, 1, [('noeolsecond', 'line\n')]), (1, 1, 0, [])]))
        self.assertEqual(['line\n', 'line'], f.get_lines('noeolsecond'))
        self.assertTrue(deltas['noeolsecond'] in expected_deltas)
        # two no-eol in a row, different content
        expected_delta = ('noeolsecond', '8bb553a84e019ef1149db082d65f3133b195223b', True,
            [(1, 2, 1, [('noeolnotshared', 'phone\n')])])
        self.assertEqual(['line\n', 'phone'], f.get_lines('noeolnotshared'))
        self.assertEqual(expected_delta, deltas['noeolnotshared'])
        # eol following a no-eol with content change
        expected_delta = ('noeol', 'a61f6fb6cfc4596e8d88c34a308d1e724caf8977', False,
            [(0, 1, 1, [('eol', 'phone\n')])])
        self.assertEqual(['phone\n'], f.get_lines('eol'))
        self.assertEqual(expected_delta, deltas['eol'])
        # eol following a no-eol with no content change
        expected_delta = ('noeol', '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
            [(0, 1, 1, [('eolline', 'line\n')])])
        self.assertEqual(['line\n'], f.get_lines('eolline'))
        self.assertEqual(expected_delta, deltas['eolline'])
        # eol with no parents
        expected_delta = (None, '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
            [(0, 0, 1, [('noeolbase', 'line\n')])])
        self.assertEqual(['line'], f.get_lines('noeolbase'))
        self.assertEqual(expected_delta, deltas['noeolbase'])
        # eol with two parents, in inverse insertion order
        expected_deltas = (('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
            [(0, 1, 1, [('eolbeforefirstparent', 'line\n')])]),
            ('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
            [(0, 1, 1, [('eolbeforefirstparent', 'line\n')])]))
        self.assertEqual(['line'], f.get_lines('eolbeforefirstparent'))
        #self.assertTrue(deltas['eolbeforefirstparent'] in expected_deltas)

    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Add a text with an unchanged last line with no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            prefix = 'step-%d'
            parents = []
            for step in range(length):
                version = prefix % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))

    def test_get_texts_eol_variation(self):
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
        versions = []
        version_lines = {}
        parents = []
        for i in range(4):
            version = 'v%d' % i
            if i % 2:
                lines = sample_text_nl
            else:
                lines = sample_text_no_nl
            # left_matching_blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines

        vf.get_texts(versions)
        vf.get_texts(reversed(versions))
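
    # Assumed convention, noted for clarity: left_matching_blocks entries are
    # (left_start, right_start, length) triples in the style of difflib's
    # get_matching_blocks, so (0, 0, 1) above declares that the single line of
    # the new text matches the first line of the left (first) parent.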

    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Add a text with an unchanged last line with no eol should work."""
        from bzrlib import multiparent
        # Hand verified sha1 of the text we're adding.
        sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
        # Create a mpdiff which adds a new line before the trailing line, and
        # reuse the last line unaltered (which can cause annotation reuse).
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        vf = self.get_file('foo')
        sha1s = self._setup_for_deltas(vf)
        new_vf = self.get_file('bar')
        for version in multiparent.topo_iter(vf):
            mpdiff = vf.make_mpdiffs([version])[0]
            new_vf.add_mpdiffs([(version, vf.get_parent_map([version])[version],
                                 vf.get_sha1s([version])[version], mpdiff)])
            self.assertEqualDiff(vf.get_text(version),
                                 new_vf.get_text(version))
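
    # The round-trip above is the mpdiff contract: a multi-parent diff plus
    # the parent keys and the expected sha1 is sufficient for another
    # versioned file to reconstruct the identical text.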

    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])

    def _setup_for_deltas(self, f):
        self.assertRaises(errors.RevisionNotPresent, f.get_delta, 'base')
        self.assertFalse(f.has_version('base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions

        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

        overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                    'xxx', '>>>>>>> ', 'bbb']


class TestContentFactoryAdaption(TestCaseWithMemoryTransport):

    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario for each lookup combination we expect to use.
        # Each is source_kind, requested_kind, adapter class
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source, requested, klass in scenarios:
            adapter_factory = versionedfile.adapter_registry.get(
                (source, requested))
            adapter = adapter_factory(None)
            self.assertIsInstance(adapter, klass)
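
    # The adapter registry is keyed by (source_kind, requested_kind) pairs; a
    # minimal lookup sketch mirroring the loop above:
    #   factory = versionedfile.adapter_registry.get(('knit-ft-gz', 'fulltext'))
    #   adapter = factory(None)  # fulltext adaption needs no backing store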

    def get_knit(self, annotated=True):
        mapper = ConstantMapper('knit')
        transport = self.get_transport()
        return make_file_factory(annotated, mapper)(transport)

    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the adapted texts we are interested in for the tests."""
        # origin is a fulltext
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = entries.next()
        ft_data = ft_adapter.get_bytes(base)
        # merged is both a delta and has multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = entries.next()
        delta_data = delta_adapter.get_bytes(merged)
        return ft_data, delta_data

    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)
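
    # The logged call above shows that expanding a delta record requires
    # exactly one fetch of its compression parent ('left') from the backing
    # versioned file, while fulltext adaption touches no backing store.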

    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)


class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))

    def test_prefix_mapper(self):
        #format5: plain
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        #format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        #knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
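
    # Note: unmap only recovers the prefix element of a key (the file id);
    # the revision id is never encoded in the mapped path, so it cannot be
    # reconstructed from it.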


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(self.cleanup, files)
        return files

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)

    def test_add_lines(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f.add_lines(key0, [], ['a\n', 'b\n'])
        if self.graph:
            f.add_lines(key1, [key0], ['b\n', 'c\n'])
        else:
            f.add_lines(key1, [], ['b\n', 'c\n'])
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)

    def test__add_text(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f._add_text(key0, [], 'a\nb\n')
        if self.graph:
            f._add_text(key1, [key0], 'b\nc\n')
        else:
            f._add_text(key1, [], 'b\nc\n')
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([
            (prefix + ('origin',), 'origin\n')],
            origins)
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([
            (prefix + ('base',), 'base\n')],
            origins)
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            self.assertEqual([
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + ('merged',), 'base\n'),
                (prefix + ('merged',), 'left\n'),
                (prefix + ('merged',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))

    def test_check_no_parameters(self):
        files = self.get_versionedfiles()

    def test_check_progressbar_parameter(self):
        """A progress bar can be supplied because check can be a generator."""
        pb = ui.ui_factory.nested_progress_bar()
        self.addCleanup(pb.finished)
        files = self.get_versionedfiles()
        files.check(progress_bar=pb)

    def test_check_with_keys_becomes_generator(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys = files.keys()
        entries = files.check(keys=keys)
        seen = set()
        # Texts output should be fulltexts.
        self.capture_stream(files, entries, seen.add,
            files.get_parent_map(keys), require_fulltext=True)
        # All texts should be output.
        self.assertEqual(set(keys), seen)

    def test_clear_cache(self):
        files = self.get_versionedfiles()
        files.clear_cache()

    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
        nokeys=False):
        return get_diamond_files(files, self.key_length,
            trailing_eol=trailing_eol, nograph=not self.graph,
            left_only=left_only, nokeys=nokeys)

    def _add_content_nostoresha(self, add_lines):
        """When nostore_sha is supplied, adding old content raises."""
        vf = self.get_versionedfiles()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            if add_lines:
                sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
                                         lines)
            else:
                sha, _, _ = vf._add_text(self.get_simple_key(version), [],
                                         ''.join(lines))
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + "2")
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, new_key, [], lines,
                nostore_sha=sha)
            self.assertRaises(errors.ExistingContent,
                vf._add_text, new_key, [], ''.join(lines),
                nostore_sha=sha)
            # and no new version should have been added.
            record = vf.get_record_stream([new_key], 'unordered', True).next()
            self.assertEqual('absent', record.storage_kind)

    def test_add_lines_nostoresha(self):
        self._add_content_nostoresha(add_lines=True)

    def test__add_text_nostoresha(self):
        self._add_content_nostoresha(add_lines=False)

    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)

    def test_add_lines_no_key_generates_chk_key(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files, nokeys=True)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
                ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
                ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
                ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
                ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
                ]),
                files.keys())
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ]),
                files.keys())

    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        self.assertEqual('',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual('',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_get_known_graph_ancestry(self):
        f = self.get_versionedfiles()
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        key_a = self.get_simple_key('a')
        key_b = self.get_simple_key('b')
        key_c = self.get_simple_key('c')
        # A
        # |\
        # | B
        # |/
        # C
        f.add_lines(key_a, [], ['\n'])
        f.add_lines(key_b, [key_a], ['\n'])
        f.add_lines(key_c, [key_a, key_b], ['\n'])
        kg = f.get_known_graph_ancestry([key_c])
        self.assertIsInstance(kg, _mod_graph.KnownGraph)
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))

    def test_known_graph_with_fallbacks(self):
        f = self.get_versionedfiles('files')
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        if getattr(f, 'add_fallback_versioned_files', None) is None:
            raise TestNotApplicable("%s doesn't support fallbacks"
                                    % (f.__class__.__name__,))
        key_a = self.get_simple_key('a')
        key_b = self.get_simple_key('b')
        key_c = self.get_simple_key('c')
        # A     only in fallback
        # |\
        # | B
        # |/
        # C
        g = self.get_versionedfiles('fallback')
        g.add_lines(key_a, [], ['\n'])
        f.add_fallback_versioned_files(g)
        f.add_lines(key_b, [key_a], ['\n'])
        f.add_lines(key_c, [key_a, key_b], ['\n'])
        kg = f.get_known_graph_ancestry([key_c])
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))

    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        f = self.get_versionedfiles()
        entries = f.get_record_stream([], 'unordered', False)
        self.assertEqual([], list(entries))

    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
             'knit-delta-gz',
             'knit-delta-closure', 'knit-delta-closure-ref',
             'groupcompress-block', 'groupcompress-block-ref'])

    def capture_stream(self, f, entries, on_seen, parents,
        require_fulltext=False):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
            if require_fulltext:
                factory.get_bytes_as('fulltext')

    def test_get_record_stream_interface(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, entries, seen.add, parent_map)
        self.assertEqual(set(keys), seen)

    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):0,
                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
                ('FileB', 'base'):0,
                }
        return keys, sort_order
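
    # sort_order maps each key to a topological depth; assertStreamOrder
    # (below) checks that, within any one prefix, depths never decrease as
    # the stream is consumed.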

    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):0, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):2,
                ('FileB', 'merged'):3, ('FileB', 'left'):4, ('FileB', 'right'):4,
                ('FileB', 'base'):5,
                }
        return keys, sort_order
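
    # Note the groupcompress ordering is roughly newest-first: 'merged' sorts
    # at 0 and 'base' last, the reverse of the topological order above, and
    # each prefix forms its own contiguous group.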

    def test_get_record_stream_interface_ordered(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """Each item must be accessible as a fulltext."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertSubset([factory.sha1],
                [None, files.get_sha1s([factory.key])[factory.key]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            # self.assertEqual(files.get_text(factory.key),
            ft_bytes = factory.get_bytes_as('fulltext')
            self.assertIsInstance(ft_bytes, str)
            chunked_bytes = factory.get_bytes_as('chunked')
            self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))

        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_groupcompress(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_groupcompress_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'groupcompress', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def assertStreamOrder(self, sort_order, seen, keys):
        self.assertEqual(len(set(seen)), len(keys))
        if self.key_length == 1:
            lows = {():0}
        else:
            lows = {('FileA',):0, ('FileB',):0}
        if not self.graph:
            self.assertEqual(set(keys), set(seen))
        else:
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                    "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos

    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        # We track the contents because we should be able to try, fail a
        # particular kind and then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)

    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'absent'), ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'absent'), ('FileB', 'base'),
                ('absent', 'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
        entries = files.get_record_stream(keys, 'topological', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)

    def assertRecordHasContent(self, record, bytes):
        """Assert that record has the bytes bytes."""
        self.assertEqual(bytes, record.get_bytes_as('fulltext'))
        self.assertEqual(bytes, ''.join(record.get_bytes_as('chunked')))

    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
        files = self.get_versionedfiles()
        key = self.get_simple_key('foo')
        files.add_lines(key, (), ['my text\n', 'content'])
        stream = files.get_record_stream([key], 'unordered', False)
        record = stream.next()
        if record.storage_kind in ('chunked', 'fulltext'):
            # chunked and fulltext representations are for direct use not wire
            # serialisation: check they are able to be used directly. To send
            # such records over the wire translation will be needed.
            self.assertRecordHasContent(record, "my text\ncontent")
        else:
            bytes = [record.get_bytes_as(record.storage_kind)]
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
            source_record = record
            records = []
            for record in network_stream:
                records.append(record)
                self.assertEqual(source_record.storage_kind,
                    record.storage_kind)
                self.assertEqual(source_record.parents, record.parents)
                self.assertEqual(
                    source_record.get_bytes_as(source_record.storage_kind),
                    record.get_bytes_as(record.storage_kind))
            self.assertEqual(1, len(records))
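
    # Wire round-trip sketch, as exercised above: the bytes from
    # record.get_bytes_as(record.storage_kind) are fed through
    # versionedfile.NetworkRecordStream(...).read(), which yields records
    # with the same key, parents and storage_kind as the source.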

    def assertStreamMetaEqual(self, records, expected, stream):
        """Assert that streams expected and stream have the same records.

        :param records: A list to collect the seen records.
        :return: A generator of the records in stream.
        """
        # We make assertions during copying to catch things early for
        # easier debugging.
        for record, ref_record in izip(stream, expected):
            records.append(record)
            self.assertEqual(ref_record.key, record.key)
            self.assertEqual(ref_record.storage_kind, record.storage_kind)
            self.assertEqual(ref_record.parents, record.parents)
            yield record

    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
        stream):
        """Convert a stream to a bytes iterator.

        :param skipped_records: A list with one element to increment when a
            record is skipped.
        :param full_texts: A dict from key->fulltext representation, for
            checking chunked or fulltext stored records.
        :param stream: A record_stream.
        :return: An iterator over the bytes of each record.
        """
        for record in stream:
            if record.storage_kind in ('chunked', 'fulltext'):
                skipped_records[0] += 1
                # check the content is correct for direct use.
                self.assertRecordHasContent(record, full_texts[record.key])
            else:
                yield record.get_bytes_as(record.storage_kind)

    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key: "my text\ncontent",
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(2, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it must all have been copied.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_native_formats_are_wire_ready_delta(self):
        # copy a delta over the wire
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Copy the basis text across so we can reconstruct the delta during
        # insertion into target.
        target_files.insert_record_stream(files.get_record_stream([key],
            'unordered', False))
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it must all have been copied.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_wire_ready_delta_closure_included(self):
        # copy a delta over the wire with the ability to get its full text.
        files = self.get_versionedfiles()
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection during check_stream.
        for record in self.assertStreamMetaEqual(records, ref, network_stream):
            # we have to be able to get the full text out:
            self.assertRecordHasContent(record, full_texts[record.key])
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])

    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                if factory.sha1 is not None:
                    sha1 = files.get_sha1s([factory.key])[factory.key]
                    self.assertEqual(sha1, factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                    str)
        self.assertEqual(set(keys), seen)

    def test_filter_absent_records(self):
        """Requested missing records can be filtered trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't ask
        # for just absent keys to ensure that content before and after the
        # absent keys is still delivered).
        present_keys = list(keys)
        if self.key_length == 1:
            keys.insert(2, ('extra',))
        else:
            keys.insert(2, ('extra', 'extra'))
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries), seen.add,
            parent_map)
        self.assertEqual(set(present_keys), seen)

    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        if self.key_length == 1:
            return ConstantMapper('source')
        else:
            return HashEscapedPrefixMapper()

    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration."""
        if self.graph:
            return parents
        else:
            return None

    def test_get_annotator(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        origin_key = self.get_simple_key('origin')
        base_key = self.get_simple_key('base')
        left_key = self.get_simple_key('left')
        right_key = self.get_simple_key('right')
        merged_key = self.get_simple_key('merged')
        # annotator = files.get_annotator()
        # introduced full text
        origins, lines = files.get_annotator().annotate(origin_key)
        self.assertEqual([(origin_key,)], origins)
        self.assertEqual(['origin\n'], lines)
        origins, lines = files.get_annotator().annotate(base_key)
        self.assertEqual([(base_key,)], origins)
        self.assertEqual(['base\n'], lines)
        origins, lines = files.get_annotator().annotate(merged_key)
        if self.graph:
            self.assertEqual([
                (base_key,),
                (left_key,),
                (right_key,),
                (merged_key,),
                ], origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (merged_key,),
                (merged_key,),
                (merged_key,),
                (merged_key,),
                ], origins)
        self.assertRaises(RevisionNotPresent,
            files.get_annotator().annotate, self.get_simple_key('missing-key'))

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key:parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = all_parents.keys()
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))

    def test_get_sha1s(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        self.assertEqual({
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))

    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        files = self.get_versionedfiles()
        files.insert_record_stream([])

    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that left and right have the same contents."""
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents, expected.get_parent_map(expected.keys()))
        else:
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            actual_text = actual.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            expected_text = expected.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)

    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # insert some keys into f.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        stream = source.get_record_stream([('missing',) * self.key_length],
            'topological', False)
        self.assertRaises(errors.RevisionNotPresent, files.insert_record_stream,
            stream)

    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological', False)
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_long_parent_chain_out_of_order(self):
        """An out of order stream can either error or work."""
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        # Create a reasonably long chain of records based on each other, where
        # most will be deltas.
        source = self.get_versionedfiles('source')
        parents = ()
        keys = []
        content = [('same same %d\n' % n) for n in range(500)]
        for letter in 'abcdefghijklmnopqrstuvwxyz':
            key = ('key-' + letter,)
            if self.key_length == 2:
                key = ('prefix',) + key
            content.append('content for ' + letter + '\n')
            source.add_lines(key, parents, content)
            keys.append(key)
            parents = (key,)
        # Create a stream of these records, excluding the first record that the
        # rest ultimately depend upon, and insert it into a new vf.
        streams = []
        for key in reversed(keys):
            streams.append(source.get_record_stream([key], 'unordered', False))
        deltas = chain(*streams[:-1])
        files = self.get_versionedfiles()
        try:
            files.insert_record_stream(deltas)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            # Must only report either just the first key as a missing parent,
            # or no key as missing (for nodelta scenarios).
            missing = set(files.get_missing_compression_parent_keys())
            missing.discard(keys[0])
            self.assertEqual(set(), missing)
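
    # Implementations may either reject an incomplete stream outright with
    # RevisionNotPresent, or buffer the dangling deltas and report the
    # unresolved basis via get_missing_compression_parent_keys(); both
    # behaviours are accepted by the test above.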

    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.
        """
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
            nograph=False, left_only=False)
        return source

    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  In the meantime a record missing its basis is
        not added.
        """
        source = self.get_knit_delta_source()
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        entries = source.get_record_stream(keys, 'unordered', False)
        files = self.get_versionedfiles()
        if self.support_partial_insertion:
            self.assertEqual([],
                list(files.get_missing_compression_parent_keys()))
            files.insert_record_stream(entries)
            missing_bases = files.get_missing_compression_parent_keys()
            self.assertEqual(set([self.get_simple_key('left')]),
                set(missing_bases))
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
        else:
            self.assertRaises(
                errors.RevisionNotPresent, files.insert_record_stream, entries)
            files.check()
2512
    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  That basis can be added in a second
        insert_record_stream call that does not need to repeat records present
        in the previous stream.  The record(s) that required that basis are
        fully inserted once their basis is no longer missing.
        """
        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        entries = source.get_record_stream([self.get_simple_key('origin'),
            self.get_simple_key('merged')], 'unordered', False)
        files = self.get_versionedfiles()
        files.insert_record_stream(entries)
        missing_bases = files.get_missing_compression_parent_keys()
        self.assertEqual(set([self.get_simple_key('left')]),
            set(missing_bases))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        merged_key = self.get_simple_key('merged')
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        # Add the full delta closure of the missing records
        missing_entries = source.get_record_stream(
            missing_bases, 'unordered', True)
        files.insert_record_stream(missing_entries)
        # Now 'merged' is fully inserted (and a commit would succeed).
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        files.check()

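    # InstrumentedProgress, defined inside the next test, records every
    # update() call so the test can assert exactly how the line walk
    # reported its progress.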
    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equal set of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                    pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
            [('Walking content', 0, 5),
             ('Walking content', 1, 5),
             ('Walking content', 2, 5),
             ('Walking content', 3, 5),
             ('Walking content', 4, 5),
             ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)

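    # make_mpdiffs round-trip: every version is converted to a multi-parent
    # diff and re-added to an empty target, which must then reproduce
    # byte-identical fulltexts, including the tricky no-eol cases set up
    # below.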
    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
            ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
            ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        next_parent = self.get_simple_key('base')
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: '779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: '2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: '1ebed371807ba5935958ad0884595126e8c4e823',
                 20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key('base')
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'))

    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # bzr.
        files = self.get_versionedfiles()
        self.assertEqual(set(), set(files.keys()))
        if self.key_length == 1:
            key = ('foo',)
        else:
            key = ('foo', 'bar',)
        files.add_lines(key, (), [])
        self.assertEqual(set([key]), set(files.keys()))


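# VirtualVersionedFiles adapts externally supplied data (here the test's
# _parent_map and _lines dictionaries) to the read-only side of the
# VersionedFiles API; every mutating method is expected to raise
# NotImplementedError, as the tests below check.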
class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        ret = {}
        for k in keys:
            if k in self._parent_map:
                ret[k] = self._parent_map[k]
        return ret

    def setUp(self):
        TestCase.setUp(self)
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)

    def test_add_lines(self):
        self.assertRaises(NotImplementedError,
                          self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        self.assertRaises(NotImplementedError,
                          self.texts.add_mpdiffs, [])

    def test_check_noerrors(self):
        self.texts.check()

    def test_insert_record_stream(self):
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,
                          [])

    def test_get_sha1s_nonexistent(self):
        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
                          self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        self._parent_map = {"G": ("A", "B")}
        self.assertEquals({("G",): (("A",), ("B",))},
                          self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("chunked", record.storage_kind)
        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
                          sorted(list(it)))


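# OrderingVersionedFilesDecorator wraps a real versionedfiles object and logs
# each call in .calls; for 'unordered' record streams it yields records
# sorted by the supplied key_priority map (lowest value first), while
# 'topological' requests keep topological order.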
class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):

    def get_ordering_vf(self, key_priority):
        builder = self.make_branch_builder('test')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'TREE_ROOT', 'directory', None))])
        builder.build_snapshot('B', ['A'], [])
        builder.build_snapshot('C', ['B'], [])
        builder.build_snapshot('D', ['C'], [])
        builder.finish_series()
        b = builder.get_branch()
        b.lock_read()
        self.addCleanup(b.unlock)
        vf = b.repository.inventories
        return versionedfile.OrderingVersionedFilesDecorator(vf, key_priority)

    def test_get_empty(self):
        vf = self.get_ordering_vf({})
        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([('A',), ('B',), ('C',), ('D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
                           False)], vf.calls)

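    # For 'unordered' the decorator is free to choose the order, so it
    # serves records by ascending key_priority value instead.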
    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([('D',), ('B',), ('A',), ('C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)

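    # Keys absent from key_priority presumably default to the lowest
    # priority (0), so they sort ahead of everything else, with ties broken
    # alphabetically by key.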
    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first
        self.assertEqual([('A',), ('C',), ('D',), ('B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)