        except NotImplementedError:

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])
    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")
    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines_with_ghosts, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")
    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
            # - the digest and the size of the text. For some versioned files
            #   additional data is returned in additional tuple elements.
            result = vf.add_lines(version, [], lines)
            self.assertEqual(3, len(result))
            self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                result[0:2])
        # parents should not affect the result:
        lines = sample_text_nl[1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
            vf.add_lines('d', ['b', 'c'], lines)[0:2])
    def test_get_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')
    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Add a text with an unchanged last line with no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
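        # For illustration (an assumed shape, not asserted anywhere): at
        # length=3 the loop below adds 'step-0' as ['line'], 'step-1' as
        # ['prelude \n', 'line'] and 'step-2' as
        # ['prelude \n', 'prelude \n', 'line'], each chained on its
        # predecessor, before the final no-eol add.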
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            prefix = 'step-%d'
            parents = []
            for step in range(length):
                version = prefix % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))
    def test_get_texts_eol_variation(self):
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
        versions = []
        version_lines = {}
        parents = []
        for i in range(4):
            version = 'v%d' % i
            if i % 2:
                lines = sample_text_nl
            else:
                lines = sample_text_no_nl
            # left_matching_blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
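            # A matching block is a (left_start, new_start, length) triple in
            # SequenceMatcher style: lines[left_start:left_start+length] of
            # the left parent are taken to equal
            # lines[new_start:new_start+length] of the new text, so (0, 0, 1)
            # declares the single line unchanged from the parent.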
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines
        vf.get_texts(versions)
        vf.get_texts(reversed(versions))
    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Add a text with an unchanged last line with no eol should work."""
        from bzrlib import multiparent
        # Hand verified sha1 of the text we're adding.
        sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
        # Create a mpdiff which adds a new line before the trailing line, and
        # reuse the last line unaltered (which can cause annotation reuse).
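        # As a sketch of the block semantics used below: (0, 1, 1) says line 0
        # of the parent ('line') matches line 1 of the new text, i.e. the
        # shared trailing no-eol line; (1, 1, 1) says the same for a parent
        # whose line 0 is 'prelude\n'.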
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        vf = self.get_file('foo')
        sha1s = self._setup_for_deltas(vf)
        new_vf = self.get_file('bar')
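        # Round-trip every version in topological order: add_mpdiffs consumes
        # (key, parents, expected_sha1, mpdiff) tuples, so the target can
        # verify each reconstructed text against the source's sha1.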
        for version in multiparent.topo_iter(vf):
            mpdiff = vf.make_mpdiffs([version])[0]
            new_vf.add_mpdiffs([(version, vf.get_parent_map([version])[version],
                                 vf.get_sha1s([version])[version], mpdiff)])
            self.assertEqualDiff(vf.get_text(version),
                                 new_vf.get_text(version))
    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])
    def test_get_delta(self):
        f = self.get_file()
        sha1s = self._setup_for_deltas(f)
        expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
                          [(0, 0, 1, [('base', 'line\n')])])
        self.assertEqual(expected_delta, f.get_delta('base'))
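        # Reading the tuples above: a delta is (parent, sha1, noeol, edits)
        # where each edit (start, end, count, lines) replaces lines
        # [start:end] of the parent with the given annotated lines, and noeol
        # flags a missing final newline.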
        next_parent = 'base'
        text_name = 'chain1-'
        for depth in range(26):
            new_version = text_name + '%s' % depth
            expected_delta = (next_parent, sha1s[depth], False,
                [(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
            self.assertEqual(expected_delta, f.get_delta(new_version))
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        for depth in range(26):
            new_version = text_name + '%s' % depth
            expected_delta = (next_parent, sha1s[depth], False,
                [(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
            self.assertEqual(expected_delta, f.get_delta(new_version))
            next_parent = new_version
        # smoke test for eol support
        expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
        self.assertEqual(['line'], f.get_lines('noeol'))
        self.assertEqual(expected_delta, f.get_delta('noeol'))
    def test_get_deltas(self):
        f = self.get_file()
        sha1s = self._setup_for_deltas(f)
        deltas = f.get_deltas(f.versions())
        expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
                          [(0, 0, 1, [('base', 'line\n')])])
        self.assertEqual(expected_delta, deltas['base'])
        next_parent = 'base'
        text_name = 'chain1-'
        for depth in range(26):
            new_version = text_name + '%s' % depth
            expected_delta = (next_parent, sha1s[depth], False,
                [(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
            self.assertEqual(expected_delta, deltas[new_version])
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        for depth in range(26):
            new_version = text_name + '%s' % depth
            expected_delta = (next_parent, sha1s[depth], False,
                [(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
            self.assertEqual(expected_delta, deltas[new_version])
            next_parent = new_version
        # smoke tests for eol support
        expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
        self.assertEqual(['line'], f.get_lines('noeol'))
        self.assertEqual(expected_delta, deltas['noeol'])
        # smoke tests for eol support - two noeol in a row same content
        expected_deltas = (('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
                            [(0, 1, 2, [(u'noeolsecond', 'line\n'), (u'noeolsecond', 'line\n')])]),
                           ('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
                            [(0, 0, 1, [('noeolsecond', 'line\n')]), (1, 1, 0, [])]))
        self.assertEqual(['line\n', 'line'], f.get_lines('noeolsecond'))
        self.assertTrue(deltas['noeolsecond'] in expected_deltas)
        # two no-eol in a row, different content
        expected_delta = ('noeolsecond', '8bb553a84e019ef1149db082d65f3133b195223b', True,
                          [(1, 2, 1, [(u'noeolnotshared', 'phone\n')])])
        self.assertEqual(['line\n', 'phone'], f.get_lines('noeolnotshared'))
        self.assertEqual(expected_delta, deltas['noeolnotshared'])
        # eol following a no-eol with content change
        expected_delta = ('noeol', 'a61f6fb6cfc4596e8d88c34a308d1e724caf8977', False,
                          [(0, 1, 1, [(u'eol', 'phone\n')])])
        self.assertEqual(['phone\n'], f.get_lines('eol'))
        self.assertEqual(expected_delta, deltas['eol'])
        # eol following a no-eol with content change
        expected_delta = ('noeol', '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
                          [(0, 1, 1, [(u'eolline', 'line\n')])])
        self.assertEqual(['line\n'], f.get_lines('eolline'))
        self.assertEqual(expected_delta, deltas['eolline'])
        # no-eol with no parents
        expected_delta = (None, '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
                          [(0, 0, 1, [(u'noeolbase', 'line\n')])])
        self.assertEqual(['line'], f.get_lines('noeolbase'))
        self.assertEqual(expected_delta, deltas['noeolbase'])
        # no-eol with two parents, in inverse insertion order
        expected_deltas = (('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
                            [(0, 1, 1, [(u'eolbeforefirstparent', 'line\n')])]),
                           ('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
                            [(0, 1, 1, [(u'eolbeforefirstparent', 'line\n')])]))
        self.assertEqual(['line'], f.get_lines('eolbeforefirstparent'))
        #self.assertTrue(deltas['eolbeforefirstparent'] in expected_deltas)
    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version('base'))
        self.assertRaises(errors.RevisionNotPresent, f.get_delta, 'base')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

        overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                    'xxx', '>>>>>>> ', 'bbb']

class TestContentFactoryAdaption(TestCaseWithMemoryTransport):

    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario for each lookup combination we expect to use.
        # Each is source_kind, requested_kind, adapter class
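        # Roughly: 'ft' kinds are full texts, 'delta' kinds are line deltas
        # against a compression parent, '-gz' marks the gzipped on-disk knit
        # form, and 'annotated' records carry per-line origin data that the
        # ...To... adapters strip or expand.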
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source, requested, klass in scenarios:
            adapter_factory = versionedfile.adapter_registry.get(
                (source, requested))
            adapter = adapter_factory(None)
            self.assertIsInstance(adapter, klass)
    def get_knit(self, annotated=True):
        mapper = ConstantMapper('knit')
        transport = self.get_transport()
        return make_file_factory(annotated, mapper)(transport)
    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the interested adapted texts for tests."""
        # origin is a fulltext
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = entries.next()
        ft_data = ft_adapter.get_bytes(base)
        # merged is both a delta and multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = entries.next()
        delta_data = delta_adapter.get_bytes(merged)
        return ft_data, delta_data
    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())
    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())
    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)
    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)
    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)
    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""
    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))
    def test_prefix_mapper(self):
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))
    def test_hash_prefix_mapper(self):
        #format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))
    def test_hash_escaped_mapper(self):
        #knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))

class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(lambda: self.cleanup(files))
        return files
    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)
    def test_add_lines(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f.add_lines(key0, [], ['a\n', 'b\n'])
        if self.graph:
            f.add_lines(key1, [key0], ['b\n', 'c\n'])
        else:
            f.add_lines(key1, [], ['b\n', 'c\n'])
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
    def test__add_text(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f._add_text(key0, [], 'a\nb\n')
        if self.graph:
            f._add_text(key1, [key0], 'b\nc\n')
        else:
            f._add_text(key1, [], 'b\nc\n')
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([
            (prefix + ('origin',), 'origin\n')],
            origins)
        # a delta
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([
            (prefix + ('base',), 'base\n')],
            origins)
        # a merge
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            self.assertEqual([
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + ('merged',), 'base\n'),
                (prefix + ('merged',), 'left\n'),
                (prefix + ('merged',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))
    def test_check_no_parameters(self):
        files = self.get_versionedfiles()
        files.check()
    def test_check_progressbar_parameter(self):
        """A progress bar can be supplied because check can be a generator."""
        pb = ui.ui_factory.nested_progress_bar()
        self.addCleanup(pb.finished)
        files = self.get_versionedfiles()
        files.check(progress_bar=pb)
    def test_check_with_keys_becomes_generator(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys = files.keys()
        entries = files.check(keys=keys)
        seen = set()
        # Texts output should be fulltexts.
        self.capture_stream(files, entries, seen.add,
            files.get_parent_map(keys), require_fulltext=True)
        # All texts should be output.
        self.assertEqual(set(keys), seen)
    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()
    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
        nokeys=False):
        return get_diamond_files(files, self.key_length,
            trailing_eol=trailing_eol, nograph=not self.graph,
            left_only=left_only, nokeys=nokeys)
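    # The diamond fixture stores five texts per prefix: 'origin', plus 'base'
    # with children 'left' and 'right' that merge into 'merged' - the
    # ancestry diamond most of the tests below rely on.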
    def _add_content_nostoresha(self, add_lines):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_versionedfiles()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            if add_lines:
                sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
                                         lines)
            else:
                sha, _, _ = vf._add_text(self.get_simple_key(version), [],
                                         ''.join(lines))
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + "2")
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, new_key, [], lines,
                nostore_sha=sha)
            self.assertRaises(errors.ExistingContent,
                vf._add_text, new_key, [], ''.join(lines),
                nostore_sha=sha)
            # and no new version should have been added.
            record = vf.get_record_stream([new_key], 'unordered', True).next()
            self.assertEqual('absent', record.storage_kind)
    def test_add_lines_nostoresha(self):
        self._add_content_nostoresha(add_lines=True)

    def test__add_text_nostoresha(self):
        self._add_content_nostoresha(add_lines=False)
    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
    def test_add_lines_no_key_generates_chk_key(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files, nokeys=True)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
                ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
                ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
                ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
                ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
                ]),
                files.keys())
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ]),
                files.keys())
    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        self.assertEqual('',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual('',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))
    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))
    def test_get_known_graph_ancestry(self):
        f = self.get_versionedfiles()
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        key_a = self.get_simple_key('a')
        key_b = self.get_simple_key('b')
        key_c = self.get_simple_key('c')
        f.add_lines(key_a, [], ['\n'])
        f.add_lines(key_b, [key_a], ['\n'])
        f.add_lines(key_c, [key_a, key_b], ['\n'])
        kg = f.get_known_graph_ancestry([key_c])
        self.assertIsInstance(kg, _mod_graph.KnownGraph)
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
    def test_known_graph_with_fallbacks(self):
        f = self.get_versionedfiles('files')
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        if getattr(f, 'add_fallback_versioned_files', None) is None:
            raise TestNotApplicable("%s doesn't support fallbacks"
                                    % (f.__class__.__name__,))
        key_a = self.get_simple_key('a')
        key_b = self.get_simple_key('b')
        key_c = self.get_simple_key('c')
        # A only in fallback
        g = self.get_versionedfiles('fallback')
        g.add_lines(key_a, [], ['\n'])
        f.add_fallback_versioned_files(g)
        f.add_lines(key_b, [key_a], ['\n'])
        f.add_lines(key_c, [key_a, key_b], ['\n'])
        kg = f.get_known_graph_ancestry([key_c])
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        f = self.get_versionedfiles()
        entries = f.get_record_stream([], 'unordered', False)
        self.assertEqual([], list(entries))
    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
             'knit-delta-gz',
             'knit-delta-closure', 'knit-delta-closure-ref',
             'groupcompress-block', 'groupcompress-block-ref'])
    def capture_stream(self, f, entries, on_seen, parents,
        require_fulltext=False):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
            if require_fulltext:
                factory.get_bytes_as('fulltext')
    def test_get_record_stream_interface(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, entries, seen.add, parent_map)
        self.assertEqual(set(keys), seen)
    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):0,
                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
                ('FileB', 'base'):0,
                }
        return keys, sort_order
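    # sort_order maps each key to its topological depth; assertStreamOrder
    # below only requires depths to be non-decreasing per file prefix, so
    # siblings like 'left' and 'right' may appear in either order.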
    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):0, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):2,
                ('FileB', 'merged'):3, ('FileB', 'left'):4, ('FileB', 'right'):4,
                ('FileB', 'base'):5,
                }
        return keys, sort_order
    def test_get_record_stream_interface_ordered(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)
    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """each item must be accessible as a fulltext."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertSubset([factory.sha1],
                [None, files.get_sha1s([factory.key])[factory.key]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            # self.assertEqual(files.get_text(factory.key),
            ft_bytes = factory.get_bytes_as('fulltext')
            self.assertIsInstance(ft_bytes, str)
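            # 'chunked' hands back a list of byte strings whose concatenation
            # is the fulltext, so large texts need not be materialised as one
            # giant string.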
            chunked_bytes = factory.get_bytes_as('chunked')
            self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))
        self.assertStreamOrder(sort_order, seen, keys)
    def test_get_record_stream_interface_groupcompress(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_groupcompress_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'groupcompress', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)
    def assertStreamOrder(self, sort_order, seen, keys):
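        # Each key's depth must be non-decreasing within its prefix; this is
        # the weakest check that still proves topological ordering.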
        self.assertEqual(len(set(seen)), len(keys))
        if self.key_length == 1:
            lows = {():0}
        else:
            lows = {('FileA',):0, ('FileB',):0}
        if not self.graph:
            self.assertEqual(set(keys), set(seen))
        else:
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                    "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos
    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        # We track the contents because we should be able to try, fail a
        # particular kind and then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)
    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'absent'), ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'absent'), ('FileB', 'base'),
                ('absent', 'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
        entries = files.get_record_stream(keys, 'topological', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
    def assertRecordHasContent(self, record, bytes):
        """Assert that record has the bytes bytes."""
        self.assertEqual(bytes, record.get_bytes_as('fulltext'))
        self.assertEqual(bytes, ''.join(record.get_bytes_as('chunked')))
    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
        files = self.get_versionedfiles()
        key = self.get_simple_key('foo')
        files.add_lines(key, (), ['my text\n', 'content'])
        stream = files.get_record_stream([key], 'unordered', False)
        record = stream.next()
        if record.storage_kind in ('chunked', 'fulltext'):
            # chunked and fulltext representations are for direct use not wire
            # serialisation: check they are able to be used directly. To send
            # such records over the wire translation will be needed.
            self.assertRecordHasContent(record, "my text\ncontent")
        else:
            bytes = [record.get_bytes_as(record.storage_kind)]
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
            source_record = record
            records = []
            for record in network_stream:
                records.append(record)
                self.assertEqual(source_record.storage_kind,
                    record.storage_kind)
                self.assertEqual(source_record.parents, record.parents)
                self.assertEqual(
                    source_record.get_bytes_as(source_record.storage_kind),
                    record.get_bytes_as(record.storage_kind))
            self.assertEqual(1, len(records))
    def assertStreamMetaEqual(self, records, expected, stream):
        """Assert that streams expected and stream have the same records.

        :param records: A list to collect the seen records.
        :return: A generator of the records in stream.
        """
        # We make assertions during copying to catch things early for
        # easier debugging.
        for record, ref_record in izip(stream, expected):
            records.append(record)
            self.assertEqual(ref_record.key, record.key)
            self.assertEqual(ref_record.storage_kind, record.storage_kind)
            self.assertEqual(ref_record.parents, record.parents)
            yield record
    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
        stream):
        """Convert a stream to a bytes iterator.

        :param skipped_records: A list with one element to increment when a
            record is skipped.
        :param full_texts: A dict from key->fulltext representation, for
            checking chunked or fulltext stored records.
        :param stream: A record_stream.
        :return: An iterator over the bytes of each record.
        """
        for record in stream:
            if record.storage_kind in ('chunked', 'fulltext'):
                skipped_records[0] += 1
                # check the content is correct for direct use.
                self.assertRecordHasContent(record, full_texts[record.key])
            else:
                yield record.get_bytes_as(record.storage_kind)
    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key: "my text\ncontent",
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so we can
        # check the content was carried across correctly without doing delta
        # inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(2, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it must all have been copied.
            self.assertIdenticalVersionedFile(files, target_files)
    def test_get_record_stream_native_formats_are_wire_ready_delta(self):
        # copy a delta over the wire
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Copy the basis text across so we can reconstruct the delta during
        # insertion into target.
        target_files.insert_record_stream(files.get_record_stream([key],
            'unordered', False))
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so we can
        # check the content was carried across correctly without doing delta
        # inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it must all have been copied.
            self.assertIdenticalVersionedFile(files, target_files)
    def test_get_record_stream_wire_ready_delta_closure_included(self):
        # copy a delta over the wire with the ability to get its full text.
        files = self.get_versionedfiles()
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so we can
        # check the content was carried across correctly without doing delta
        # inspection during check_stream.
        for record in self.assertStreamMetaEqual(records, ref, network_stream):
            # we have to be able to get the full text out:
            self.assertRecordHasContent(record, full_texts[record.key])
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                if factory.sha1 is not None:
                    sha1 = files.get_sha1s([factory.key])[factory.key]
                    self.assertEqual(sha1, factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                    str)
        self.assertEqual(set(keys), seen)
    def test_filter_absent_records(self):
        """Requested missing records can be filtered trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't ask
        # for just absent keys to ensure that content before and after the
        # absent keys is still delivered).
        present_keys = list(keys)
        if self.key_length == 1:
            keys.insert(2, ('extra',))
        else:
            keys.insert(2, ('extra', 'extra'))
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries), seen.add,
            parent_map)
        self.assertEqual(set(present_keys), seen)
    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        if self.key_length == 1:
            return ConstantMapper('source')
        else:
            return HashEscapedPrefixMapper()
    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration."""
        if self.graph:
            return parents
        else:
            return ()
    def test_get_annotator(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        origin_key = self.get_simple_key('origin')
        base_key = self.get_simple_key('base')
        left_key = self.get_simple_key('left')
        right_key = self.get_simple_key('right')
        merged_key = self.get_simple_key('merged')
        # annotator = files.get_annotator()
        # introduced full text
        origins, lines = files.get_annotator().annotate(origin_key)
        self.assertEqual([(origin_key,)], origins)
        self.assertEqual(['origin\n'], lines)
        # a delta
        origins, lines = files.get_annotator().annotate(base_key)
        self.assertEqual([(base_key,)], origins)
        self.assertEqual(['base\n'], lines)
        # a merge
        origins, lines = files.get_annotator().annotate(merged_key)
        if self.graph:
            self.assertEqual([
                (base_key,), (left_key,), (right_key,), (merged_key,),
                ], origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (merged_key,), (merged_key,), (merged_key,), (merged_key,),
                ], origins)
        self.assertRaises(RevisionNotPresent,
            files.get_annotator().annotate, self.get_simple_key('missing-key'))
    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key:parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = all_parents.keys()
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))
    def test_get_sha1s(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        self.assertEqual({
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))
    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        files = self.get_versionedfiles()
        files.insert_record_stream([])
    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that left and right have the same contents."""
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents, expected.get_parent_map(expected.keys()))
        else:
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            actual_text = actual.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            expected_text = expected.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)
    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # insert some keys into f.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        stream = source.get_record_stream([('missing',) * self.key_length],
            'topological', False)
        self.assertRaises(errors.RevisionNotPresent, files.insert_record_stream,
            stream)
    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological', False)
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            self.assertIdenticalVersionedFile(source, files)
    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.
        """
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
            nograph=False, left_only=False)
        return source
    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis. In the meantime a record missing its basis is
        not inserted.
        """
        source = self.get_knit_delta_source()
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        entries = source.get_record_stream(keys, 'unordered', False)
        files = self.get_versionedfiles()
        if self.support_partial_insertion:
            self.assertEqual([],
                list(files.get_missing_compression_parent_keys()))
            files.insert_record_stream(entries)
            missing_bases = files.get_missing_compression_parent_keys()
            self.assertEqual(set([self.get_simple_key('left')]),
                set(missing_bases))
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
        else:
            self.assertRaises(
                errors.RevisionNotPresent, files.insert_record_stream, entries)
    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis. That basis can be added in a second
        insert_record_stream call that does not need to repeat records present
        in the previous stream. The record(s) that required that basis are
        fully inserted once their basis is no longer missing.
        """
        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        entries = source.get_record_stream([self.get_simple_key('origin'),
            self.get_simple_key('merged')], 'unordered', False)
        files = self.get_versionedfiles()
        files.insert_record_stream(entries)
        missing_bases = files.get_missing_compression_parent_keys()
        self.assertEqual(set([self.get_simple_key('left')]),
            set(missing_bases))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        merged_key = self.get_simple_key('merged')
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        # Add the full delta closure of the missing records
        missing_entries = source.get_record_stream(
            missing_bases, 'unordered', True)
        files.insert_record_stream(missing_entries)
        # Now 'merged' is fully inserted (and a commit would succeed).
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
2504
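    # Illustrative sketch (not executed) of the partial-insertion protocol
    # the two tests above exercise, assuming `source` and `files` as there:
    #
    #   files.insert_record_stream(
    #       source.get_record_stream(keys, 'unordered', False))
    #   missing = files.get_missing_compression_parent_keys()
    #   if missing:
    #       # fetch the missing bases as fulltexts and retry
    #       files.insert_record_stream(
    #           source.get_record_stream(missing, 'unordered', True))
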
    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least the set of lines added by the versions
        # in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no end-of-file newline
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                    pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
            [('Walking content', 0, 5),
             ('Walking content', 1, 5),
             ('Walking content', 2, 5),
             ('Walking content', 3, 5),
             ('Walking content', 4, 5),
             ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)

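    # Illustrative sketch (not executed): iter_lines_added_or_present_in_keys
    # yields (line, key) 2-tuples and may yield the same line more than once,
    # so consumers that only need membership de-duplicate as they go:
    #
    #   seen = set()
    #   for line, key in files.iter_lines_added_or_present_in_keys(keys):
    #       seen.add(line)
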
    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
            ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
            ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
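        # Note: a text whose last element lacks a trailing '\n' (such as
        # 'noeol' above) exercises the store's end-of-line handling; knits,
        # for example, record this with a per-text 'no-eol' option, which is
        # why the cases above enumerate eol/no-eol combinations so thoroughly.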
        next_parent = self.get_simple_key('base')
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: '779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: '2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: '1ebed371807ba5935958ad0884595126e8c4e823',
                 20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key('base')
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'))

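    # Note: add_mpdiffs() receives the expected sha1 alongside each diff so
    # the receiving store can verify the text it reconstructs from its own
    # copies of the parent texts; the assertEqualDiff above then checks that
    # the round-trip produced byte-identical fulltexts.
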
    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # bzr.
        files = self.get_versionedfiles()
        self.assertEqual(set(), set(files.keys()))
        if self.key_length == 1:
            key = ('foo',)
        else:
            key = ('foo', 'bar',)
        files.add_lines(key, (), [])
        self.assertEqual(set([key]), set(files.keys()))


class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        ret = {}
        for k in keys:
            if k in self._parent_map:
                ret[k] = self._parent_map[k]
        return ret

    def setUp(self):
        TestCase.setUp(self)
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)

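    # VirtualVersionedFiles adapts two callables into the read-only parts of
    # the VersionedFiles API. A minimal sketch of standalone use (assumed
    # sample data, not executed):
    #
    #   texts = VirtualVersionedFiles(
    #       lambda keys: dict((k, ()) for k in keys),  # every key a root
    #       {"A": ["line\n"]}.get)                     # lines per key name
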
    def test_add_lines(self):
        self.assertRaises(NotImplementedError,
            self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        self.assertRaises(NotImplementedError,
            self.texts.add_mpdiffs, [])

    def test_check_noerrors(self):
        self.texts.check()

    def test_insert_record_stream(self):
        self.assertRaises(NotImplementedError,
            self.texts.insert_record_stream, [])

    def test_get_sha1s_nonexistent(self):
        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
                          self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        self._parent_map = {"G": ("A", "B")}
        self.assertEquals({("G",): (("A",), ("B",))},
                          self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("chunked", record.storage_kind)
        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("absent", record.storage_kind)

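    # Note: a 'chunked' record holds its text as a list of byte strings;
    # get_bytes_as('fulltext') joins the chunks, while
    # get_bytes_as('chunked') returns the list as-is, as asserted above.
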
    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
                          sorted(list(it)))


class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):

    def get_ordering_vf(self, key_priority):
        builder = self.make_branch_builder('test')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'TREE_ROOT', 'directory', None))])
        builder.build_snapshot('B', ['A'], [])
        builder.build_snapshot('C', ['B'], [])
        builder.build_snapshot('D', ['C'], [])
        builder.finish_series()
        b = builder.get_branch()
        b.lock_read()
        self.addCleanup(b.unlock)
        vf = b.repository.inventories
        return versionedfile.OrderingVersionedFilesDecorator(vf, key_priority)

    def test_get_empty(self):
        vf = self.get_ordering_vf({})
        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
            'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([('A',), ('B',), ('C',), ('D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
                           False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
            'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([('D',), ('B',), ('A',), ('C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)

    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
            'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first.
        self.assertEqual([('A',), ('C',), ('D',), ('B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)