265
454
writer = pack.ContainerWriter(write_data)
267
456
access.set_writer(writer, index, (transport, packname))
268
memos = access.add_raw_records([10], '1234567890')
457
memos = access.add_raw_records([('key', 10)], '1234567890')
270
459
self.assertEqual(['1234567890'], list(access.get_raw_records(memos)))
461
def test_missing_index_raises_retry(self):
462
memos = self.make_pack_file()
463
transport = self.get_transport()
464
reload_called, reload_func = self.make_reload_func()
465
# Note that the index key has changed from 'foo' to 'bar'
466
access = _DirectPackAccess({'bar':(transport, 'packname')},
467
reload_func=reload_func)
468
e = self.assertListRaises(errors.RetryWithNewPacks,
469
access.get_raw_records, memos)
470
# Because a key was passed in which does not match our index list, we
471
# assume that the listing was already reloaded
472
self.assertTrue(e.reload_occurred)
473
self.assertIsInstance(e.exc_info, tuple)
474
self.assertIs(e.exc_info[0], KeyError)
475
self.assertIsInstance(e.exc_info[1], KeyError)
477
def test_missing_index_raises_key_error_with_no_reload(self):
478
memos = self.make_pack_file()
479
transport = self.get_transport()
480
# Note that the index key has changed from 'foo' to 'bar'
481
access = _DirectPackAccess({'bar':(transport, 'packname')})
482
e = self.assertListRaises(KeyError, access.get_raw_records, memos)
484
def test_missing_file_raises_retry(self):
485
memos = self.make_pack_file()
486
transport = self.get_transport()
487
reload_called, reload_func = self.make_reload_func()
488
# Note that the 'filename' has been changed to 'different-packname'
489
access = _DirectPackAccess({'foo':(transport, 'different-packname')},
490
reload_func=reload_func)
491
e = self.assertListRaises(errors.RetryWithNewPacks,
492
access.get_raw_records, memos)
493
# The file has gone missing, so we assume we need to reload
494
self.assertFalse(e.reload_occurred)
495
self.assertIsInstance(e.exc_info, tuple)
496
self.assertIs(e.exc_info[0], errors.NoSuchFile)
497
self.assertIsInstance(e.exc_info[1], errors.NoSuchFile)
498
self.assertEqual('different-packname', e.exc_info[1].path)
500
def test_missing_file_raises_no_such_file_with_no_reload(self):
501
memos = self.make_pack_file()
502
transport = self.get_transport()
503
# Note that the 'filename' has been changed to 'different-packname'
504
access = _DirectPackAccess({'foo':(transport, 'different-packname')})
505
e = self.assertListRaises(errors.NoSuchFile,
506
access.get_raw_records, memos)
508
def test_failing_readv_raises_retry(self):
509
memos = self.make_pack_file()
510
transport = self.get_transport()
511
failing_transport = MockReadvFailingTransport(
512
[transport.get_bytes('packname')])
513
reload_called, reload_func = self.make_reload_func()
514
access = _DirectPackAccess({'foo':(failing_transport, 'packname')},
515
reload_func=reload_func)
516
# Asking for a single record will not trigger the Mock failure
517
self.assertEqual(['1234567890'],
518
list(access.get_raw_records(memos[:1])))
519
self.assertEqual(['12345'],
520
list(access.get_raw_records(memos[1:2])))
521
# A multiple offset readv() will fail mid-way through
522
e = self.assertListRaises(errors.RetryWithNewPacks,
523
access.get_raw_records, memos)
524
# The file has gone missing, so we assume we need to reload
525
self.assertFalse(e.reload_occurred)
526
self.assertIsInstance(e.exc_info, tuple)
527
self.assertIs(e.exc_info[0], errors.NoSuchFile)
528
self.assertIsInstance(e.exc_info[1], errors.NoSuchFile)
529
self.assertEqual('packname', e.exc_info[1].path)
531
def test_failing_readv_raises_no_such_file_with_no_reload(self):
532
memos = self.make_pack_file()
533
transport = self.get_transport()
534
failing_transport = MockReadvFailingTransport(
535
[transport.get_bytes('packname')])
536
reload_called, reload_func = self.make_reload_func()
537
access = _DirectPackAccess({'foo':(failing_transport, 'packname')})
538
# Asking for a single record will not trigger the Mock failure
539
self.assertEqual(['1234567890'],
540
list(access.get_raw_records(memos[:1])))
541
self.assertEqual(['12345'],
542
list(access.get_raw_records(memos[1:2])))
543
# A multiple offset readv() will fail mid-way through
544
e = self.assertListRaises(errors.NoSuchFile,
545
access.get_raw_records, memos)
547
def test_reload_or_raise_no_reload(self):
548
access = _DirectPackAccess({}, reload_func=None)
549
retry_exc = self.make_retry_exception()
550
# Without a reload_func, we will just re-raise the original exception
551
self.assertRaises(_TestException, access.reload_or_raise, retry_exc)
553
def test_reload_or_raise_reload_changed(self):
554
reload_called, reload_func = self.make_reload_func(return_val=True)
555
access = _DirectPackAccess({}, reload_func=reload_func)
556
retry_exc = self.make_retry_exception()
557
access.reload_or_raise(retry_exc)
558
self.assertEqual([1], reload_called)
559
retry_exc.reload_occurred=True
560
access.reload_or_raise(retry_exc)
561
self.assertEqual([2], reload_called)
563
def test_reload_or_raise_reload_no_change(self):
564
reload_called, reload_func = self.make_reload_func(return_val=False)
565
access = _DirectPackAccess({}, reload_func=reload_func)
566
retry_exc = self.make_retry_exception()
567
# If reload_occurred is False, then we consider it an error to have
568
# reload_func() return False (no changes).
569
self.assertRaises(_TestException, access.reload_or_raise, retry_exc)
570
self.assertEqual([1], reload_called)
571
retry_exc.reload_occurred=True
572
# If reload_occurred is True, then we assume nothing changed because
573
# it had changed earlier, but didn't change again
574
access.reload_or_raise(retry_exc)
575
self.assertEqual([2], reload_called)
577
def test_annotate_retries(self):
578
vf, reload_counter = self.make_vf_for_retrying()
579
# It is a little bit bogus to annotate the Revision VF, but it works,
580
# as we have ancestry stored there
582
reload_lines = vf.annotate(key)
583
self.assertEqual([1, 1, 0], reload_counter)
584
plain_lines = vf.annotate(key)
585
self.assertEqual([1, 1, 0], reload_counter) # No extra reloading
586
if reload_lines != plain_lines:
587
self.fail('Annotation was not identical with reloading.')
588
# Now delete the packs-in-use, which should trigger another reload, but
589
# this time we just raise an exception because we can't recover
590
for trans, name in vf._access._indices.itervalues():
592
self.assertRaises(errors.NoSuchFile, vf.annotate, key)
593
self.assertEqual([2, 1, 1], reload_counter)
595
def test__get_record_map_retries(self):
596
vf, reload_counter = self.make_vf_for_retrying()
597
keys = [('rev-1',), ('rev-2',), ('rev-3',)]
598
records = vf._get_record_map(keys)
599
self.assertEqual(keys, sorted(records.keys()))
600
self.assertEqual([1, 1, 0], reload_counter)
601
# Now delete the packs-in-use, which should trigger another reload, but
602
# this time we just raise an exception because we can't recover
603
for trans, name in vf._access._indices.itervalues():
605
self.assertRaises(errors.NoSuchFile, vf._get_record_map, keys)
606
self.assertEqual([2, 1, 1], reload_counter)
608
def test_get_record_stream_retries(self):
609
vf, reload_counter = self.make_vf_for_retrying()
610
keys = [('rev-1',), ('rev-2',), ('rev-3',)]
611
record_stream = vf.get_record_stream(keys, 'topological', False)
612
record = record_stream.next()
613
self.assertEqual(('rev-1',), record.key)
614
self.assertEqual([0, 0, 0], reload_counter)
615
record = record_stream.next()
616
self.assertEqual(('rev-2',), record.key)
617
self.assertEqual([1, 1, 0], reload_counter)
618
record = record_stream.next()
619
self.assertEqual(('rev-3',), record.key)
620
self.assertEqual([1, 1, 0], reload_counter)
621
# Now delete all pack files, and see that we raise the right error
622
for trans, name in vf._access._indices.itervalues():
624
self.assertListRaises(errors.NoSuchFile,
625
vf.get_record_stream, keys, 'topological', False)
627
def test_iter_lines_added_or_present_in_keys_retries(self):
628
vf, reload_counter = self.make_vf_for_retrying()
629
keys = [('rev-1',), ('rev-2',), ('rev-3',)]
630
# Unfortunately, iter_lines_added_or_present_in_keys iterates the
631
# result in random order (determined by the iteration order from a
632
# set()), so we don't have any solid way to trigger whether data is
633
# read before or after. However we tried to delete the middle node to
634
# exercise the code well.
635
# What we care about is that all lines are always yielded, but not
638
reload_lines = sorted(vf.iter_lines_added_or_present_in_keys(keys))
639
self.assertEqual([1, 1, 0], reload_counter)
640
# Now do it again, to make sure the result is equivalent
641
plain_lines = sorted(vf.iter_lines_added_or_present_in_keys(keys))
642
self.assertEqual([1, 1, 0], reload_counter) # No extra reloading
643
self.assertEqual(plain_lines, reload_lines)
644
self.assertEqual(21, len(plain_lines))
645
# Now delete all pack files, and see that we raise the right error
646
for trans, name in vf._access._indices.itervalues():
648
self.assertListRaises(errors.NoSuchFile,
649
vf.iter_lines_added_or_present_in_keys, keys)
650
self.assertEqual([2, 1, 1], reload_counter)
652
def test_get_record_stream_yields_disk_sorted_order(self):
653
# if we get 'unordered' pick a semi-optimal order for reading. The
654
# order should be grouped by pack file, and then by position in file
655
repo = self.make_repository('test', format='pack-0.92')
657
self.addCleanup(repo.unlock)
658
repo.start_write_group()
660
vf.add_lines(('f-id', 'rev-5'), [('f-id', 'rev-4')], ['lines\n'])
661
vf.add_lines(('f-id', 'rev-1'), [], ['lines\n'])
662
vf.add_lines(('f-id', 'rev-2'), [('f-id', 'rev-1')], ['lines\n'])
663
repo.commit_write_group()
664
# We inserted them as rev-5, rev-1, rev-2, we should get them back in
666
stream = vf.get_record_stream([('f-id', 'rev-1'), ('f-id', 'rev-5'),
667
('f-id', 'rev-2')], 'unordered', False)
668
keys = [r.key for r in stream]
669
self.assertEqual([('f-id', 'rev-5'), ('f-id', 'rev-1'),
670
('f-id', 'rev-2')], keys)
671
repo.start_write_group()
672
vf.add_lines(('f-id', 'rev-4'), [('f-id', 'rev-3')], ['lines\n'])
673
vf.add_lines(('f-id', 'rev-3'), [('f-id', 'rev-2')], ['lines\n'])
674
vf.add_lines(('f-id', 'rev-6'), [('f-id', 'rev-5')], ['lines\n'])
675
repo.commit_write_group()
676
# Request in random order, to make sure the output order isn't based on
678
request_keys = set(('f-id', 'rev-%d' % i) for i in range(1, 7))
679
stream = vf.get_record_stream(request_keys, 'unordered', False)
680
keys = [r.key for r in stream]
681
# We want to get the keys back in disk order, but it doesn't matter
682
# which pack we read from first. So this can come back in 2 orders
683
alt1 = [('f-id', 'rev-%d' % i) for i in [4, 3, 6, 5, 1, 2]]
684
alt2 = [('f-id', 'rev-%d' % i) for i in [5, 1, 2, 4, 3, 6]]
685
if keys != alt1 and keys != alt2:
686
self.fail('Returned key order did not match either expected order.'
687
' expected %s or %s, not %s'
688
% (alt1, alt2, keys))
273
691
class LowLevelKnitDataTests(TestCase):
275
693
def create_gz_content(self, text):
277
gz_file = gzip.GzipFile(mode='wb', fileobj=sio)
695
gz_file = tuned_gzip.GzipFile(mode='wb', fileobj=sio)
278
696
gz_file.write(text)
280
698
return sio.getvalue()
700
def make_multiple_records(self):
701
"""Create the content for multiple records."""
702
sha1sum = osutils.sha('foo\nbar\n').hexdigest()
704
gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
709
record_1 = (0, len(gz_txt), sha1sum)
710
total_txt.append(gz_txt)
711
sha1sum = osutils.sha('baz\n').hexdigest()
712
gz_txt = self.create_gz_content('version rev-id-2 1 %s\n'
716
record_2 = (record_1[1], len(gz_txt), sha1sum)
717
total_txt.append(gz_txt)
718
return total_txt, record_1, record_2
282
720
def test_valid_knit_data(self):
283
sha1sum = sha.new('foo\nbar\n').hexdigest()
721
sha1sum = osutils.sha('foo\nbar\n').hexdigest()
284
722
gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
289
727
transport = MockTransport([gz_txt])
290
access = _KnitAccess(transport, 'filename', None, None, False, False)
291
data = _KnitData(access=access)
292
records = [('rev-id-1', (None, 0, len(gz_txt)))]
294
contents = data.read_records(records)
295
self.assertEqual({'rev-id-1':(['foo\n', 'bar\n'], sha1sum)}, contents)
297
raw_contents = list(data.read_records_iter_raw(records))
298
self.assertEqual([('rev-id-1', gz_txt)], raw_contents)
728
access = _KnitKeyAccess(transport, ConstantMapper('filename'))
729
knit = KnitVersionedFiles(None, access)
730
records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]
732
contents = list(knit._read_records_iter(records))
733
self.assertEqual([(('rev-id-1',), ['foo\n', 'bar\n'],
734
'4e48e2c9a3d2ca8a708cb0cc545700544efb5021')], contents)
736
raw_contents = list(knit._read_records_iter_raw(records))
737
self.assertEqual([(('rev-id-1',), gz_txt, sha1sum)], raw_contents)
739
def test_multiple_records_valid(self):
740
total_txt, record_1, record_2 = self.make_multiple_records()
741
transport = MockTransport([''.join(total_txt)])
742
access = _KnitKeyAccess(transport, ConstantMapper('filename'))
743
knit = KnitVersionedFiles(None, access)
744
records = [(('rev-id-1',), (('rev-id-1',), record_1[0], record_1[1])),
745
(('rev-id-2',), (('rev-id-2',), record_2[0], record_2[1]))]
747
contents = list(knit._read_records_iter(records))
748
self.assertEqual([(('rev-id-1',), ['foo\n', 'bar\n'], record_1[2]),
749
(('rev-id-2',), ['baz\n'], record_2[2])],
752
raw_contents = list(knit._read_records_iter_raw(records))
753
self.assertEqual([(('rev-id-1',), total_txt[0], record_1[2]),
754
(('rev-id-2',), total_txt[1], record_2[2])],
300
757
def test_not_enough_lines(self):
301
sha1sum = sha.new('foo\n').hexdigest()
758
sha1sum = osutils.sha('foo\n').hexdigest()
302
759
# record says 2 lines data says 1
303
760
gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
307
764
transport = MockTransport([gz_txt])
308
access = _KnitAccess(transport, 'filename', None, None, False, False)
309
data = _KnitData(access=access)
310
records = [('rev-id-1', (None, 0, len(gz_txt)))]
311
self.assertRaises(errors.KnitCorrupt, data.read_records, records)
765
access = _KnitKeyAccess(transport, ConstantMapper('filename'))
766
knit = KnitVersionedFiles(None, access)
767
records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]
768
self.assertRaises(errors.KnitCorrupt, list,
769
knit._read_records_iter(records))
313
771
# read_records_iter_raw won't detect that sort of mismatch/corruption
314
raw_contents = list(data.read_records_iter_raw(records))
315
self.assertEqual([('rev-id-1', gz_txt)], raw_contents)
772
raw_contents = list(knit._read_records_iter_raw(records))
773
self.assertEqual([(('rev-id-1',), gz_txt, sha1sum)], raw_contents)
317
775
def test_too_many_lines(self):
318
sha1sum = sha.new('foo\nbar\n').hexdigest()
776
sha1sum = osutils.sha('foo\nbar\n').hexdigest()
319
777
# record says 1 lines data says 2
320
778
gz_txt = self.create_gz_content('version rev-id-1 1 %s\n'
379
840
# Change 2 bytes in the middle to \xff
380
841
gz_txt = gz_txt[:10] + '\xff\xff' + gz_txt[12:]
381
842
transport = MockTransport([gz_txt])
382
access = _KnitAccess(transport, 'filename', None, None, False, False)
383
data = _KnitData(access=access)
384
records = [('rev-id-1', (None, 0, len(gz_txt)))]
386
self.assertRaises(errors.KnitCorrupt, data.read_records, records)
388
# read_records_iter_raw will notice if we request the wrong version.
389
self.assertRaises(errors.KnitCorrupt, list,
390
data.read_records_iter_raw(records))
843
access = _KnitKeyAccess(transport, ConstantMapper('filename'))
844
knit = KnitVersionedFiles(None, access)
845
records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]
846
self.assertRaises(errors.KnitCorrupt, list,
847
knit._read_records_iter(records))
848
# read_records_iter_raw will barf on bad gz data
849
self.assertRaises(errors.KnitCorrupt, list,
850
knit._read_records_iter_raw(records))
393
853
class LowLevelKnitIndexTests(TestCase):
395
def get_knit_index(self, *args, **kwargs):
396
orig = knit._load_data
398
knit._load_data = orig
399
self.addCleanup(reset)
855
def get_knit_index(self, transport, name, mode):
856
mapper = ConstantMapper(name)
400
857
from bzrlib._knit_load_data_py import _load_data_py
401
knit._load_data = _load_data_py
402
return _KnitIndex(*args, **kwargs)
404
def test_no_such_file(self):
405
transport = MockTransport()
407
self.assertRaises(NoSuchFile, self.get_knit_index,
408
transport, "filename", "r")
409
self.assertRaises(NoSuchFile, self.get_knit_index,
410
transport, "filename", "w", create=False)
858
self.overrideAttr(knit, '_load_data', _load_data_py)
859
allow_writes = lambda: 'w' in mode
860
return _KndxIndex(transport, mapper, lambda:None, allow_writes, lambda:True)
412
862
def test_create_file(self):
413
863
transport = MockTransport()
415
index = self.get_knit_index(transport, "filename", "w",
416
file_mode="wb", create=True)
418
("put_bytes_non_atomic",
419
("filename", index.HEADER), {"mode": "wb"}),
420
transport.calls.pop(0))
422
def test_delay_create_file(self):
423
transport = MockTransport()
425
index = self.get_knit_index(transport, "filename", "w",
426
create=True, file_mode="wb", create_parent_dir=True,
427
delay_create=True, dir_mode=0777)
428
self.assertEqual([], transport.calls)
430
index.add_versions([])
431
name, (filename, f), kwargs = transport.calls.pop(0)
432
self.assertEqual("put_file_non_atomic", name)
434
{"dir_mode": 0777, "create_parent_dir": True, "mode": "wb"},
436
self.assertEqual("filename", filename)
437
self.assertEqual(index.HEADER, f.read())
439
index.add_versions([])
440
self.assertEqual(("append_bytes", ("filename", ""), {}),
441
transport.calls.pop(0))
864
index = self.get_knit_index(transport, "filename", "w")
866
call = transport.calls.pop(0)
867
# call[1][1] is a StringIO - we can't test it by simple equality.
868
self.assertEqual('put_file_non_atomic', call[0])
869
self.assertEqual('filename.kndx', call[1][0])
870
# With no history, _KndxIndex writes a new index:
871
self.assertEqual(_KndxIndex.HEADER,
872
call[1][1].getvalue())
873
self.assertEqual({'create_parent_dir': True}, call[2])
443
875
def test_read_utf8_version_id(self):
444
876
unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
445
877
utf8_revision_id = unicode_revision_id.encode('utf-8')
446
878
transport = MockTransport([
448
880
'%s option 0 1 :' % (utf8_revision_id,)
450
882
index = self.get_knit_index(transport, "filename", "r")
451
# _KnitIndex is a private class, and deals in utf8 revision_ids, not
883
# _KndxIndex is a private class, and deals in utf8 revision_ids, not
452
884
# Unicode revision_ids.
453
self.assertTrue(index.has_version(utf8_revision_id))
454
self.assertFalse(index.has_version(unicode_revision_id))
885
self.assertEqual({(utf8_revision_id,):()},
886
index.get_parent_map(index.keys()))
887
self.assertFalse((unicode_revision_id,) in index.keys())
456
889
def test_read_utf8_parents(self):
457
890
unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
458
891
utf8_revision_id = unicode_revision_id.encode('utf-8')
459
892
transport = MockTransport([
461
894
"version option 0 1 .%s :" % (utf8_revision_id,)
463
896
index = self.get_knit_index(transport, "filename", "r")
464
self.assertEqual([utf8_revision_id],
465
index.get_parents_with_ghosts("version"))
897
self.assertEqual({("version",):((utf8_revision_id,),)},
898
index.get_parent_map(index.keys()))
467
900
def test_read_ignore_corrupted_lines(self):
468
901
transport = MockTransport([
471
904
"corrupted options 0 1 .b .c ",
472
905
"version options 0 1 :"
474
907
index = self.get_knit_index(transport, "filename", "r")
475
self.assertEqual(1, index.num_versions())
476
self.assertTrue(index.has_version("version"))
908
self.assertEqual(1, len(index.keys()))
909
self.assertEqual(set([("version",)]), index.keys())
478
911
def test_read_corrupted_header(self):
479
912
transport = MockTransport(['not a bzr knit index header\n'])
480
self.assertRaises(KnitHeaderError,
481
self.get_knit_index, transport, "filename", "r")
913
index = self.get_knit_index(transport, "filename", "r")
914
self.assertRaises(KnitHeaderError, index.keys)
483
916
def test_read_duplicate_entries(self):
484
917
transport = MockTransport([
486
919
"parent options 0 1 :",
487
920
"version options1 0 1 0 :",
488
921
"version options2 1 2 .other :",
489
922
"version options3 3 4 0 .other :"
491
924
index = self.get_knit_index(transport, "filename", "r")
492
self.assertEqual(2, index.num_versions())
925
self.assertEqual(2, len(index.keys()))
493
926
# check that the index used is the first one written. (Specific
494
927
# to KnitIndex style indices.
495
self.assertEqual("1", index._version_list_to_index(["version"]))
496
self.assertEqual((None, 3, 4), index.get_position("version"))
497
self.assertEqual(["options3"], index.get_options("version"))
498
self.assertEqual(["parent", "other"],
499
index.get_parents_with_ghosts("version"))
928
self.assertEqual("1", index._dictionary_compress([("version",)]))
929
self.assertEqual((("version",), 3, 4), index.get_position(("version",)))
930
self.assertEqual(["options3"], index.get_options(("version",)))
931
self.assertEqual({("version",):(("parent",), ("other",))},
932
index.get_parent_map([("version",)]))
501
934
def test_read_compressed_parents(self):
502
935
transport = MockTransport([
504
937
"a option 0 1 :",
505
938
"b option 0 1 0 :",
506
939
"c option 0 1 1 0 :",
508
941
index = self.get_knit_index(transport, "filename", "r")
509
self.assertEqual(["a"], index.get_parents("b"))
510
self.assertEqual(["b", "a"], index.get_parents("c"))
942
self.assertEqual({("b",):(("a",),), ("c",):(("b",), ("a",))},
943
index.get_parent_map([("b",), ("c",)]))
512
945
def test_write_utf8_version_id(self):
513
946
unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
514
947
utf8_revision_id = unicode_revision_id.encode('utf-8')
515
948
transport = MockTransport([
518
951
index = self.get_knit_index(transport, "filename", "r")
519
index.add_version(utf8_revision_id, ["option"], (None, 0, 1), [])
520
self.assertEqual(("append_bytes", ("filename",
521
"\n%s option 0 1 :" % (utf8_revision_id,)),
523
transport.calls.pop(0))
953
((utf8_revision_id,), ["option"], ((utf8_revision_id,), 0, 1), [])])
954
call = transport.calls.pop(0)
955
# call[1][1] is a StringIO - we can't test it by simple equality.
956
self.assertEqual('put_file_non_atomic', call[0])
957
self.assertEqual('filename.kndx', call[1][0])
958
# With no history, _KndxIndex writes a new index:
959
self.assertEqual(_KndxIndex.HEADER +
960
"\n%s option 0 1 :" % (utf8_revision_id,),
961
call[1][1].getvalue())
962
self.assertEqual({'create_parent_dir': True}, call[2])
525
964
def test_write_utf8_parents(self):
526
965
unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
527
966
utf8_revision_id = unicode_revision_id.encode('utf-8')
528
967
transport = MockTransport([
531
index = self.get_knit_index(transport, "filename", "r")
532
index.add_version("version", ["option"], (None, 0, 1), [utf8_revision_id])
533
self.assertEqual(("append_bytes", ("filename",
534
"\nversion option 0 1 .%s :" % (utf8_revision_id,)),
536
transport.calls.pop(0))
538
def test_get_graph(self):
539
transport = MockTransport()
540
index = self.get_knit_index(transport, "filename", "w", create=True)
541
self.assertEqual([], index.get_graph())
543
index.add_version("a", ["option"], (None, 0, 1), ["b"])
544
self.assertEqual([("a", ["b"])], index.get_graph())
546
index.add_version("c", ["option"], (None, 0, 1), ["d"])
547
self.assertEqual([("a", ["b"]), ("c", ["d"])],
548
sorted(index.get_graph()))
550
def test_get_ancestry(self):
551
transport = MockTransport([
554
"b option 0 1 0 .e :",
555
"c option 0 1 1 0 :",
556
"d option 0 1 2 .f :"
558
index = self.get_knit_index(transport, "filename", "r")
560
self.assertEqual([], index.get_ancestry([]))
561
self.assertEqual(["a"], index.get_ancestry(["a"]))
562
self.assertEqual(["a", "b"], index.get_ancestry(["b"]))
563
self.assertEqual(["a", "b", "c"], index.get_ancestry(["c"]))
564
self.assertEqual(["a", "b", "c", "d"], index.get_ancestry(["d"]))
565
self.assertEqual(["a", "b"], index.get_ancestry(["a", "b"]))
566
self.assertEqual(["a", "b", "c"], index.get_ancestry(["a", "c"]))
568
self.assertRaises(RevisionNotPresent, index.get_ancestry, ["e"])
570
def test_get_ancestry_with_ghosts(self):
571
transport = MockTransport([
574
"b option 0 1 0 .e :",
575
"c option 0 1 0 .f .g :",
576
"d option 0 1 2 .h .j .k :"
578
index = self.get_knit_index(transport, "filename", "r")
580
self.assertEqual([], index.get_ancestry_with_ghosts([]))
581
self.assertEqual(["a"], index.get_ancestry_with_ghosts(["a"]))
582
self.assertEqual(["a", "e", "b"],
583
index.get_ancestry_with_ghosts(["b"]))
584
self.assertEqual(["a", "g", "f", "c"],
585
index.get_ancestry_with_ghosts(["c"]))
586
self.assertEqual(["a", "g", "f", "c", "k", "j", "h", "d"],
587
index.get_ancestry_with_ghosts(["d"]))
588
self.assertEqual(["a", "e", "b"],
589
index.get_ancestry_with_ghosts(["a", "b"]))
590
self.assertEqual(["a", "g", "f", "c"],
591
index.get_ancestry_with_ghosts(["a", "c"]))
970
index = self.get_knit_index(transport, "filename", "r")
972
(("version",), ["option"], (("version",), 0, 1), [(utf8_revision_id,)])])
973
call = transport.calls.pop(0)
974
# call[1][1] is a StringIO - we can't test it by simple equality.
975
self.assertEqual('put_file_non_atomic', call[0])
976
self.assertEqual('filename.kndx', call[1][0])
977
# With no history, _KndxIndex writes a new index:
978
self.assertEqual(_KndxIndex.HEADER +
979
"\nversion option 0 1 .%s :" % (utf8_revision_id,),
980
call[1][1].getvalue())
981
self.assertEqual({'create_parent_dir': True}, call[2])
984
transport = MockTransport([
987
index = self.get_knit_index(transport, "filename", "r")
989
self.assertEqual(set(), index.keys())
991
index.add_records([(("a",), ["option"], (("a",), 0, 1), [])])
992
self.assertEqual(set([("a",)]), index.keys())
994
index.add_records([(("a",), ["option"], (("a",), 0, 1), [])])
995
self.assertEqual(set([("a",)]), index.keys())
997
index.add_records([(("b",), ["option"], (("b",), 0, 1), [])])
998
self.assertEqual(set([("a",), ("b",)]), index.keys())
1000
def add_a_b(self, index, random_id=None):
1002
if random_id is not None:
1003
kwargs["random_id"] = random_id
1005
(("a",), ["option"], (("a",), 0, 1), [("b",)]),
1006
(("a",), ["opt"], (("a",), 1, 2), [("c",)]),
1007
(("b",), ["option"], (("b",), 2, 3), [("a",)])
1010
def assertIndexIsAB(self, index):
1015
index.get_parent_map(index.keys()))
1016
self.assertEqual((("a",), 1, 2), index.get_position(("a",)))
1017
self.assertEqual((("b",), 2, 3), index.get_position(("b",)))
1018
self.assertEqual(["opt"], index.get_options(("a",)))
1020
def test_add_versions(self):
1021
transport = MockTransport([
1024
index = self.get_knit_index(transport, "filename", "r")
1027
call = transport.calls.pop(0)
1028
# call[1][1] is a StringIO - we can't test it by simple equality.
1029
self.assertEqual('put_file_non_atomic', call[0])
1030
self.assertEqual('filename.kndx', call[1][0])
1031
# With no history, _KndxIndex writes a new index:
592
1032
self.assertEqual(
593
["a", "g", "f", "c", "e", "b", "k", "j", "h", "d"],
594
index.get_ancestry_with_ghosts(["b", "d"]))
596
self.assertRaises(RevisionNotPresent,
597
index.get_ancestry_with_ghosts, ["e"])
599
def test_iter_parents(self):
600
transport = MockTransport()
601
index = self.get_knit_index(transport, "filename", "w", create=True)
603
index.add_version('r0', ['option'], (None, 0, 1), [])
605
index.add_version('r1', ['option'], (None, 0, 1), ['r0'])
607
index.add_version('r2', ['option'], (None, 0, 1), ['r1', 'r0'])
609
# cases: each sample data individually:
610
self.assertEqual(set([('r0', ())]),
611
set(index.iter_parents(['r0'])))
612
self.assertEqual(set([('r1', ('r0', ))]),
613
set(index.iter_parents(['r1'])))
614
self.assertEqual(set([('r2', ('r1', 'r0'))]),
615
set(index.iter_parents(['r2'])))
616
# no nodes returned for a missing node
617
self.assertEqual(set(),
618
set(index.iter_parents(['missing'])))
619
# 1 node returned with missing nodes skipped
620
self.assertEqual(set([('r1', ('r0', ))]),
621
set(index.iter_parents(['ghost1', 'r1', 'ghost'])))
623
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
624
set(index.iter_parents(['r0', 'r1'])))
625
# 2 nodes returned, missing skipped
626
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
627
set(index.iter_parents(['a', 'r0', 'b', 'r1', 'c'])))
629
def test_num_versions(self):
630
transport = MockTransport([
633
index = self.get_knit_index(transport, "filename", "r")
635
self.assertEqual(0, index.num_versions())
636
self.assertEqual(0, len(index))
638
index.add_version("a", ["option"], (None, 0, 1), [])
639
self.assertEqual(1, index.num_versions())
640
self.assertEqual(1, len(index))
642
index.add_version("a", ["option2"], (None, 1, 2), [])
643
self.assertEqual(1, index.num_versions())
644
self.assertEqual(1, len(index))
646
index.add_version("b", ["option"], (None, 0, 1), [])
647
self.assertEqual(2, index.num_versions())
648
self.assertEqual(2, len(index))
650
def test_get_versions(self):
651
transport = MockTransport([
654
index = self.get_knit_index(transport, "filename", "r")
656
self.assertEqual([], index.get_versions())
658
index.add_version("a", ["option"], (None, 0, 1), [])
659
self.assertEqual(["a"], index.get_versions())
661
index.add_version("a", ["option"], (None, 0, 1), [])
662
self.assertEqual(["a"], index.get_versions())
664
index.add_version("b", ["option"], (None, 0, 1), [])
665
self.assertEqual(["a", "b"], index.get_versions())
667
def test_add_version(self):
668
transport = MockTransport([
671
index = self.get_knit_index(transport, "filename", "r")
673
index.add_version("a", ["option"], (None, 0, 1), ["b"])
674
self.assertEqual(("append_bytes",
675
("filename", "\na option 0 1 .b :"),
676
{}), transport.calls.pop(0))
677
self.assertTrue(index.has_version("a"))
678
self.assertEqual(1, index.num_versions())
679
self.assertEqual((None, 0, 1), index.get_position("a"))
680
self.assertEqual(["option"], index.get_options("a"))
681
self.assertEqual(["b"], index.get_parents_with_ghosts("a"))
683
index.add_version("a", ["opt"], (None, 1, 2), ["c"])
684
self.assertEqual(("append_bytes",
685
("filename", "\na opt 1 2 .c :"),
686
{}), transport.calls.pop(0))
687
self.assertTrue(index.has_version("a"))
688
self.assertEqual(1, index.num_versions())
689
self.assertEqual((None, 1, 2), index.get_position("a"))
690
self.assertEqual(["opt"], index.get_options("a"))
691
self.assertEqual(["c"], index.get_parents_with_ghosts("a"))
693
index.add_version("b", ["option"], (None, 2, 3), ["a"])
694
self.assertEqual(("append_bytes",
695
("filename", "\nb option 2 3 0 :"),
696
{}), transport.calls.pop(0))
697
self.assertTrue(index.has_version("b"))
698
self.assertEqual(2, index.num_versions())
699
self.assertEqual((None, 2, 3), index.get_position("b"))
700
self.assertEqual(["option"], index.get_options("b"))
701
self.assertEqual(["a"], index.get_parents_with_ghosts("b"))
703
def test_add_versions(self):
704
transport = MockTransport([
707
index = self.get_knit_index(transport, "filename", "r")
710
("a", ["option"], (None, 0, 1), ["b"]),
711
("a", ["opt"], (None, 1, 2), ["c"]),
712
("b", ["option"], (None, 2, 3), ["a"])
714
self.assertEqual(("append_bytes", ("filename",
715
1034
"\na option 0 1 .b :"
716
1035
"\na opt 1 2 .c :"
718
), {}), transport.calls.pop(0))
719
self.assertTrue(index.has_version("a"))
720
self.assertTrue(index.has_version("b"))
721
self.assertEqual(2, index.num_versions())
722
self.assertEqual((None, 1, 2), index.get_position("a"))
723
self.assertEqual((None, 2, 3), index.get_position("b"))
724
self.assertEqual(["opt"], index.get_options("a"))
725
self.assertEqual(["option"], index.get_options("b"))
726
self.assertEqual(["c"], index.get_parents_with_ghosts("a"))
727
self.assertEqual(["a"], index.get_parents_with_ghosts("b"))
1036
"\nb option 2 3 0 :",
1037
call[1][1].getvalue())
1038
self.assertEqual({'create_parent_dir': True}, call[2])
1039
self.assertIndexIsAB(index)
1041
def test_add_versions_random_id_is_accepted(self):
1042
transport = MockTransport([
1045
index = self.get_knit_index(transport, "filename", "r")
1046
self.add_a_b(index, random_id=True)
729
1048
def test_delay_create_and_add_versions(self):
730
1049
transport = MockTransport()
732
index = self.get_knit_index(transport, "filename", "w",
733
create=True, file_mode="wb", create_parent_dir=True,
734
delay_create=True, dir_mode=0777)
1051
index = self.get_knit_index(transport, "filename", "w")
735
1053
self.assertEqual([], transport.calls)
738
("a", ["option"], (None, 0, 1), ["b"]),
739
("a", ["opt"], (None, 1, 2), ["c"]),
740
("b", ["option"], (None, 2, 3), ["a"])
742
name, (filename, f), kwargs = transport.calls.pop(0)
743
self.assertEqual("put_file_non_atomic", name)
745
{"dir_mode": 0777, "create_parent_dir": True, "mode": "wb"},
747
self.assertEqual("filename", filename)
1056
#[ {"dir_mode": 0777, "create_parent_dir": True, "mode": "wb"},
1058
# Two calls: one during which we load the existing index (and when its
1059
# missing create it), then a second where we write the contents out.
1060
self.assertEqual(2, len(transport.calls))
1061
call = transport.calls.pop(0)
1062
self.assertEqual('put_file_non_atomic', call[0])
1063
self.assertEqual('filename.kndx', call[1][0])
1064
# With no history, _KndxIndex writes a new index:
1065
self.assertEqual(_KndxIndex.HEADER, call[1][1].getvalue())
1066
self.assertEqual({'create_parent_dir': True}, call[2])
1067
call = transport.calls.pop(0)
1068
# call[1][1] is a StringIO - we can't test it by simple equality.
1069
self.assertEqual('put_file_non_atomic', call[0])
1070
self.assertEqual('filename.kndx', call[1][0])
1071
# With no history, _KndxIndex writes a new index:
750
1074
"\na option 0 1 .b :"
751
1075
"\na opt 1 2 .c :"
752
1076
"\nb option 2 3 0 :",
755
def test_has_version(self):
756
transport = MockTransport([
760
index = self.get_knit_index(transport, "filename", "r")
762
self.assertTrue(index.has_version("a"))
763
self.assertFalse(index.has_version("b"))
1077
call[1][1].getvalue())
1078
self.assertEqual({'create_parent_dir': True}, call[2])
1080
def assertTotalBuildSize(self, size, keys, positions):
1081
self.assertEqual(size,
1082
knit._get_total_build_size(None, keys, positions))
1084
def test__get_total_build_size(self):
1086
('a',): (('fulltext', False), (('a',), 0, 100), None),
1087
('b',): (('line-delta', False), (('b',), 100, 21), ('a',)),
1088
('c',): (('line-delta', False), (('c',), 121, 35), ('b',)),
1089
('d',): (('line-delta', False), (('d',), 156, 12), ('b',)),
1091
self.assertTotalBuildSize(100, [('a',)], positions)
1092
self.assertTotalBuildSize(121, [('b',)], positions)
1093
# c needs both a & b
1094
self.assertTotalBuildSize(156, [('c',)], positions)
1095
# we shouldn't count 'b' twice
1096
self.assertTotalBuildSize(156, [('b',), ('c',)], positions)
1097
self.assertTotalBuildSize(133, [('d',)], positions)
1098
self.assertTotalBuildSize(168, [('c',), ('d',)], positions)
765
1100
def test_get_position(self):
766
1101
transport = MockTransport([
768
1103
"a option 0 1 :",
769
1104
"b option 1 2 :"
771
1106
index = self.get_knit_index(transport, "filename", "r")
773
self.assertEqual((None, 0, 1), index.get_position("a"))
774
self.assertEqual((None, 1, 2), index.get_position("b"))
1108
self.assertEqual((("a",), 0, 1), index.get_position(("a",)))
1109
self.assertEqual((("b",), 1, 2), index.get_position(("b",)))
776
1111
def test_get_method(self):
777
1112
transport = MockTransport([
779
1114
"a fulltext,unknown 0 1 :",
780
1115
"b unknown,line-delta 1 2 :",
1246
def test_scan_unvalidated_index_not_implemented(self):
1247
transport = MockTransport()
1248
index = self.get_knit_index(transport, 'filename', 'r')
1250
NotImplementedError, index.scan_unvalidated_index,
1251
'dummy graph_index')
1253
NotImplementedError, index.get_missing_compression_parents)
940
1255
def test_short_line(self):
941
1256
transport = MockTransport([
943
1258
"a option 0 10 :",
944
1259
"b option 10 10 0", # This line isn't terminated, ignored
946
1261
index = self.get_knit_index(transport, "filename", "r")
947
self.assertEqual(['a'], index.get_versions())
1262
self.assertEqual(set([('a',)]), index.keys())
949
1264
def test_skip_incomplete_record(self):
950
1265
# A line with bogus data should just be skipped
951
1266
transport = MockTransport([
953
1268
"a option 0 10 :",
954
1269
"b option 10 10 0", # This line isn't terminated, ignored
955
1270
"c option 20 10 0 :", # Properly terminated, and starts with '\n'
957
1272
index = self.get_knit_index(transport, "filename", "r")
958
self.assertEqual(['a', 'c'], index.get_versions())
1273
self.assertEqual(set([('a',), ('c',)]), index.keys())
960
1275
def test_trailing_characters(self):
961
1276
# A line with bogus data should just be skipped
962
1277
transport = MockTransport([
964
1279
"a option 0 10 :",
965
1280
"b option 10 10 0 :a", # This line has extra trailing characters
966
1281
"c option 20 10 0 :", # Properly terminated, and starts with '\n'
968
1283
index = self.get_knit_index(transport, "filename", "r")
969
self.assertEqual(['a', 'c'], index.get_versions())
1284
self.assertEqual(set([('a',), ('c',)]), index.keys())
972
1287
class LowLevelKnitIndexTests_c(LowLevelKnitIndexTests):
974
_test_needs_features = [CompiledKnitFeature]
976
def get_knit_index(self, *args, **kwargs):
977
orig = knit._load_data
979
knit._load_data = orig
980
self.addCleanup(reset)
981
from bzrlib._knit_load_data_c import _load_data_c
982
knit._load_data = _load_data_c
983
return _KnitIndex(*args, **kwargs)
1289
_test_needs_features = [compiled_knit_feature]
1291
def get_knit_index(self, transport, name, mode):
1292
mapper = ConstantMapper(name)
1293
from bzrlib._knit_load_data_pyx import _load_data_c
1294
self.overrideAttr(knit, '_load_data', _load_data_c)
1295
allow_writes = lambda: mode == 'w'
1296
return _KndxIndex(transport, mapper, lambda:None,
1297
allow_writes, lambda:True)
1300
class Test_KnitAnnotator(TestCaseWithMemoryTransport):
1302
def make_annotator(self):
1303
factory = knit.make_pack_factory(True, True, 1)
1304
vf = factory(self.get_transport())
1305
return knit._KnitAnnotator(vf)
1307
def test__expand_fulltext(self):
1308
ann = self.make_annotator()
1309
rev_key = ('rev-id',)
1310
ann._num_compression_children[rev_key] = 1
1311
res = ann._expand_record(rev_key, (('parent-id',),), None,
1312
['line1\n', 'line2\n'], ('fulltext', True))
1313
# The content object and text lines should be cached appropriately
1314
self.assertEqual(['line1\n', 'line2'], res)
1315
content_obj = ann._content_objects[rev_key]
1316
self.assertEqual(['line1\n', 'line2\n'], content_obj._lines)
1317
self.assertEqual(res, content_obj.text())
1318
self.assertEqual(res, ann._text_cache[rev_key])
1320
def test__expand_delta_comp_parent_not_available(self):
1321
# Parent isn't available yet, so we return nothing, but queue up this
1322
# node for later processing
1323
ann = self.make_annotator()
1324
rev_key = ('rev-id',)
1325
parent_key = ('parent-id',)
1326
record = ['0,1,1\n', 'new-line\n']
1327
details = ('line-delta', False)
1328
res = ann._expand_record(rev_key, (parent_key,), parent_key,
1330
self.assertEqual(None, res)
1331
self.assertTrue(parent_key in ann._pending_deltas)
1332
pending = ann._pending_deltas[parent_key]
1333
self.assertEqual(1, len(pending))
1334
self.assertEqual((rev_key, (parent_key,), record, details), pending[0])
1336
def test__expand_record_tracks_num_children(self):
1337
ann = self.make_annotator()
1338
rev_key = ('rev-id',)
1339
rev2_key = ('rev2-id',)
1340
parent_key = ('parent-id',)
1341
record = ['0,1,1\n', 'new-line\n']
1342
details = ('line-delta', False)
1343
ann._num_compression_children[parent_key] = 2
1344
ann._expand_record(parent_key, (), None, ['line1\n', 'line2\n'],
1345
('fulltext', False))
1346
res = ann._expand_record(rev_key, (parent_key,), parent_key,
1348
self.assertEqual({parent_key: 1}, ann._num_compression_children)
1349
# Expanding the second child should remove the content object, and the
1350
# num_compression_children entry
1351
res = ann._expand_record(rev2_key, (parent_key,), parent_key,
1353
self.assertFalse(parent_key in ann._content_objects)
1354
self.assertEqual({}, ann._num_compression_children)
1355
# We should not cache the content_objects for rev2 and rev, because
1356
# they do not have compression children of their own.
1357
self.assertEqual({}, ann._content_objects)
1359
def test__expand_delta_records_blocks(self):
1360
ann = self.make_annotator()
1361
rev_key = ('rev-id',)
1362
parent_key = ('parent-id',)
1363
record = ['0,1,1\n', 'new-line\n']
1364
details = ('line-delta', True)
1365
ann._num_compression_children[parent_key] = 2
1366
ann._expand_record(parent_key, (), None,
1367
['line1\n', 'line2\n', 'line3\n'],
1368
('fulltext', False))
1369
ann._expand_record(rev_key, (parent_key,), parent_key, record, details)
1370
self.assertEqual({(rev_key, parent_key): [(1, 1, 1), (3, 3, 0)]},
1371
ann._matching_blocks)
1372
rev2_key = ('rev2-id',)
1373
record = ['0,1,1\n', 'new-line\n']
1374
details = ('line-delta', False)
1375
ann._expand_record(rev2_key, (parent_key,), parent_key, record, details)
1376
self.assertEqual([(1, 1, 2), (3, 3, 0)],
1377
ann._matching_blocks[(rev2_key, parent_key)])
1379
def test__get_parent_ann_uses_matching_blocks(self):
1380
ann = self.make_annotator()
1381
rev_key = ('rev-id',)
1382
parent_key = ('parent-id',)
1383
parent_ann = [(parent_key,)]*3
1384
block_key = (rev_key, parent_key)
1385
ann._annotations_cache[parent_key] = parent_ann
1386
ann._matching_blocks[block_key] = [(0, 1, 1), (3, 3, 0)]
1387
# We should not try to access any parent_lines content, because we know
1388
# we already have the matching blocks
1389
par_ann, blocks = ann._get_parent_annotations_and_matches(rev_key,
1390
['1\n', '2\n', '3\n'], parent_key)
1391
self.assertEqual(parent_ann, par_ann)
1392
self.assertEqual([(0, 1, 1), (3, 3, 0)], blocks)
1393
self.assertEqual({}, ann._matching_blocks)
1395
def test__process_pending(self):
1396
ann = self.make_annotator()
1397
rev_key = ('rev-id',)
1400
record = ['0,1,1\n', 'new-line\n']
1401
details = ('line-delta', False)
1402
p1_record = ['line1\n', 'line2\n']
1403
ann._num_compression_children[p1_key] = 1
1404
res = ann._expand_record(rev_key, (p1_key,p2_key), p1_key,
1406
self.assertEqual(None, res)
1407
# self.assertTrue(p1_key in ann._pending_deltas)
1408
self.assertEqual({}, ann._pending_annotation)
1409
# Now insert p1, and we should be able to expand the delta
1410
res = ann._expand_record(p1_key, (), None, p1_record,
1411
('fulltext', False))
1412
self.assertEqual(p1_record, res)
1413
ann._annotations_cache[p1_key] = [(p1_key,)]*2
1414
res = ann._process_pending(p1_key)
1415
self.assertEqual([], res)
1416
self.assertFalse(p1_key in ann._pending_deltas)
1417
self.assertTrue(p2_key in ann._pending_annotation)
1418
self.assertEqual({p2_key: [(rev_key, (p1_key, p2_key))]},
1419
ann._pending_annotation)
1420
# Now fill in parent 2, and pending annotation should be satisfied
1421
res = ann._expand_record(p2_key, (), None, [], ('fulltext', False))
1422
ann._annotations_cache[p2_key] = []
1423
res = ann._process_pending(p2_key)
1424
self.assertEqual([rev_key], res)
1425
self.assertEqual({}, ann._pending_annotation)
1426
self.assertEqual({}, ann._pending_deltas)
1428
def test_record_delta_removes_basis(self):
1429
ann = self.make_annotator()
1430
ann._expand_record(('parent-id',), (), None,
1431
['line1\n', 'line2\n'], ('fulltext', False))
1432
ann._num_compression_children['parent-id'] = 2
1434
def test_annotate_special_text(self):
1435
ann = self.make_annotator()
1437
rev1_key = ('rev-1',)
1438
rev2_key = ('rev-2',)
1439
rev3_key = ('rev-3',)
1440
spec_key = ('special:',)
1441
vf.add_lines(rev1_key, [], ['initial content\n'])
1442
vf.add_lines(rev2_key, [rev1_key], ['initial content\n',
1445
vf.add_lines(rev3_key, [rev1_key], ['initial content\n',
1448
spec_text = ('initial content\n'
1452
ann.add_special_text(spec_key, [rev2_key, rev3_key], spec_text)
1453
anns, lines = ann.annotate(spec_key)
1454
self.assertEqual([(rev1_key,),
1455
(rev2_key, rev3_key),
1459
self.assertEqualDiff(spec_text, ''.join(lines))
987
1462
class KnitTests(TestCaseWithTransport):
988
1463
"""Class containing knit test helper routines."""
990
def make_test_knit(self, annotate=False, delay_create=False, index=None,
993
factory = KnitPlainFactory()
996
return KnitVersionedFile(name, get_transport('.'), access_mode='w',
997
factory=factory, create=True,
998
delay_create=delay_create, index=index)
1000
def assertRecordContentEqual(self, knit, version_id, candidate_content):
1001
"""Assert that some raw record content matches the raw record content
1002
for a particular version_id in the given knit.
1004
index_memo = knit._index.get_position(version_id)
1005
record = (version_id, index_memo)
1006
[(_, expected_content)] = list(knit._data.read_records_iter_raw([record]))
1007
self.assertEqual(expected_content, candidate_content)
1010
class BasicKnitTests(KnitTests):
1012
def add_stock_one_and_one_a(self, k):
1013
k.add_lines('text-1', [], split_lines(TEXT_1))
1014
k.add_lines('text-1a', ['text-1'], split_lines(TEXT_1A))
1016
def test_knit_constructor(self):
1017
"""Construct empty k"""
1018
self.make_test_knit()
1020
def test_make_explicit_index(self):
1021
"""We can supply an index to use."""
1022
knit = KnitVersionedFile('test', get_transport('.'),
1023
index='strangelove')
1024
self.assertEqual(knit._index, 'strangelove')
1026
def test_knit_add(self):
1027
"""Store one text in knit and retrieve"""
1028
k = self.make_test_knit()
1029
k.add_lines('text-1', [], split_lines(TEXT_1))
1030
self.assertTrue(k.has_version('text-1'))
1031
self.assertEqualDiff(''.join(k.get_lines('text-1')), TEXT_1)
1033
def test_knit_reload(self):
1034
# test that the content in a reloaded knit is correct
1035
k = self.make_test_knit()
1036
k.add_lines('text-1', [], split_lines(TEXT_1))
1038
k2 = KnitVersionedFile('test', get_transport('.'), access_mode='r', factory=KnitPlainFactory(), create=True)
1039
self.assertTrue(k2.has_version('text-1'))
1040
self.assertEqualDiff(''.join(k2.get_lines('text-1')), TEXT_1)
1042
def test_knit_several(self):
1043
"""Store several texts in a knit"""
1044
k = self.make_test_knit()
1045
k.add_lines('text-1', [], split_lines(TEXT_1))
1046
k.add_lines('text-2', [], split_lines(TEXT_2))
1047
self.assertEqualDiff(''.join(k.get_lines('text-1')), TEXT_1)
1048
self.assertEqualDiff(''.join(k.get_lines('text-2')), TEXT_2)
1050
def test_repeated_add(self):
1051
"""Knit traps attempt to replace existing version"""
1052
k = self.make_test_knit()
1053
k.add_lines('text-1', [], split_lines(TEXT_1))
1054
self.assertRaises(RevisionAlreadyPresent,
1056
'text-1', [], split_lines(TEXT_1))
1058
def test_empty(self):
1059
k = self.make_test_knit(True)
1060
k.add_lines('text-1', [], [])
1061
self.assertEquals(k.get_lines('text-1'), [])
1063
def test_incomplete(self):
1064
"""Test if texts without a ending line-end can be inserted and
1066
k = KnitVersionedFile('test', get_transport('.'), delta=False, create=True)
1067
k.add_lines('text-1', [], ['a\n', 'b' ])
1068
k.add_lines('text-2', ['text-1'], ['a\rb\n', 'b\n'])
1069
# reopening ensures maximum room for confusion
1070
k = KnitVersionedFile('test', get_transport('.'), delta=False, create=True)
1071
self.assertEquals(k.get_lines('text-1'), ['a\n', 'b' ])
1072
self.assertEquals(k.get_lines('text-2'), ['a\rb\n', 'b\n'])
1074
def test_delta(self):
1075
"""Expression of knit delta as lines"""
1076
k = self.make_test_knit()
1078
td = list(line_delta(TEXT_1.splitlines(True),
1079
TEXT_1A.splitlines(True)))
1080
self.assertEqualDiff(''.join(td), delta_1_1a)
1081
out = apply_line_delta(TEXT_1.splitlines(True), td)
1082
self.assertEqualDiff(''.join(out), TEXT_1A)
1084
def assertDerivedBlocksEqual(self, source, target, noeol=False):
1085
"""Assert that the derived matching blocks match real output"""
1086
source_lines = source.splitlines(True)
1087
target_lines = target.splitlines(True)
1089
if noeol and not line.endswith('\n'):
1093
source_content = KnitContent([(None, nl(l)) for l in source_lines])
1094
target_content = KnitContent([(None, nl(l)) for l in target_lines])
1095
line_delta = source_content.line_delta(target_content)
1096
delta_blocks = list(KnitContent.get_line_delta_blocks(line_delta,
1097
source_lines, target_lines))
1098
matcher = KnitSequenceMatcher(None, source_lines, target_lines)
1099
matcher_blocks = list(list(matcher.get_matching_blocks()))
1100
self.assertEqual(matcher_blocks, delta_blocks)
1102
def test_get_line_delta_blocks(self):
1103
self.assertDerivedBlocksEqual('a\nb\nc\n', 'q\nc\n')
1104
self.assertDerivedBlocksEqual(TEXT_1, TEXT_1)
1105
self.assertDerivedBlocksEqual(TEXT_1, TEXT_1A)
1106
self.assertDerivedBlocksEqual(TEXT_1, TEXT_1B)
1107
self.assertDerivedBlocksEqual(TEXT_1B, TEXT_1A)
1108
self.assertDerivedBlocksEqual(TEXT_1A, TEXT_1B)
1109
self.assertDerivedBlocksEqual(TEXT_1A, '')
1110
self.assertDerivedBlocksEqual('', TEXT_1A)
1111
self.assertDerivedBlocksEqual('', '')
1112
self.assertDerivedBlocksEqual('a\nb\nc', 'a\nb\nc\nd')
1114
def test_get_line_delta_blocks_noeol(self):
1115
"""Handle historical knit deltas safely
1117
Some existing knit deltas don't consider the last line to differ
1118
when the only difference whether it has a final newline.
1120
New knit deltas appear to always consider the last line to differ
1123
self.assertDerivedBlocksEqual('a\nb\nc', 'a\nb\nc\nd\n', noeol=True)
1124
self.assertDerivedBlocksEqual('a\nb\nc\nd\n', 'a\nb\nc', noeol=True)
1125
self.assertDerivedBlocksEqual('a\nb\nc\n', 'a\nb\nc', noeol=True)
1126
self.assertDerivedBlocksEqual('a\nb\nc', 'a\nb\nc\n', noeol=True)
1128
def test_add_with_parents(self):
1129
"""Store in knit with parents"""
1130
k = self.make_test_knit()
1131
self.add_stock_one_and_one_a(k)
1132
self.assertEquals(k.get_parents('text-1'), [])
1133
self.assertEquals(k.get_parents('text-1a'), ['text-1'])
1135
def test_ancestry(self):
1136
"""Store in knit with parents"""
1137
k = self.make_test_knit()
1138
self.add_stock_one_and_one_a(k)
1139
self.assertEquals(set(k.get_ancestry(['text-1a'])), set(['text-1a', 'text-1']))
1141
def test_add_delta(self):
1142
"""Store in knit with parents"""
1143
k = KnitVersionedFile('test', get_transport('.'), factory=KnitPlainFactory(),
1144
delta=True, create=True)
1145
self.add_stock_one_and_one_a(k)
1147
self.assertEqualDiff(''.join(k.get_lines('text-1a')), TEXT_1A)
1149
def test_add_delta_knit_graph_index(self):
1150
"""Does adding work with a KnitGraphIndex."""
1151
index = InMemoryGraphIndex(2)
1152
knit_index = KnitGraphIndex(index, add_callback=index.add_nodes,
1154
k = KnitVersionedFile('test', get_transport('.'),
1155
delta=True, create=True, index=knit_index)
1156
self.add_stock_one_and_one_a(k)
1158
self.assertEqualDiff(''.join(k.get_lines('text-1a')), TEXT_1A)
1159
# check the index had the right data added.
1160
self.assertEqual(set([
1161
(index, ('text-1', ), ' 0 127', ((), ())),
1162
(index, ('text-1a', ), ' 127 140', ((('text-1', ),), (('text-1', ),))),
1163
]), set(index.iter_all_entries()))
1164
# we should not have a .kndx file
1165
self.assertFalse(get_transport('.').has('test.kndx'))
1167
def test_annotate(self):
1169
k = KnitVersionedFile('knit', get_transport('.'), factory=KnitAnnotateFactory(),
1170
delta=True, create=True)
1171
self.insert_and_test_small_annotate(k)
1173
def insert_and_test_small_annotate(self, k):
1174
"""test annotation with k works correctly."""
1175
k.add_lines('text-1', [], ['a\n', 'b\n'])
1176
k.add_lines('text-2', ['text-1'], ['a\n', 'c\n'])
1178
origins = k.annotate('text-2')
1179
self.assertEquals(origins[0], ('text-1', 'a\n'))
1180
self.assertEquals(origins[1], ('text-2', 'c\n'))
1182
def test_annotate_fulltext(self):
1184
k = KnitVersionedFile('knit', get_transport('.'), factory=KnitAnnotateFactory(),
1185
delta=False, create=True)
1186
self.insert_and_test_small_annotate(k)
1188
def test_annotate_merge_1(self):
1189
k = self.make_test_knit(True)
1190
k.add_lines('text-a1', [], ['a\n', 'b\n'])
1191
k.add_lines('text-a2', [], ['d\n', 'c\n'])
1192
k.add_lines('text-am', ['text-a1', 'text-a2'], ['d\n', 'b\n'])
1193
origins = k.annotate('text-am')
1194
self.assertEquals(origins[0], ('text-a2', 'd\n'))
1195
self.assertEquals(origins[1], ('text-a1', 'b\n'))
1197
def test_annotate_merge_2(self):
1198
k = self.make_test_knit(True)
1199
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1200
k.add_lines('text-a2', [], ['x\n', 'y\n', 'z\n'])
1201
k.add_lines('text-am', ['text-a1', 'text-a2'], ['a\n', 'y\n', 'c\n'])
1202
origins = k.annotate('text-am')
1203
self.assertEquals(origins[0], ('text-a1', 'a\n'))
1204
self.assertEquals(origins[1], ('text-a2', 'y\n'))
1205
self.assertEquals(origins[2], ('text-a1', 'c\n'))
1207
def test_annotate_merge_9(self):
1208
k = self.make_test_knit(True)
1209
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1210
k.add_lines('text-a2', [], ['x\n', 'y\n', 'z\n'])
1211
k.add_lines('text-am', ['text-a1', 'text-a2'], ['k\n', 'y\n', 'c\n'])
1212
origins = k.annotate('text-am')
1213
self.assertEquals(origins[0], ('text-am', 'k\n'))
1214
self.assertEquals(origins[1], ('text-a2', 'y\n'))
1215
self.assertEquals(origins[2], ('text-a1', 'c\n'))
1217
def test_annotate_merge_3(self):
1218
k = self.make_test_knit(True)
1219
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1220
k.add_lines('text-a2', [] ,['x\n', 'y\n', 'z\n'])
1221
k.add_lines('text-am', ['text-a1', 'text-a2'], ['k\n', 'y\n', 'z\n'])
1222
origins = k.annotate('text-am')
1223
self.assertEquals(origins[0], ('text-am', 'k\n'))
1224
self.assertEquals(origins[1], ('text-a2', 'y\n'))
1225
self.assertEquals(origins[2], ('text-a2', 'z\n'))
1227
def test_annotate_merge_4(self):
1228
k = self.make_test_knit(True)
1229
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1230
k.add_lines('text-a2', [], ['x\n', 'y\n', 'z\n'])
1231
k.add_lines('text-a3', ['text-a1'], ['a\n', 'b\n', 'p\n'])
1232
k.add_lines('text-am', ['text-a2', 'text-a3'], ['a\n', 'b\n', 'z\n'])
1233
origins = k.annotate('text-am')
1234
self.assertEquals(origins[0], ('text-a1', 'a\n'))
1235
self.assertEquals(origins[1], ('text-a1', 'b\n'))
1236
self.assertEquals(origins[2], ('text-a2', 'z\n'))
1238
def test_annotate_merge_5(self):
1239
k = self.make_test_knit(True)
1240
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1241
k.add_lines('text-a2', [], ['d\n', 'e\n', 'f\n'])
1242
k.add_lines('text-a3', [], ['x\n', 'y\n', 'z\n'])
1243
k.add_lines('text-am',
1244
['text-a1', 'text-a2', 'text-a3'],
1245
['a\n', 'e\n', 'z\n'])
1246
origins = k.annotate('text-am')
1247
self.assertEquals(origins[0], ('text-a1', 'a\n'))
1248
self.assertEquals(origins[1], ('text-a2', 'e\n'))
1249
self.assertEquals(origins[2], ('text-a3', 'z\n'))
1251
def test_annotate_file_cherry_pick(self):
1252
k = self.make_test_knit(True)
1253
k.add_lines('text-1', [], ['a\n', 'b\n', 'c\n'])
1254
k.add_lines('text-2', ['text-1'], ['d\n', 'e\n', 'f\n'])
1255
k.add_lines('text-3', ['text-2', 'text-1'], ['a\n', 'b\n', 'c\n'])
1256
origins = k.annotate('text-3')
1257
self.assertEquals(origins[0], ('text-1', 'a\n'))
1258
self.assertEquals(origins[1], ('text-1', 'b\n'))
1259
self.assertEquals(origins[2], ('text-1', 'c\n'))
1261
def test_knit_join(self):
1262
"""Store in knit with parents"""
1263
k1 = KnitVersionedFile('test1', get_transport('.'), factory=KnitPlainFactory(), create=True)
1264
k1.add_lines('text-a', [], split_lines(TEXT_1))
1265
k1.add_lines('text-b', ['text-a'], split_lines(TEXT_1))
1267
k1.add_lines('text-c', [], split_lines(TEXT_1))
1268
k1.add_lines('text-d', ['text-c'], split_lines(TEXT_1))
1270
k1.add_lines('text-m', ['text-b', 'text-d'], split_lines(TEXT_1))
1272
k2 = KnitVersionedFile('test2', get_transport('.'), factory=KnitPlainFactory(), create=True)
1273
count = k2.join(k1, version_ids=['text-m'])
1274
self.assertEquals(count, 5)
1275
self.assertTrue(k2.has_version('text-a'))
1276
self.assertTrue(k2.has_version('text-c'))
1278
def test_reannotate(self):
1279
k1 = KnitVersionedFile('knit1', get_transport('.'),
1280
factory=KnitAnnotateFactory(), create=True)
1282
k1.add_lines('text-a', [], ['a\n', 'b\n'])
1284
k1.add_lines('text-b', ['text-a'], ['a\n', 'c\n'])
1286
k2 = KnitVersionedFile('test2', get_transport('.'),
1287
factory=KnitAnnotateFactory(), create=True)
1288
k2.join(k1, version_ids=['text-b'])
1291
k1.add_lines('text-X', ['text-b'], ['a\n', 'b\n'])
1293
k2.add_lines('text-c', ['text-b'], ['z\n', 'c\n'])
1295
k2.add_lines('text-Y', ['text-b'], ['b\n', 'c\n'])
1297
# test-c will have index 3
1298
k1.join(k2, version_ids=['text-c'])
1300
lines = k1.get_lines('text-c')
1301
self.assertEquals(lines, ['z\n', 'c\n'])
1303
origins = k1.annotate('text-c')
1304
self.assertEquals(origins[0], ('text-c', 'z\n'))
1305
self.assertEquals(origins[1], ('text-b', 'c\n'))
1307
def test_get_line_delta_texts(self):
1308
"""Make sure we can call get_texts on text with reused line deltas"""
1309
k1 = KnitVersionedFile('test1', get_transport('.'),
1310
factory=KnitPlainFactory(), create=True)
1315
parents = ['%d' % (t-1)]
1316
k1.add_lines('%d' % t, parents, ['hello\n'] * t)
1317
k1.get_texts(('%d' % t) for t in range(3))
1319
def test_iter_lines_reads_in_order(self):
1320
t = MemoryTransport()
1321
instrumented_t = TransportLogger(t)
1322
k1 = KnitVersionedFile('id', instrumented_t, create=True, delta=True)
1323
self.assertEqual([('id.kndx',)], instrumented_t._calls)
1324
# add texts with no required ordering
1325
k1.add_lines('base', [], ['text\n'])
1326
k1.add_lines('base2', [], ['text2\n'])
1328
instrumented_t._calls = []
1329
# request a last-first iteration
1330
results = list(k1.iter_lines_added_or_present_in_versions(['base2', 'base']))
1331
self.assertEqual([('id.knit', [(0, 87), (87, 89)])], instrumented_t._calls)
1332
self.assertEqual(['text\n', 'text2\n'], results)
1334
def test_create_empty_annotated(self):
1335
k1 = self.make_test_knit(True)
1337
k1.add_lines('text-a', [], ['a\n', 'b\n'])
1338
k2 = k1.create_empty('t', MemoryTransport())
1339
self.assertTrue(isinstance(k2.factory, KnitAnnotateFactory))
1340
self.assertEqual(k1.delta, k2.delta)
1341
# the generic test checks for empty content and file class
1343
def test_knit_format(self):
1344
# this tests that a new knit index file has the expected content
1345
# and that is writes the data we expect as records are added.
1346
knit = self.make_test_knit(True)
1347
# Now knit files are not created until we first add data to them
1348
self.assertFileEqual("# bzr knit index 8\n", 'test.kndx')
1349
knit.add_lines_with_ghosts('revid', ['a_ghost'], ['a\n'])
1350
self.assertFileEqual(
1351
"# bzr knit index 8\n"
1353
"revid fulltext 0 84 .a_ghost :",
1355
knit.add_lines_with_ghosts('revid2', ['revid'], ['a\n'])
1356
self.assertFileEqual(
1357
"# bzr knit index 8\n"
1358
"\nrevid fulltext 0 84 .a_ghost :"
1359
"\nrevid2 line-delta 84 82 0 :",
1361
# we should be able to load this file again
1362
knit = KnitVersionedFile('test', get_transport('.'), access_mode='r')
1363
self.assertEqual(['revid', 'revid2'], knit.versions())
1364
# write a short write to the file and ensure that its ignored
1365
indexfile = file('test.kndx', 'ab')
1366
indexfile.write('\nrevid3 line-delta 166 82 1 2 3 4 5 .phwoar:demo ')
1368
# we should be able to load this file again
1369
knit = KnitVersionedFile('test', get_transport('.'), access_mode='w')
1370
self.assertEqual(['revid', 'revid2'], knit.versions())
1371
# and add a revision with the same id the failed write had
1372
knit.add_lines('revid3', ['revid2'], ['a\n'])
1373
# and when reading it revid3 should now appear.
1374
knit = KnitVersionedFile('test', get_transport('.'), access_mode='r')
1375
self.assertEqual(['revid', 'revid2', 'revid3'], knit.versions())
1376
self.assertEqual(['revid2'], knit.get_parents('revid3'))
1378
def test_delay_create(self):
1379
"""Test that passing delay_create=True creates files late"""
1380
knit = self.make_test_knit(annotate=True, delay_create=True)
1381
self.failIfExists('test.knit')
1382
self.failIfExists('test.kndx')
1383
knit.add_lines_with_ghosts('revid', ['a_ghost'], ['a\n'])
1384
self.failUnlessExists('test.knit')
1385
self.assertFileEqual(
1386
"# bzr knit index 8\n"
1388
"revid fulltext 0 84 .a_ghost :",
1391
def test_create_parent_dir(self):
1392
"""create_parent_dir can create knits in nonexistant dirs"""
1393
# Has no effect if we don't set 'delay_create'
1394
trans = get_transport('.')
1395
self.assertRaises(NoSuchFile, KnitVersionedFile, 'dir/test',
1396
trans, access_mode='w', factory=None,
1397
create=True, create_parent_dir=True)
1398
# Nothing should have changed yet
1399
knit = KnitVersionedFile('dir/test', trans, access_mode='w',
1400
factory=None, create=True,
1401
create_parent_dir=True,
1403
self.failIfExists('dir/test.knit')
1404
self.failIfExists('dir/test.kndx')
1405
self.failIfExists('dir')
1406
knit.add_lines('revid', [], ['a\n'])
1407
self.failUnlessExists('dir')
1408
self.failUnlessExists('dir/test.knit')
1409
self.assertFileEqual(
1410
"# bzr knit index 8\n"
1412
"revid fulltext 0 84 :",
1415
def test_create_mode_700(self):
1416
trans = get_transport('.')
1417
if not trans._can_roundtrip_unix_modebits():
1418
# Can't roundtrip, so no need to run this test
1420
knit = KnitVersionedFile('dir/test', trans, access_mode='w',
1421
factory=None, create=True,
1422
create_parent_dir=True,
1426
knit.add_lines('revid', [], ['a\n'])
1427
self.assertTransportMode(trans, 'dir', 0700)
1428
self.assertTransportMode(trans, 'dir/test.knit', 0600)
1429
self.assertTransportMode(trans, 'dir/test.kndx', 0600)
1431
def test_create_mode_770(self):
1432
trans = get_transport('.')
1433
if not trans._can_roundtrip_unix_modebits():
1434
# Can't roundtrip, so no need to run this test
1436
knit = KnitVersionedFile('dir/test', trans, access_mode='w',
1437
factory=None, create=True,
1438
create_parent_dir=True,
1442
knit.add_lines('revid', [], ['a\n'])
1443
self.assertTransportMode(trans, 'dir', 0770)
1444
self.assertTransportMode(trans, 'dir/test.knit', 0660)
1445
self.assertTransportMode(trans, 'dir/test.kndx', 0660)
1447
def test_create_mode_777(self):
1448
trans = get_transport('.')
1449
if not trans._can_roundtrip_unix_modebits():
1450
# Can't roundtrip, so no need to run this test
1452
knit = KnitVersionedFile('dir/test', trans, access_mode='w',
1453
factory=None, create=True,
1454
create_parent_dir=True,
1458
knit.add_lines('revid', [], ['a\n'])
1459
self.assertTransportMode(trans, 'dir', 0777)
1460
self.assertTransportMode(trans, 'dir/test.knit', 0666)
1461
self.assertTransportMode(trans, 'dir/test.kndx', 0666)
1463
def test_plan_merge(self):
1464
my_knit = self.make_test_knit(annotate=True)
1465
my_knit.add_lines('text1', [], split_lines(TEXT_1))
1466
my_knit.add_lines('text1a', ['text1'], split_lines(TEXT_1A))
1467
my_knit.add_lines('text1b', ['text1'], split_lines(TEXT_1B))
1468
plan = list(my_knit.plan_merge('text1a', 'text1b'))
1469
for plan_line, expected_line in zip(plan, AB_MERGE):
1470
self.assertEqual(plan_line, expected_line)
1472
def test_get_stream_empty(self):
1473
"""Get a data stream for an empty knit file."""
1474
k1 = self.make_test_knit()
1475
format, data_list, reader_callable = k1.get_data_stream([])
1476
self.assertEqual('knit-plain', format)
1477
self.assertEqual([], data_list)
1478
content = reader_callable(None)
1479
self.assertEqual('', content)
1480
self.assertIsInstance(content, str)
1482
def test_get_stream_one_version(self):
1483
"""Get a data stream for a single record out of a knit containing just
1486
k1 = self.make_test_knit()
1488
('text-a', [], TEXT_1),
1490
expected_data_list = [
1491
# version, options, length, parents
1492
('text-a', ['fulltext'], 122, []),
1494
for version_id, parents, lines in test_data:
1495
k1.add_lines(version_id, parents, split_lines(lines))
1497
format, data_list, reader_callable = k1.get_data_stream(['text-a'])
1498
self.assertEqual('knit-plain', format)
1499
self.assertEqual(expected_data_list, data_list)
1500
# There's only one record in the knit, so the content should be the
1501
# entire knit data file's contents.
1502
self.assertEqual(k1.transport.get_bytes(k1._data._access._filename),
1503
reader_callable(None))
1505
def test_get_stream_get_one_version_of_many(self):
1506
"""Get a data stream for just one version out of a knit containing many
1509
k1 = self.make_test_knit()
1510
# Insert the same data as test_knit_join, as they seem to cover a range
1511
# of cases (no parents, one parent, multiple parents).
1513
('text-a', [], TEXT_1),
1514
('text-b', ['text-a'], TEXT_1),
1515
('text-c', [], TEXT_1),
1516
('text-d', ['text-c'], TEXT_1),
1517
('text-m', ['text-b', 'text-d'], TEXT_1),
1519
expected_data_list = [
1520
# version, options, length, parents
1521
('text-m', ['line-delta'], 84, ['text-b', 'text-d']),
1523
for version_id, parents, lines in test_data:
1524
k1.add_lines(version_id, parents, split_lines(lines))
1526
format, data_list, reader_callable = k1.get_data_stream(['text-m'])
1527
self.assertEqual('knit-plain', format)
1528
self.assertEqual(expected_data_list, data_list)
1529
self.assertRecordContentEqual(k1, 'text-m', reader_callable(None))
1531
def test_get_stream_ghost_parent(self):
1532
"""Get a data stream for a version with a ghost parent."""
1533
k1 = self.make_test_knit()
1535
k1.add_lines('text-a', [], split_lines(TEXT_1))
1536
k1.add_lines_with_ghosts('text-b', ['text-a', 'text-ghost'],
1537
split_lines(TEXT_1))
1539
expected_data_list = [
1540
# version, options, length, parents
1541
('text-b', ['line-delta'], 84, ['text-a', 'text-ghost']),
1544
format, data_list, reader_callable = k1.get_data_stream(['text-b'])
1545
self.assertEqual('knit-plain', format)
1546
self.assertEqual(expected_data_list, data_list)
1547
self.assertRecordContentEqual(k1, 'text-b', reader_callable(None))
1549
def test_get_stream_get_multiple_records(self):
1550
"""Get a stream for multiple records of a knit."""
1551
k1 = self.make_test_knit()
1552
# Insert the same data as test_knit_join, as they seem to cover a range
1553
# of cases (no parents, one parent, multiple parents).
1555
('text-a', [], TEXT_1),
1556
('text-b', ['text-a'], TEXT_1),
1557
('text-c', [], TEXT_1),
1558
('text-d', ['text-c'], TEXT_1),
1559
('text-m', ['text-b', 'text-d'], TEXT_1),
1561
expected_data_list = [
1562
# version, options, length, parents
1563
('text-b', ['line-delta'], 84, ['text-a']),
1564
('text-d', ['line-delta'], 84, ['text-c']),
1566
for version_id, parents, lines in test_data:
1567
k1.add_lines(version_id, parents, split_lines(lines))
1569
# Note that even though we request the revision IDs in a particular
1570
# order, the data stream may return them in any order it likes. In this
1571
# case, they'll be in the order they were inserted into the knit.
1572
format, data_list, reader_callable = k1.get_data_stream(
1573
['text-d', 'text-b'])
1574
self.assertEqual('knit-plain', format)
1575
self.assertEqual(expected_data_list, data_list)
1576
self.assertRecordContentEqual(k1, 'text-b', reader_callable(84))
1577
self.assertRecordContentEqual(k1, 'text-d', reader_callable(84))
1578
self.assertEqual('', reader_callable(None),
1579
"There should be no more bytes left to read.")
1581
def test_get_stream_all(self):
1582
"""Get a data stream for all the records in a knit.
1584
This exercises fulltext records, line-delta records, records with
1585
various numbers of parents, and reading multiple records out of the
1586
callable. These cases ought to all be exercised individually by the
1587
other test_get_stream_* tests; this test is basically just paranoia.
1589
k1 = self.make_test_knit()
1590
# Insert the same data as test_knit_join, as they seem to cover a range
1591
# of cases (no parents, one parent, multiple parents).
1593
('text-a', [], TEXT_1),
1594
('text-b', ['text-a'], TEXT_1),
1595
('text-c', [], TEXT_1),
1596
('text-d', ['text-c'], TEXT_1),
1597
('text-m', ['text-b', 'text-d'], TEXT_1),
1599
expected_data_list = [
1600
# version, options, length, parents
1601
('text-a', ['fulltext'], 122, []),
1602
('text-b', ['line-delta'], 84, ['text-a']),
1603
('text-c', ['fulltext'], 121, []),
1604
('text-d', ['line-delta'], 84, ['text-c']),
1605
('text-m', ['line-delta'], 84, ['text-b', 'text-d']),
1607
for version_id, parents, lines in test_data:
1608
k1.add_lines(version_id, parents, split_lines(lines))
1610
format, data_list, reader_callable = k1.get_data_stream(
1611
['text-a', 'text-b', 'text-c', 'text-d', 'text-m'])
1612
self.assertEqual('knit-plain', format)
1613
self.assertEqual(expected_data_list, data_list)
1614
for version_id, options, length, parents in expected_data_list:
1615
bytes = reader_callable(length)
1616
self.assertRecordContentEqual(k1, version_id, bytes)
1618
def assertKnitFilesEqual(self, knit1, knit2):
1619
"""Assert that the contents of the index and data files of two knits are
1623
knit1.transport.get_bytes(knit1._data._access._filename),
1624
knit2.transport.get_bytes(knit2._data._access._filename))
1626
knit1.transport.get_bytes(knit1._index._filename),
1627
knit2.transport.get_bytes(knit2._index._filename))
1629
def test_insert_data_stream_empty(self):
1630
"""Inserting a data stream with no records should not put any data into
1633
k1 = self.make_test_knit()
1634
k1.insert_data_stream(
1635
(k1.get_format_signature(), [], lambda ignored: ''))
1636
self.assertEqual('', k1.transport.get_bytes(k1._data._access._filename),
1637
"The .knit should be completely empty.")
1638
self.assertEqual(k1._index.HEADER,
1639
k1.transport.get_bytes(k1._index._filename),
1640
"The .kndx should have nothing apart from the header.")
1642
def test_insert_data_stream_one_record(self):
1643
"""Inserting a data stream with one record from a knit with one record
1644
results in byte-identical files.
1646
source = self.make_test_knit(name='source')
1647
source.add_lines('text-a', [], split_lines(TEXT_1))
1648
data_stream = source.get_data_stream(['text-a'])
1650
target = self.make_test_knit(name='target')
1651
target.insert_data_stream(data_stream)
1653
self.assertKnitFilesEqual(source, target)
1655
def test_insert_data_stream_records_already_present(self):
1656
"""Insert a data stream where some records are alreday present in the
1657
target, and some not. Only the new records are inserted.
1659
source = self.make_test_knit(name='source')
1660
target = self.make_test_knit(name='target')
1661
# Insert 'text-a' into both source and target
1662
source.add_lines('text-a', [], split_lines(TEXT_1))
1663
target.insert_data_stream(source.get_data_stream(['text-a']))
1664
# Insert 'text-b' into just the source.
1665
source.add_lines('text-b', ['text-a'], split_lines(TEXT_1))
1666
# Get a data stream of both text-a and text-b, and insert it.
1667
data_stream = source.get_data_stream(['text-a', 'text-b'])
1668
target.insert_data_stream(data_stream)
1669
# The source and target will now be identical. This means the text-a
1670
# record was not added a second time.
1671
self.assertKnitFilesEqual(source, target)
1673
def test_insert_data_stream_multiple_records(self):
1674
"""Inserting a data stream of all records from a knit with multiple
1675
records results in byte-identical files.
1677
source = self.make_test_knit(name='source')
1678
source.add_lines('text-a', [], split_lines(TEXT_1))
1679
source.add_lines('text-b', ['text-a'], split_lines(TEXT_1))
1680
source.add_lines('text-c', [], split_lines(TEXT_1))
1681
data_stream = source.get_data_stream(['text-a', 'text-b', 'text-c'])
1683
target = self.make_test_knit(name='target')
1684
target.insert_data_stream(data_stream)
1686
self.assertKnitFilesEqual(source, target)
1688
def test_insert_data_stream_ghost_parent(self):
1689
"""Insert a data stream with a record that has a ghost parent."""
1690
# Make a knit with a record, text-a, that has a ghost parent.
1691
source = self.make_test_knit(name='source')
1692
source.add_lines_with_ghosts('text-a', ['text-ghost'],
1693
split_lines(TEXT_1))
1694
data_stream = source.get_data_stream(['text-a'])
1696
target = self.make_test_knit(name='target')
1697
target.insert_data_stream(data_stream)
1699
self.assertKnitFilesEqual(source, target)
1701
# The target knit object is in a consistent state, i.e. the record we
1702
# just added is immediately visible.
1703
self.assertTrue(target.has_version('text-a'))
1704
self.assertTrue(target.has_ghost('text-ghost'))
1705
self.assertEqual(split_lines(TEXT_1), target.get_lines('text-a'))
1707
def test_insert_data_stream_inconsistent_version_lines(self):
1708
"""Inserting a data stream which has different content for a version_id
1709
than already exists in the knit will raise KnitCorrupt.
1711
source = self.make_test_knit(name='source')
1712
target = self.make_test_knit(name='target')
1713
# Insert a different 'text-a' into both source and target
1714
source.add_lines('text-a', [], split_lines(TEXT_1))
1715
target.add_lines('text-a', [], split_lines(TEXT_2))
1716
# Insert a data stream with conflicting content into the target
1717
data_stream = source.get_data_stream(['text-a'])
1719
errors.KnitCorrupt, target.insert_data_stream, data_stream)
1721
def test_insert_data_stream_inconsistent_version_parents(self):
1722
"""Inserting a data stream which has different parents for a version_id
1723
than already exists in the knit will raise KnitCorrupt.
1725
source = self.make_test_knit(name='source')
1726
target = self.make_test_knit(name='target')
1727
# Insert a different 'text-a' into both source and target. They differ
1728
# only by the parents list, the content is the same.
1729
source.add_lines_with_ghosts('text-a', [], split_lines(TEXT_1))
1730
target.add_lines_with_ghosts('text-a', ['a-ghost'], split_lines(TEXT_1))
1731
# Insert a data stream with conflicting content into the target
1732
data_stream = source.get_data_stream(['text-a'])
1734
errors.KnitCorrupt, target.insert_data_stream, data_stream)
1736
def test_insert_data_stream_incompatible_format(self):
1737
"""A data stream in a different format to the target knit cannot be
1740
It will raise KnitDataStreamIncompatible.
1742
data_stream = ('fake-format-signature', [], lambda _: '')
1743
target = self.make_test_knit(name='target')
1745
errors.KnitDataStreamIncompatible,
1746
target.insert_data_stream, data_stream)
1748
# * test that a stream of "already present version, then new version"
1749
# inserts correctly.
1760
Banana cup cake recipe
1766
- self-raising flour
1770
Banana cup cake recipe
1772
- bananas (do not use plantains!!!)
1779
Banana cup cake recipe
1782
- self-raising flour
1795
AB_MERGE_TEXT="""unchanged|Banana cup cake recipe
1800
new-b|- bananas (do not use plantains!!!)
1801
unchanged|- broken tea cups
1802
new-a|- self-raising flour
1805
AB_MERGE=[tuple(l.split('|')) for l in AB_MERGE_TEXT.splitlines(True)]
1808
def line_delta(from_lines, to_lines):
1809
"""Generate line-based delta from one text to another"""
1810
s = difflib.SequenceMatcher(None, from_lines, to_lines)
1811
for op in s.get_opcodes():
1812
if op[0] == 'equal':
1814
yield '%d,%d,%d\n' % (op[1], op[2], op[4]-op[3])
1815
for i in range(op[3], op[4]):
1819
def apply_line_delta(basis_lines, delta_lines):
1820
"""Apply a line-based perfect diff
1822
basis_lines -- text to apply the patch to
1823
delta_lines -- diff instructions and content
1825
out = basis_lines[:]
1828
while i < len(delta_lines):
1830
a, b, c = map(long, l.split(','))
1832
out[offset+a:offset+b] = delta_lines[i:i+c]
1834
offset = offset + (b - a) + c
1838
class TestWeaveToKnit(KnitTests):
1840
def test_weave_to_knit_matches(self):
1841
# check that the WeaveToKnit is_compatible function
1842
# registers True for a Weave to a Knit.
1844
k = self.make_test_knit()
1845
self.failUnless(WeaveToKnit.is_compatible(w, k))
1846
self.failIf(WeaveToKnit.is_compatible(k, w))
1847
self.failIf(WeaveToKnit.is_compatible(w, w))
1848
self.failIf(WeaveToKnit.is_compatible(k, k))
1851
class TestKnitCaching(KnitTests):
1853
def create_knit(self, cache_add=False):
1854
k = self.make_test_knit(True)
1858
k.add_lines('text-1', [], split_lines(TEXT_1))
1859
k.add_lines('text-2', [], split_lines(TEXT_2))
1862
def test_no_caching(self):
1863
k = self.create_knit()
1864
# Nothing should be cached without setting 'enable_cache'
1865
self.assertEqual({}, k._data._cache)
1867
def test_cache_add_and_clear(self):
1868
k = self.create_knit(True)
1870
self.assertEqual(['text-1', 'text-2'], sorted(k._data._cache.keys()))
1873
self.assertEqual({}, k._data._cache)
1875
def test_cache_data_read_raw(self):
1876
k = self.create_knit()
1878
# Now cache and read
1881
def read_one_raw(version):
1882
pos_map = k._get_components_positions([version])
1883
method, index_memo, next = pos_map[version]
1884
lst = list(k._data.read_records_iter_raw([(version, index_memo)]))
1885
self.assertEqual(1, len(lst))
1888
val = read_one_raw('text-1')
1889
self.assertEqual({'text-1':val[1]}, k._data._cache)
1892
# After clear, new reads are not cached
1893
self.assertEqual({}, k._data._cache)
1895
val2 = read_one_raw('text-1')
1896
self.assertEqual(val, val2)
1897
self.assertEqual({}, k._data._cache)
1899
def test_cache_data_read(self):
1900
k = self.create_knit()
1902
def read_one(version):
1903
pos_map = k._get_components_positions([version])
1904
method, index_memo, next = pos_map[version]
1905
lst = list(k._data.read_records_iter([(version, index_memo)]))
1906
self.assertEqual(1, len(lst))
1909
# Now cache and read
1912
val = read_one('text-2')
1913
self.assertEqual(['text-2'], k._data._cache.keys())
1914
self.assertEqual('text-2', val[0])
1915
content, digest = k._data._parse_record('text-2',
1916
k._data._cache['text-2'])
1917
self.assertEqual(content, val[1])
1918
self.assertEqual(digest, val[2])
1921
self.assertEqual({}, k._data._cache)
1923
val2 = read_one('text-2')
1924
self.assertEqual(val, val2)
1925
self.assertEqual({}, k._data._cache)
1927
def test_cache_read(self):
1928
k = self.create_knit()
1931
text = k.get_text('text-1')
1932
self.assertEqual(TEXT_1, text)
1933
self.assertEqual(['text-1'], k._data._cache.keys())
1936
self.assertEqual({}, k._data._cache)
1938
text = k.get_text('text-1')
1939
self.assertEqual(TEXT_1, text)
1940
self.assertEqual({}, k._data._cache)
1465
def make_test_knit(self, annotate=False, name='test'):
1466
mapper = ConstantMapper(name)
1467
return make_file_factory(annotate, mapper)(self.get_transport())
1470
class TestBadShaError(KnitTests):
1471
"""Tests for handling of sha errors."""
1473
def test_sha_exception_has_text(self):
1474
# having the failed text included in the error allows for recovery.
1475
source = self.make_test_knit()
1476
target = self.make_test_knit(name="target")
1477
if not source._max_delta_chain:
1478
raise TestNotApplicable(
1479
"cannot get delta-caused sha failures without deltas.")
1482
broken = ('broken',)
1483
source.add_lines(basis, (), ['foo\n'])
1484
source.add_lines(broken, (basis,), ['foo\n', 'bar\n'])
1485
# Seed target with a bad basis text
1486
target.add_lines(basis, (), ['gam\n'])
1487
target.insert_record_stream(
1488
source.get_record_stream([broken], 'unordered', False))
1489
err = self.assertRaises(errors.KnitCorrupt,
1490
target.get_record_stream([broken], 'unordered', True
1491
).next().get_bytes_as, 'chunked')
1492
self.assertEqual(['gam\n', 'bar\n'], err.content)
1493
# Test for formatting with live data
1494
self.assertStartsWith(str(err), "Knit ")
1943
1497
class TestKnitIndex(KnitTests):
2062
1625
add_callback = self.catch_add
2064
1627
add_callback = None
2065
return KnitGraphIndex(combined_index, deltas=deltas,
1628
return _KnitGraphIndex(combined_index, lambda:True, deltas=deltas,
2066
1629
add_callback=add_callback)
2068
def test_get_graph(self):
2069
index = self.two_graph_index()
2070
self.assertEqual(set([
2071
('tip', ('parent', )),
2073
('parent', ('tail', 'ghost')),
2075
]), set(index.get_graph()))
2077
def test_get_ancestry(self):
2078
# get_ancestry is defined as eliding ghosts, not erroring.
2079
index = self.two_graph_index()
2080
self.assertEqual([], index.get_ancestry([]))
2081
self.assertEqual(['separate'], index.get_ancestry(['separate']))
2082
self.assertEqual(['tail'], index.get_ancestry(['tail']))
2083
self.assertEqual(['tail', 'parent'], index.get_ancestry(['parent']))
2084
self.assertEqual(['tail', 'parent', 'tip'], index.get_ancestry(['tip']))
2085
self.assertTrue(index.get_ancestry(['tip', 'separate']) in
2086
(['tail', 'parent', 'tip', 'separate'],
2087
['separate', 'tail', 'parent', 'tip'],
2089
# and without topo_sort
2090
self.assertEqual(set(['separate']),
2091
set(index.get_ancestry(['separate'], topo_sorted=False)))
2092
self.assertEqual(set(['tail']),
2093
set(index.get_ancestry(['tail'], topo_sorted=False)))
2094
self.assertEqual(set(['tail', 'parent']),
2095
set(index.get_ancestry(['parent'], topo_sorted=False)))
2096
self.assertEqual(set(['tail', 'parent', 'tip']),
2097
set(index.get_ancestry(['tip'], topo_sorted=False)))
2098
self.assertEqual(set(['separate', 'tail', 'parent', 'tip']),
2099
set(index.get_ancestry(['tip', 'separate'])))
2100
# asking for a ghost makes it go boom.
2101
self.assertRaises(errors.RevisionNotPresent, index.get_ancestry, ['ghost'])
2103
def test_get_ancestry_with_ghosts(self):
2104
index = self.two_graph_index()
2105
self.assertEqual([], index.get_ancestry_with_ghosts([]))
2106
self.assertEqual(['separate'], index.get_ancestry_with_ghosts(['separate']))
2107
self.assertEqual(['tail'], index.get_ancestry_with_ghosts(['tail']))
2108
self.assertTrue(index.get_ancestry_with_ghosts(['parent']) in
2109
(['tail', 'ghost', 'parent'],
2110
['ghost', 'tail', 'parent'],
2112
self.assertTrue(index.get_ancestry_with_ghosts(['tip']) in
2113
(['tail', 'ghost', 'parent', 'tip'],
2114
['ghost', 'tail', 'parent', 'tip'],
2116
self.assertTrue(index.get_ancestry_with_ghosts(['tip', 'separate']) in
2117
(['tail', 'ghost', 'parent', 'tip', 'separate'],
2118
['ghost', 'tail', 'parent', 'tip', 'separate'],
2119
['separate', 'tail', 'ghost', 'parent', 'tip'],
2120
['separate', 'ghost', 'tail', 'parent', 'tip'],
2122
# asking for a ghost makes it go boom.
2123
self.assertRaises(errors.RevisionNotPresent, index.get_ancestry_with_ghosts, ['ghost'])
2125
def test_num_versions(self):
2126
index = self.two_graph_index()
2127
self.assertEqual(4, index.num_versions())
2129
def test_get_versions(self):
2130
index = self.two_graph_index()
2131
self.assertEqual(set(['tail', 'tip', 'parent', 'separate']),
2132
set(index.get_versions()))
2134
def test_has_version(self):
2135
index = self.two_graph_index()
2136
self.assertTrue(index.has_version('tail'))
2137
self.assertFalse(index.has_version('ghost'))
1631
def test_keys(self):
1632
index = self.two_graph_index()
1633
self.assertEqual(set([('tail',), ('tip',), ('parent',), ('separate',)]),
2139
1636
def test_get_position(self):
2140
1637
index = self.two_graph_index()
2141
self.assertEqual((index._graph_index._indices[0], 0, 100), index.get_position('tip'))
2142
self.assertEqual((index._graph_index._indices[1], 100, 78), index.get_position('parent'))
1638
self.assertEqual((index._graph_index._indices[0], 0, 100), index.get_position(('tip',)))
1639
self.assertEqual((index._graph_index._indices[1], 100, 78), index.get_position(('parent',)))
2144
1641
def test_get_method_deltas(self):
2145
1642
index = self.two_graph_index(deltas=True)
2146
self.assertEqual('fulltext', index.get_method('tip'))
2147
self.assertEqual('line-delta', index.get_method('parent'))
1643
self.assertEqual('fulltext', index.get_method(('tip',)))
1644
self.assertEqual('line-delta', index.get_method(('parent',)))
2149
1646
def test_get_method_no_deltas(self):
2150
1647
# check that the parent-history lookup is ignored with deltas=False.
2151
1648
index = self.two_graph_index(deltas=False)
2152
self.assertEqual('fulltext', index.get_method('tip'))
2153
self.assertEqual('fulltext', index.get_method('parent'))
1649
self.assertEqual('fulltext', index.get_method(('tip',)))
1650
self.assertEqual('fulltext', index.get_method(('parent',)))
2155
1652
def test_get_options_deltas(self):
2156
1653
index = self.two_graph_index(deltas=True)
2157
self.assertEqual(['fulltext', 'no-eol'], index.get_options('tip'))
2158
self.assertEqual(['line-delta'], index.get_options('parent'))
1654
self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
1655
self.assertEqual(['line-delta'], index.get_options(('parent',)))
2160
1657
def test_get_options_no_deltas(self):
2161
1658
# check that the parent-history lookup is ignored with deltas=False.
2162
1659
index = self.two_graph_index(deltas=False)
2163
self.assertEqual(['fulltext', 'no-eol'], index.get_options('tip'))
2164
self.assertEqual(['fulltext'], index.get_options('parent'))
2166
def test_get_parents(self):
2167
# get_parents ignores ghosts
2168
index = self.two_graph_index()
2169
self.assertEqual(('tail', ), index.get_parents('parent'))
2170
# and errors on ghosts.
2171
self.assertRaises(errors.RevisionNotPresent,
2172
index.get_parents, 'ghost')
2174
def test_get_parents_with_ghosts(self):
2175
index = self.two_graph_index()
2176
self.assertEqual(('tail', 'ghost'), index.get_parents_with_ghosts('parent'))
2177
# and errors on ghosts.
2178
self.assertRaises(errors.RevisionNotPresent,
2179
index.get_parents_with_ghosts, 'ghost')
2181
def test_check_versions_present(self):
2182
# ghosts should not be considered present
2183
index = self.two_graph_index()
2184
self.assertRaises(RevisionNotPresent, index.check_versions_present,
2186
self.assertRaises(RevisionNotPresent, index.check_versions_present,
2188
index.check_versions_present(['tail', 'separate'])
1660
self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
1661
self.assertEqual(['fulltext'], index.get_options(('parent',)))
1663
def test_get_parent_map(self):
1664
index = self.two_graph_index()
1665
self.assertEqual({('parent',):(('tail',), ('ghost',))},
1666
index.get_parent_map([('parent',), ('ghost',)]))
2190
1668
def catch_add(self, entries):
2191
1669
self.caught_entries.append(entries)
2193
1671
def test_add_no_callback_errors(self):
2194
1672
index = self.two_graph_index()
2195
self.assertRaises(errors.ReadOnlyError, index.add_version,
2196
'new', 'fulltext,no-eol', (None, 50, 60), ['separate'])
1673
self.assertRaises(errors.ReadOnlyError, index.add_records,
1674
[(('new',), 'fulltext,no-eol', (None, 50, 60), ['separate'])])
2198
1676
def test_add_version_smoke(self):
2199
1677
index = self.two_graph_index(catch_adds=True)
2200
index.add_version('new', 'fulltext,no-eol', (None, 50, 60), ['separate'])
1678
index.add_records([(('new',), 'fulltext,no-eol', (None, 50, 60),
2201
1680
self.assertEqual([[(('new', ), 'N50 60', ((('separate',),),))]],
2202
1681
self.caught_entries)
2204
1683
def test_add_version_delta_not_delta_index(self):
2205
1684
index = self.two_graph_index(catch_adds=True)
2206
self.assertRaises(errors.KnitCorrupt, index.add_version,
2207
'new', 'no-eol,line-delta', (None, 0, 100), ['parent'])
1685
self.assertRaises(errors.KnitCorrupt, index.add_records,
1686
[(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
2208
1687
self.assertEqual([], self.caught_entries)
2210
1689
def test_add_version_same_dup(self):
2211
1690
index = self.two_graph_index(catch_adds=True)
2212
1691
# options can be spelt two different ways
2213
index.add_version('tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])
2214
index.add_version('tip', 'no-eol,fulltext', (None, 0, 100), ['parent'])
2215
# but neither should have added data.
2216
self.assertEqual([[], []], self.caught_entries)
1692
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
1693
index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [('parent',)])])
1694
# position/length are ignored (because each pack could have fulltext or
1695
# delta, and be at a different position.
1696
index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100),
1698
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000),
1700
# but neither should have added data:
1701
self.assertEqual([[], [], [], []], self.caught_entries)
2218
1703
def test_add_version_different_dup(self):
2219
1704
index = self.two_graph_index(deltas=True, catch_adds=True)
2220
1705
# change options
2221
self.assertRaises(errors.KnitCorrupt, index.add_version,
2222
'tip', 'no-eol,line-delta', (None, 0, 100), ['parent'])
2223
self.assertRaises(errors.KnitCorrupt, index.add_version,
2224
'tip', 'line-delta,no-eol', (None, 0, 100), ['parent'])
2225
self.assertRaises(errors.KnitCorrupt, index.add_version,
2226
'tip', 'fulltext', (None, 0, 100), ['parent'])
2228
self.assertRaises(errors.KnitCorrupt, index.add_version,
2229
'tip', 'fulltext,no-eol', (None, 50, 100), ['parent'])
2230
self.assertRaises(errors.KnitCorrupt, index.add_version,
2231
'tip', 'fulltext,no-eol', (None, 0, 1000), ['parent'])
1706
self.assertRaises(errors.KnitCorrupt, index.add_records,
1707
[(('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
1708
self.assertRaises(errors.KnitCorrupt, index.add_records,
1709
[(('tip',), 'fulltext', (None, 0, 100), [('parent',)])])
2233
self.assertRaises(errors.KnitCorrupt, index.add_version,
2234
'tip', 'fulltext,no-eol', (None, 0, 100), [])
1711
self.assertRaises(errors.KnitCorrupt, index.add_records,
1712
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
2235
1713
self.assertEqual([], self.caught_entries)
2237
1715
def test_add_versions_nodeltas(self):
2238
1716
index = self.two_graph_index(catch_adds=True)
2239
index.add_versions([
2240
('new', 'fulltext,no-eol', (None, 50, 60), ['separate']),
2241
('new2', 'fulltext', (None, 0, 6), ['new']),
1718
(('new',), 'fulltext,no-eol', (None, 50, 60), [('separate',)]),
1719
(('new2',), 'fulltext', (None, 0, 6), [('new',)]),
2243
1721
self.assertEqual([(('new', ), 'N50 60', ((('separate',),),)),
2244
1722
(('new2', ), ' 0 6', ((('new',),),))],
2259
1737
def test_add_versions_delta_not_delta_index(self):
2260
1738
index = self.two_graph_index(catch_adds=True)
2261
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2262
[('new', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
1739
self.assertRaises(errors.KnitCorrupt, index.add_records,
1740
[(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
2263
1741
self.assertEqual([], self.caught_entries)
1743
def test_add_versions_random_id_accepted(self):
1744
index = self.two_graph_index(catch_adds=True)
1745
index.add_records([], random_id=True)
2265
1747
def test_add_versions_same_dup(self):
2266
1748
index = self.two_graph_index(catch_adds=True)
2267
1749
# options can be spelt two different ways
2268
index.add_versions([('tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])])
2269
index.add_versions([('tip', 'no-eol,fulltext', (None, 0, 100), ['parent'])])
1750
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100),
1752
index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100),
1754
# position/length are ignored (because each pack could have fulltext or
1755
# delta, and be at a different position.
1756
index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100),
1758
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000),
2270
1760
# but neither should have added data.
2271
self.assertEqual([[], []], self.caught_entries)
1761
self.assertEqual([[], [], [], []], self.caught_entries)
2273
1763
def test_add_versions_different_dup(self):
2274
1764
index = self.two_graph_index(deltas=True, catch_adds=True)
2275
1765
# change options
2276
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2277
[('tip', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
2278
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2279
[('tip', 'line-delta,no-eol', (None, 0, 100), ['parent'])])
2280
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2281
[('tip', 'fulltext', (None, 0, 100), ['parent'])])
2283
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2284
[('tip', 'fulltext,no-eol', (None, 50, 100), ['parent'])])
2285
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2286
[('tip', 'fulltext,no-eol', (None, 0, 1000), ['parent'])])
1766
self.assertRaises(errors.KnitCorrupt, index.add_records,
1767
[(('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
1768
self.assertRaises(errors.KnitCorrupt, index.add_records,
1769
[(('tip',), 'fulltext', (None, 0, 100), [('parent',)])])
2288
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2289
[('tip', 'fulltext,no-eol', (None, 0, 100), [])])
1771
self.assertRaises(errors.KnitCorrupt, index.add_records,
1772
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
2290
1773
# change options in the second record
2291
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2292
[('tip', 'fulltext,no-eol', (None, 0, 100), ['parent']),
2293
('tip', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
1774
self.assertRaises(errors.KnitCorrupt, index.add_records,
1775
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)]),
1776
(('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
2294
1777
self.assertEqual([], self.caught_entries)
2296
def test_iter_parents(self):
2297
index1 = self.make_g_index('1', 1, [
2299
(('r0', ), 'N0 100', ([], )),
2301
(('r1', ), '', ([('r0', )], ))])
2302
index2 = self.make_g_index('2', 1, [
2304
(('r2', ), 'N0 100', ([('r1', ), ('r0', )], )),
2306
combined_index = CombinedGraphIndex([index1, index2])
2307
index = KnitGraphIndex(combined_index)
2309
# cases: each sample data individually:
2310
self.assertEqual(set([('r0', ())]),
2311
set(index.iter_parents(['r0'])))
2312
self.assertEqual(set([('r1', ('r0', ))]),
2313
set(index.iter_parents(['r1'])))
2314
self.assertEqual(set([('r2', ('r1', 'r0'))]),
2315
set(index.iter_parents(['r2'])))
2316
# no nodes returned for a missing node
2317
self.assertEqual(set(),
2318
set(index.iter_parents(['missing'])))
2319
# 1 node returned with missing nodes skipped
2320
self.assertEqual(set([('r1', ('r0', ))]),
2321
set(index.iter_parents(['ghost1', 'r1', 'ghost'])))
2323
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
2324
set(index.iter_parents(['r0', 'r1'])))
2325
# 2 nodes returned, missing skipped
2326
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
2327
set(index.iter_parents(['a', 'r0', 'b', 'r1', 'c'])))
1779
def make_g_index_missing_compression_parent(self):
1780
graph_index = self.make_g_index('missing_comp', 2,
1781
[(('tip', ), ' 100 78',
1782
([('missing-parent', ), ('ghost', )], [('missing-parent', )]))])
1785
def make_g_index_missing_parent(self):
1786
graph_index = self.make_g_index('missing_parent', 2,
1787
[(('parent', ), ' 100 78', ([], [])),
1788
(('tip', ), ' 100 78',
1789
([('parent', ), ('missing-parent', )], [('parent', )])),
1793
def make_g_index_no_external_refs(self):
1794
graph_index = self.make_g_index('no_external_refs', 2,
1795
[(('rev', ), ' 100 78',
1796
([('parent', ), ('ghost', )], []))])
1799
def test_add_good_unvalidated_index(self):
1800
unvalidated = self.make_g_index_no_external_refs()
1801
combined = CombinedGraphIndex([unvalidated])
1802
index = _KnitGraphIndex(combined, lambda: True, deltas=True)
1803
index.scan_unvalidated_index(unvalidated)
1804
self.assertEqual(frozenset(), index.get_missing_compression_parents())
1806
def test_add_missing_compression_parent_unvalidated_index(self):
1807
unvalidated = self.make_g_index_missing_compression_parent()
1808
combined = CombinedGraphIndex([unvalidated])
1809
index = _KnitGraphIndex(combined, lambda: True, deltas=True)
1810
index.scan_unvalidated_index(unvalidated)
1811
# This also checks that its only the compression parent that is
1812
# examined, otherwise 'ghost' would also be reported as a missing
1815
frozenset([('missing-parent',)]),
1816
index.get_missing_compression_parents())
1818
def test_add_missing_noncompression_parent_unvalidated_index(self):
1819
unvalidated = self.make_g_index_missing_parent()
1820
combined = CombinedGraphIndex([unvalidated])
1821
index = _KnitGraphIndex(combined, lambda: True, deltas=True,
1822
track_external_parent_refs=True)
1823
index.scan_unvalidated_index(unvalidated)
1825
frozenset([('missing-parent',)]), index.get_missing_parents())
1827
def test_track_external_parent_refs(self):
1828
g_index = self.make_g_index('empty', 2, [])
1829
combined = CombinedGraphIndex([g_index])
1830
index = _KnitGraphIndex(combined, lambda: True, deltas=True,
1831
add_callback=self.catch_add, track_external_parent_refs=True)
1832
self.caught_entries = []
1834
(('new-key',), 'fulltext,no-eol', (None, 50, 60),
1835
[('parent-1',), ('parent-2',)])])
1837
frozenset([('parent-1',), ('parent-2',)]),
1838
index.get_missing_parents())
1840
def test_add_unvalidated_index_with_present_external_references(self):
1841
index = self.two_graph_index(deltas=True)
1842
# Ugly hack to get at one of the underlying GraphIndex objects that
1843
# two_graph_index built.
1844
unvalidated = index._graph_index._indices[1]
1845
# 'parent' is an external ref of _indices[1] (unvalidated), but is
1846
# present in _indices[0].
1847
index.scan_unvalidated_index(unvalidated)
1848
self.assertEqual(frozenset(), index.get_missing_compression_parents())
1850
def make_new_missing_parent_g_index(self, name):
1851
missing_parent = name + '-missing-parent'
1852
graph_index = self.make_g_index(name, 2,
1853
[((name + 'tip', ), ' 100 78',
1854
([(missing_parent, ), ('ghost', )], [(missing_parent, )]))])
1857
def test_add_mulitiple_unvalidated_indices_with_missing_parents(self):
1858
g_index_1 = self.make_new_missing_parent_g_index('one')
1859
g_index_2 = self.make_new_missing_parent_g_index('two')
1860
combined = CombinedGraphIndex([g_index_1, g_index_2])
1861
index = _KnitGraphIndex(combined, lambda: True, deltas=True)
1862
index.scan_unvalidated_index(g_index_1)
1863
index.scan_unvalidated_index(g_index_2)
1865
frozenset([('one-missing-parent',), ('two-missing-parent',)]),
1866
index.get_missing_compression_parents())
1868
def test_add_mulitiple_unvalidated_indices_with_mutual_dependencies(self):
1869
graph_index_a = self.make_g_index('one', 2,
1870
[(('parent-one', ), ' 100 78', ([('non-compression-parent',)], [])),
1871
(('child-of-two', ), ' 100 78',
1872
([('parent-two',)], [('parent-two',)]))])
1873
graph_index_b = self.make_g_index('two', 2,
1874
[(('parent-two', ), ' 100 78', ([('non-compression-parent',)], [])),
1875
(('child-of-one', ), ' 100 78',
1876
([('parent-one',)], [('parent-one',)]))])
1877
combined = CombinedGraphIndex([graph_index_a, graph_index_b])
1878
index = _KnitGraphIndex(combined, lambda: True, deltas=True)
1879
index.scan_unvalidated_index(graph_index_a)
1880
index.scan_unvalidated_index(graph_index_b)
1882
frozenset([]), index.get_missing_compression_parents())
2330
1885
class TestNoParentsGraphIndexKnit(KnitTests):
2331
"""Tests for knits using KnitGraphIndex with no parents."""
1886
"""Tests for knits using _KnitGraphIndex with no parents."""
2333
1888
def make_g_index(self, name, ref_lists=0, nodes=[]):
2334
1889
builder = GraphIndexBuilder(ref_lists)
2364
1927
add_callback = self.catch_add
2366
1929
add_callback = None
2367
return KnitGraphIndex(combined_index, parents=False,
1930
return _KnitGraphIndex(combined_index, lambda:True, parents=False,
2368
1931
add_callback=add_callback)
2370
def test_get_graph(self):
2371
index = self.two_graph_index()
2372
self.assertEqual(set([
2377
]), set(index.get_graph()))
2379
def test_get_ancestry(self):
2380
# with no parents, ancestry is always just the key.
2381
index = self.two_graph_index()
2382
self.assertEqual([], index.get_ancestry([]))
2383
self.assertEqual(['separate'], index.get_ancestry(['separate']))
2384
self.assertEqual(['tail'], index.get_ancestry(['tail']))
2385
self.assertEqual(['parent'], index.get_ancestry(['parent']))
2386
self.assertEqual(['tip'], index.get_ancestry(['tip']))
2387
self.assertTrue(index.get_ancestry(['tip', 'separate']) in
2388
(['tip', 'separate'],
2389
['separate', 'tip'],
2391
# asking for a ghost makes it go boom.
2392
self.assertRaises(errors.RevisionNotPresent, index.get_ancestry, ['ghost'])
2394
def test_get_ancestry_with_ghosts(self):
2395
index = self.two_graph_index()
2396
self.assertEqual([], index.get_ancestry_with_ghosts([]))
2397
self.assertEqual(['separate'], index.get_ancestry_with_ghosts(['separate']))
2398
self.assertEqual(['tail'], index.get_ancestry_with_ghosts(['tail']))
2399
self.assertEqual(['parent'], index.get_ancestry_with_ghosts(['parent']))
2400
self.assertEqual(['tip'], index.get_ancestry_with_ghosts(['tip']))
2401
self.assertTrue(index.get_ancestry_with_ghosts(['tip', 'separate']) in
2402
(['tip', 'separate'],
2403
['separate', 'tip'],
2405
# asking for a ghost makes it go boom.
2406
self.assertRaises(errors.RevisionNotPresent, index.get_ancestry_with_ghosts, ['ghost'])
2408
def test_num_versions(self):
2409
index = self.two_graph_index()
2410
self.assertEqual(4, index.num_versions())
2412
def test_get_versions(self):
2413
index = self.two_graph_index()
2414
self.assertEqual(set(['tail', 'tip', 'parent', 'separate']),
2415
set(index.get_versions()))
2417
def test_has_version(self):
2418
index = self.two_graph_index()
2419
self.assertTrue(index.has_version('tail'))
2420
self.assertFalse(index.has_version('ghost'))
1933
def test_keys(self):
1934
index = self.two_graph_index()
1935
self.assertEqual(set([('tail',), ('tip',), ('parent',), ('separate',)]),
2422
1938
def test_get_position(self):
2423
1939
index = self.two_graph_index()
2424
self.assertEqual((index._graph_index._indices[0], 0, 100), index.get_position('tip'))
2425
self.assertEqual((index._graph_index._indices[1], 100, 78), index.get_position('parent'))
1940
self.assertEqual((index._graph_index._indices[0], 0, 100),
1941
index.get_position(('tip',)))
1942
self.assertEqual((index._graph_index._indices[1], 100, 78),
1943
index.get_position(('parent',)))
2427
1945
def test_get_method(self):
2428
1946
index = self.two_graph_index()
2429
self.assertEqual('fulltext', index.get_method('tip'))
2430
self.assertEqual(['fulltext'], index.get_options('parent'))
1947
self.assertEqual('fulltext', index.get_method(('tip',)))
1948
self.assertEqual(['fulltext'], index.get_options(('parent',)))
2432
1950
def test_get_options(self):
2433
1951
index = self.two_graph_index()
2434
self.assertEqual(['fulltext', 'no-eol'], index.get_options('tip'))
2435
self.assertEqual(['fulltext'], index.get_options('parent'))
2437
def test_get_parents(self):
2438
index = self.two_graph_index()
2439
self.assertEqual((), index.get_parents('parent'))
2440
# and errors on ghosts.
2441
self.assertRaises(errors.RevisionNotPresent,
2442
index.get_parents, 'ghost')
2444
def test_get_parents_with_ghosts(self):
2445
index = self.two_graph_index()
2446
self.assertEqual((), index.get_parents_with_ghosts('parent'))
2447
# and errors on ghosts.
2448
self.assertRaises(errors.RevisionNotPresent,
2449
index.get_parents_with_ghosts, 'ghost')
2451
def test_check_versions_present(self):
2452
index = self.two_graph_index()
2453
self.assertRaises(RevisionNotPresent, index.check_versions_present,
2455
self.assertRaises(RevisionNotPresent, index.check_versions_present,
2456
['tail', 'missing'])
2457
index.check_versions_present(['tail', 'separate'])
1952
self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
1953
self.assertEqual(['fulltext'], index.get_options(('parent',)))
1955
def test_get_parent_map(self):
1956
index = self.two_graph_index()
1957
self.assertEqual({('parent',):None},
1958
index.get_parent_map([('parent',), ('ghost',)]))
2459
1960
def catch_add(self, entries):
2460
1961
self.caught_entries.append(entries)
2462
1963
def test_add_no_callback_errors(self):
2463
1964
index = self.two_graph_index()
2464
self.assertRaises(errors.ReadOnlyError, index.add_version,
2465
'new', 'fulltext,no-eol', (None, 50, 60), ['separate'])
1965
self.assertRaises(errors.ReadOnlyError, index.add_records,
1966
[(('new',), 'fulltext,no-eol', (None, 50, 60), [('separate',)])])
2467
1968
def test_add_version_smoke(self):
2468
1969
index = self.two_graph_index(catch_adds=True)
2469
index.add_version('new', 'fulltext,no-eol', (None, 50, 60), [])
1970
index.add_records([(('new',), 'fulltext,no-eol', (None, 50, 60), [])])
2470
1971
self.assertEqual([[(('new', ), 'N50 60')]],
2471
1972
self.caught_entries)
2473
1974
def test_add_version_delta_not_delta_index(self):
2474
1975
index = self.two_graph_index(catch_adds=True)
2475
self.assertRaises(errors.KnitCorrupt, index.add_version,
2476
'new', 'no-eol,line-delta', (None, 0, 100), [])
1976
self.assertRaises(errors.KnitCorrupt, index.add_records,
1977
[(('new',), 'no-eol,line-delta', (None, 0, 100), [])])
2477
1978
self.assertEqual([], self.caught_entries)
2479
1980
def test_add_version_same_dup(self):
2480
1981
index = self.two_graph_index(catch_adds=True)
2481
1982
# options can be spelt two different ways
2482
index.add_version('tip', 'fulltext,no-eol', (None, 0, 100), [])
2483
index.add_version('tip', 'no-eol,fulltext', (None, 0, 100), [])
1983
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
1984
index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [])])
1985
# position/length are ignored (because each pack could have fulltext or
1986
# delta, and be at a different position.
1987
index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100), [])])
1988
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000), [])])
2484
1989
# but neither should have added data.
2485
self.assertEqual([[], []], self.caught_entries)
1990
self.assertEqual([[], [], [], []], self.caught_entries)
2487
1992
def test_add_version_different_dup(self):
2488
1993
index = self.two_graph_index(catch_adds=True)
2489
1994
# change options
2490
self.assertRaises(errors.KnitCorrupt, index.add_version,
2491
'tip', 'no-eol,line-delta', (None, 0, 100), [])
2492
self.assertRaises(errors.KnitCorrupt, index.add_version,
2493
'tip', 'line-delta,no-eol', (None, 0, 100), [])
2494
self.assertRaises(errors.KnitCorrupt, index.add_version,
2495
'tip', 'fulltext', (None, 0, 100), [])
2497
self.assertRaises(errors.KnitCorrupt, index.add_version,
2498
'tip', 'fulltext,no-eol', (None, 50, 100), [])
2499
self.assertRaises(errors.KnitCorrupt, index.add_version,
2500
'tip', 'fulltext,no-eol', (None, 0, 1000), [])
1995
self.assertRaises(errors.KnitCorrupt, index.add_records,
1996
[(('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
1997
self.assertRaises(errors.KnitCorrupt, index.add_records,
1998
[(('tip',), 'line-delta,no-eol', (None, 0, 100), [])])
1999
self.assertRaises(errors.KnitCorrupt, index.add_records,
2000
[(('tip',), 'fulltext', (None, 0, 100), [])])
2502
self.assertRaises(errors.KnitCorrupt, index.add_version,
2503
'tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])
2002
self.assertRaises(errors.KnitCorrupt, index.add_records,
2003
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
2504
2004
self.assertEqual([], self.caught_entries)
2506
2006
def test_add_versions(self):
2507
2007
index = self.two_graph_index(catch_adds=True)
2508
index.add_versions([
2509
('new', 'fulltext,no-eol', (None, 50, 60), []),
2510
('new2', 'fulltext', (None, 0, 6), []),
2009
(('new',), 'fulltext,no-eol', (None, 50, 60), []),
2010
(('new2',), 'fulltext', (None, 0, 6), []),
2512
2012
self.assertEqual([(('new', ), 'N50 60'), (('new2', ), ' 0 6')],
2513
2013
sorted(self.caught_entries[0]))
2516
2016
def test_add_versions_delta_not_delta_index(self):
2517
2017
index = self.two_graph_index(catch_adds=True)
2518
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2519
[('new', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
2018
self.assertRaises(errors.KnitCorrupt, index.add_records,
2019
[(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
2520
2020
self.assertEqual([], self.caught_entries)
2522
2022
def test_add_versions_parents_not_parents_index(self):
2523
2023
index = self.two_graph_index(catch_adds=True)
2524
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2525
[('new', 'no-eol,fulltext', (None, 0, 100), ['parent'])])
2024
self.assertRaises(errors.KnitCorrupt, index.add_records,
2025
[(('new',), 'no-eol,fulltext', (None, 0, 100), [('parent',)])])
2526
2026
self.assertEqual([], self.caught_entries)
2028
def test_add_versions_random_id_accepted(self):
2029
index = self.two_graph_index(catch_adds=True)
2030
index.add_records([], random_id=True)
2528
2032
def test_add_versions_same_dup(self):
2529
2033
index = self.two_graph_index(catch_adds=True)
2530
2034
# options can be spelt two different ways
2531
index.add_versions([('tip', 'fulltext,no-eol', (None, 0, 100), [])])
2532
index.add_versions([('tip', 'no-eol,fulltext', (None, 0, 100), [])])
2035
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
2036
index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [])])
2037
# position/length are ignored (because each pack could have fulltext or
2038
# delta, and be at a different position.
2039
index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100), [])])
2040
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000), [])])
2533
2041
# but neither should have added data.
2534
self.assertEqual([[], []], self.caught_entries)
2042
self.assertEqual([[], [], [], []], self.caught_entries)
2536
2044
def test_add_versions_different_dup(self):
2537
2045
index = self.two_graph_index(catch_adds=True)
2538
2046
# change options
2539
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2540
[('tip', 'no-eol,line-delta', (None, 0, 100), [])])
2541
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2542
[('tip', 'line-delta,no-eol', (None, 0, 100), [])])
2543
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2544
[('tip', 'fulltext', (None, 0, 100), [])])
2546
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2547
[('tip', 'fulltext,no-eol', (None, 50, 100), [])])
2548
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2549
[('tip', 'fulltext,no-eol', (None, 0, 1000), [])])
2047
self.assertRaises(errors.KnitCorrupt, index.add_records,
2048
[(('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
2049
self.assertRaises(errors.KnitCorrupt, index.add_records,
2050
[(('tip',), 'line-delta,no-eol', (None, 0, 100), [])])
2051
self.assertRaises(errors.KnitCorrupt, index.add_records,
2052
[(('tip',), 'fulltext', (None, 0, 100), [])])
2551
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2552
[('tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])])
2054
self.assertRaises(errors.KnitCorrupt, index.add_records,
2055
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
2553
2056
# change options in the second record
2554
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2555
[('tip', 'fulltext,no-eol', (None, 0, 100), []),
2556
('tip', 'no-eol,line-delta', (None, 0, 100), [])])
2057
self.assertRaises(errors.KnitCorrupt, index.add_records,
2058
[(('tip',), 'fulltext,no-eol', (None, 0, 100), []),
2059
(('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
2557
2060
self.assertEqual([], self.caught_entries)
2559
def test_iter_parents(self):
2560
index = self.two_graph_index()
2561
self.assertEqual(set([
2562
('tip', ()), ('tail', ()), ('parent', ()), ('separate', ())
2564
set(index.iter_parents(['tip', 'tail', 'ghost', 'parent', 'separate'])))
2565
self.assertEqual(set([('tip', ())]),
2566
set(index.iter_parents(['tip'])))
2567
self.assertEqual(set(),
2568
set(index.iter_parents([])))
2063
class TestKnitVersionedFiles(KnitTests):
2065
def assertGroupKeysForIo(self, exp_groups, keys, non_local_keys,
2066
positions, _min_buffer_size=None):
2067
kvf = self.make_test_knit()
2068
if _min_buffer_size is None:
2069
_min_buffer_size = knit._STREAM_MIN_BUFFER_SIZE
2070
self.assertEqual(exp_groups, kvf._group_keys_for_io(keys,
2071
non_local_keys, positions,
2072
_min_buffer_size=_min_buffer_size))
2074
def assertSplitByPrefix(self, expected_map, expected_prefix_order,
2076
split, prefix_order = KnitVersionedFiles._split_by_prefix(keys)
2077
self.assertEqual(expected_map, split)
2078
self.assertEqual(expected_prefix_order, prefix_order)
2080
def test__group_keys_for_io(self):
2081
ft_detail = ('fulltext', False)
2082
ld_detail = ('line-delta', False)
2090
f_a: (ft_detail, (f_a, 0, 100), None),
2091
f_b: (ld_detail, (f_b, 100, 21), f_a),
2092
f_c: (ld_detail, (f_c, 180, 15), f_b),
2093
g_a: (ft_detail, (g_a, 121, 35), None),
2094
g_b: (ld_detail, (g_b, 156, 12), g_a),
2095
g_c: (ld_detail, (g_c, 195, 13), g_a),
2097
self.assertGroupKeysForIo([([f_a], set())],
2098
[f_a], [], positions)
2099
self.assertGroupKeysForIo([([f_a], set([f_a]))],
2100
[f_a], [f_a], positions)
2101
self.assertGroupKeysForIo([([f_a, f_b], set([]))],
2102
[f_a, f_b], [], positions)
2103
self.assertGroupKeysForIo([([f_a, f_b], set([f_b]))],
2104
[f_a, f_b], [f_b], positions)
2105
self.assertGroupKeysForIo([([f_a, f_b, g_a, g_b], set())],
2106
[f_a, g_a, f_b, g_b], [], positions)
2107
self.assertGroupKeysForIo([([f_a, f_b, g_a, g_b], set())],
2108
[f_a, g_a, f_b, g_b], [], positions,
2109
_min_buffer_size=150)
2110
self.assertGroupKeysForIo([([f_a, f_b], set()), ([g_a, g_b], set())],
2111
[f_a, g_a, f_b, g_b], [], positions,
2112
_min_buffer_size=100)
2113
self.assertGroupKeysForIo([([f_c], set()), ([g_b], set())],
2114
[f_c, g_b], [], positions,
2115
_min_buffer_size=125)
2116
self.assertGroupKeysForIo([([g_b, f_c], set())],
2117
[g_b, f_c], [], positions,
2118
_min_buffer_size=125)
2120
def test__split_by_prefix(self):
2121
self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
2122
'g': [('g', 'b'), ('g', 'a')],
2124
[('f', 'a'), ('g', 'b'),
2125
('g', 'a'), ('f', 'b')])
2127
self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
2128
'g': [('g', 'b'), ('g', 'a')],
2130
[('f', 'a'), ('f', 'b'),
2131
('g', 'b'), ('g', 'a')])
2133
self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
2134
'g': [('g', 'b'), ('g', 'a')],
2136
[('f', 'a'), ('f', 'b'),
2137
('g', 'b'), ('g', 'a')])
2139
self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
2140
'g': [('g', 'b'), ('g', 'a')],
2141
'': [('a',), ('b',)]
2143
[('f', 'a'), ('g', 'b'),
2145
('g', 'a'), ('f', 'b')])
2148
class TestStacking(KnitTests):
2150
def get_basis_and_test_knit(self):
2151
basis = self.make_test_knit(name='basis')
2152
basis = RecordingVersionedFilesDecorator(basis)
2153
test = self.make_test_knit(name='test')
2154
test.add_fallback_versioned_files(basis)
2157
def test_add_fallback_versioned_files(self):
2158
basis = self.make_test_knit(name='basis')
2159
test = self.make_test_knit(name='test')
2160
# It must not error; other tests test that the fallback is referred to
2161
# when accessing data.
2162
test.add_fallback_versioned_files(basis)
2164
def test_add_lines(self):
2165
# lines added to the test are not added to the basis
2166
basis, test = self.get_basis_and_test_knit()
2168
key_basis = ('bar',)
2169
key_cross_border = ('quux',)
2170
key_delta = ('zaphod',)
2171
test.add_lines(key, (), ['foo\n'])
2172
self.assertEqual({}, basis.get_parent_map([key]))
2173
# lines added to the test that reference across the stack do a
2175
basis.add_lines(key_basis, (), ['foo\n'])
2177
test.add_lines(key_cross_border, (key_basis,), ['foo\n'])
2178
self.assertEqual('fulltext', test._index.get_method(key_cross_border))
2179
# we don't even need to look at the basis to see that this should be
2180
# stored as a fulltext
2181
self.assertEqual([], basis.calls)
2182
# Subsequent adds do delta.
2184
test.add_lines(key_delta, (key_cross_border,), ['foo\n'])
2185
self.assertEqual('line-delta', test._index.get_method(key_delta))
2186
self.assertEqual([], basis.calls)
2188
def test_annotate(self):
2189
# annotations from the test knit are answered without asking the basis
2190
basis, test = self.get_basis_and_test_knit()
2192
key_basis = ('bar',)
2193
key_missing = ('missing',)
2194
test.add_lines(key, (), ['foo\n'])
2195
details = test.annotate(key)
2196
self.assertEqual([(key, 'foo\n')], details)
2197
self.assertEqual([], basis.calls)
2198
# But texts that are not in the test knit are looked for in the basis
2200
basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2202
details = test.annotate(key_basis)
2203
self.assertEqual([(key_basis, 'foo\n'), (key_basis, 'bar\n')], details)
2204
# Not optimised to date:
2205
# self.assertEqual([("annotate", key_basis)], basis.calls)
2206
self.assertEqual([('get_parent_map', set([key_basis])),
2207
('get_parent_map', set([key_basis])),
2208
('get_record_stream', [key_basis], 'topological', True)],
2211
def test_check(self):
2212
# At the moment checking a stacked knit does implicitly check the
2214
basis, test = self.get_basis_and_test_knit()
2217
def test_get_parent_map(self):
2218
# parents in the test knit are answered without asking the basis
2219
basis, test = self.get_basis_and_test_knit()
2221
key_basis = ('bar',)
2222
key_missing = ('missing',)
2223
test.add_lines(key, (), [])
2224
parent_map = test.get_parent_map([key])
2225
self.assertEqual({key: ()}, parent_map)
2226
self.assertEqual([], basis.calls)
2227
# But parents that are not in the test knit are looked for in the basis
2228
basis.add_lines(key_basis, (), [])
2230
parent_map = test.get_parent_map([key, key_basis, key_missing])
2231
self.assertEqual({key: (),
2232
key_basis: ()}, parent_map)
2233
self.assertEqual([("get_parent_map", set([key_basis, key_missing]))],
2236
def test_get_record_stream_unordered_fulltexts(self):
2237
# records from the test knit are answered without asking the basis:
2238
basis, test = self.get_basis_and_test_knit()
2240
key_basis = ('bar',)
2241
key_missing = ('missing',)
2242
test.add_lines(key, (), ['foo\n'])
2243
records = list(test.get_record_stream([key], 'unordered', True))
2244
self.assertEqual(1, len(records))
2245
self.assertEqual([], basis.calls)
2246
# Missing (from test knit) objects are retrieved from the basis:
2247
basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2249
records = list(test.get_record_stream([key_basis, key_missing],
2251
self.assertEqual(2, len(records))
2252
calls = list(basis.calls)
2253
for record in records:
2254
self.assertSubset([record.key], (key_basis, key_missing))
2255
if record.key == key_missing:
2256
self.assertIsInstance(record, AbsentContentFactory)
2258
reference = list(basis.get_record_stream([key_basis],
2259
'unordered', True))[0]
2260
self.assertEqual(reference.key, record.key)
2261
self.assertEqual(reference.sha1, record.sha1)
2262
self.assertEqual(reference.storage_kind, record.storage_kind)
2263
self.assertEqual(reference.get_bytes_as(reference.storage_kind),
2264
record.get_bytes_as(record.storage_kind))
2265
self.assertEqual(reference.get_bytes_as('fulltext'),
2266
record.get_bytes_as('fulltext'))
2267
# It's not strictly minimal, but it seems reasonable for now for it to
2268
# ask which fallbacks have which parents.
2270
("get_parent_map", set([key_basis, key_missing])),
2271
("get_record_stream", [key_basis], 'unordered', True)],
2274
def test_get_record_stream_ordered_fulltexts(self):
2275
# ordering is preserved down into the fallback store.
2276
basis, test = self.get_basis_and_test_knit()
2278
key_basis = ('bar',)
2279
key_basis_2 = ('quux',)
2280
key_missing = ('missing',)
2281
test.add_lines(key, (key_basis,), ['foo\n'])
2282
# Missing (from test knit) objects are retrieved from the basis:
2283
basis.add_lines(key_basis, (key_basis_2,), ['foo\n', 'bar\n'])
2284
basis.add_lines(key_basis_2, (), ['quux\n'])
2286
# ask for in non-topological order
2287
records = list(test.get_record_stream(
2288
[key, key_basis, key_missing, key_basis_2], 'topological', True))
2289
self.assertEqual(4, len(records))
2291
for record in records:
2292
self.assertSubset([record.key],
2293
(key_basis, key_missing, key_basis_2, key))
2294
if record.key == key_missing:
2295
self.assertIsInstance(record, AbsentContentFactory)
2297
results.append((record.key, record.sha1, record.storage_kind,
2298
record.get_bytes_as('fulltext')))
2299
calls = list(basis.calls)
2300
order = [record[0] for record in results]
2301
self.assertEqual([key_basis_2, key_basis, key], order)
2302
for result in results:
2303
if result[0] == key:
2307
record = source.get_record_stream([result[0]], 'unordered',
2309
self.assertEqual(record.key, result[0])
2310
self.assertEqual(record.sha1, result[1])
2311
# We used to check that the storage kind matched, but actually it
2312
# depends on whether it was sourced from the basis, or in a single
2313
# group, because asking for full texts returns proxy objects to a
2314
# _ContentMapGenerator object; so checking the kind is unneeded.
2315
self.assertEqual(record.get_bytes_as('fulltext'), result[3])
2316
# It's not strictly minimal, but it seems reasonable for now for it to
2317
# ask which fallbacks have which parents.
2319
("get_parent_map", set([key_basis, key_basis_2, key_missing])),
2320
# topological is requested from the fallback, because that is what
2321
# was requested at the top level.
2322
("get_record_stream", [key_basis_2, key_basis], 'topological', True)],
2325
def test_get_record_stream_unordered_deltas(self):
2326
# records from the test knit are answered without asking the basis:
2327
basis, test = self.get_basis_and_test_knit()
2329
key_basis = ('bar',)
2330
key_missing = ('missing',)
2331
test.add_lines(key, (), ['foo\n'])
2332
records = list(test.get_record_stream([key], 'unordered', False))
2333
self.assertEqual(1, len(records))
2334
self.assertEqual([], basis.calls)
2335
# Missing (from test knit) objects are retrieved from the basis:
2336
basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2338
records = list(test.get_record_stream([key_basis, key_missing],
2339
'unordered', False))
2340
self.assertEqual(2, len(records))
2341
calls = list(basis.calls)
2342
for record in records:
2343
self.assertSubset([record.key], (key_basis, key_missing))
2344
if record.key == key_missing:
2345
self.assertIsInstance(record, AbsentContentFactory)
2347
reference = list(basis.get_record_stream([key_basis],
2348
'unordered', False))[0]
2349
self.assertEqual(reference.key, record.key)
2350
self.assertEqual(reference.sha1, record.sha1)
2351
self.assertEqual(reference.storage_kind, record.storage_kind)
2352
self.assertEqual(reference.get_bytes_as(reference.storage_kind),
2353
record.get_bytes_as(record.storage_kind))
2354
# It's not strictly minimal, but it seems reasonable for now for it to
2355
# ask which fallbacks have which parents.
2357
("get_parent_map", set([key_basis, key_missing])),
2358
("get_record_stream", [key_basis], 'unordered', False)],
2361
def test_get_record_stream_ordered_deltas(self):
2362
# ordering is preserved down into the fallback store.
2363
basis, test = self.get_basis_and_test_knit()
2365
key_basis = ('bar',)
2366
key_basis_2 = ('quux',)
2367
key_missing = ('missing',)
2368
test.add_lines(key, (key_basis,), ['foo\n'])
2369
# Missing (from test knit) objects are retrieved from the basis:
2370
basis.add_lines(key_basis, (key_basis_2,), ['foo\n', 'bar\n'])
2371
basis.add_lines(key_basis_2, (), ['quux\n'])
2373
# ask for in non-topological order
2374
records = list(test.get_record_stream(
2375
[key, key_basis, key_missing, key_basis_2], 'topological', False))
2376
self.assertEqual(4, len(records))
2378
for record in records:
2379
self.assertSubset([record.key],
2380
(key_basis, key_missing, key_basis_2, key))
2381
if record.key == key_missing:
2382
self.assertIsInstance(record, AbsentContentFactory)
2384
results.append((record.key, record.sha1, record.storage_kind,
2385
record.get_bytes_as(record.storage_kind)))
2386
calls = list(basis.calls)
2387
order = [record[0] for record in results]
2388
self.assertEqual([key_basis_2, key_basis, key], order)
2389
for result in results:
2390
if result[0] == key:
2394
record = source.get_record_stream([result[0]], 'unordered',
2396
self.assertEqual(record.key, result[0])
2397
self.assertEqual(record.sha1, result[1])
2398
self.assertEqual(record.storage_kind, result[2])
2399
self.assertEqual(record.get_bytes_as(record.storage_kind), result[3])
2400
# It's not strictly minimal, but it seems reasonable for now for it to
2401
# ask which fallbacks have which parents.
2403
("get_parent_map", set([key_basis, key_basis_2, key_missing])),
2404
("get_record_stream", [key_basis_2, key_basis], 'topological', False)],
2407
def test_get_sha1s(self):
2408
# sha1's in the test knit are answered without asking the basis
2409
basis, test = self.get_basis_and_test_knit()
2411
key_basis = ('bar',)
2412
key_missing = ('missing',)
2413
test.add_lines(key, (), ['foo\n'])
2414
key_sha1sum = osutils.sha('foo\n').hexdigest()
2415
sha1s = test.get_sha1s([key])
2416
self.assertEqual({key: key_sha1sum}, sha1s)
2417
self.assertEqual([], basis.calls)
2418
# But texts that are not in the test knit are looked for in the basis
2419
# directly (rather than via text reconstruction) so that remote servers
2420
# etc don't have to answer with full content.
2421
basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2422
basis_sha1sum = osutils.sha('foo\nbar\n').hexdigest()
2424
sha1s = test.get_sha1s([key, key_missing, key_basis])
2425
self.assertEqual({key: key_sha1sum,
2426
key_basis: basis_sha1sum}, sha1s)
2427
self.assertEqual([("get_sha1s", set([key_basis, key_missing]))],
2430
def test_insert_record_stream(self):
2431
# records are inserted as normal; insert_record_stream builds on
2432
# add_lines, so a smoke test should be all that's needed:
2434
key_basis = ('bar',)
2435
key_delta = ('zaphod',)
2436
basis, test = self.get_basis_and_test_knit()
2437
source = self.make_test_knit(name='source')
2438
basis.add_lines(key_basis, (), ['foo\n'])
2440
source.add_lines(key_basis, (), ['foo\n'])
2441
source.add_lines(key_delta, (key_basis,), ['bar\n'])
2442
stream = source.get_record_stream([key_delta], 'unordered', False)
2443
test.insert_record_stream(stream)
2444
# XXX: this does somewhat too many calls in making sure of whether it
2445
# has to recreate the full text.
2446
self.assertEqual([("get_parent_map", set([key_basis])),
2447
('get_parent_map', set([key_basis])),
2448
('get_record_stream', [key_basis], 'unordered', True)],
2450
self.assertEqual({key_delta:(key_basis,)},
2451
test.get_parent_map([key_delta]))
2452
self.assertEqual('bar\n', test.get_record_stream([key_delta],
2453
'unordered', True).next().get_bytes_as('fulltext'))
2455
def test_iter_lines_added_or_present_in_keys(self):
2456
# Lines from the basis are returned, and lines for a given key are only
2460
# all sources are asked for keys:
2461
basis, test = self.get_basis_and_test_knit()
2462
basis.add_lines(key1, (), ["foo"])
2464
lines = list(test.iter_lines_added_or_present_in_keys([key1]))
2465
self.assertEqual([("foo\n", key1)], lines)
2466
self.assertEqual([("iter_lines_added_or_present_in_keys", set([key1]))],
2468
# keys in both are not duplicated:
2469
test.add_lines(key2, (), ["bar\n"])
2470
basis.add_lines(key2, (), ["bar\n"])
2472
lines = list(test.iter_lines_added_or_present_in_keys([key2]))
2473
self.assertEqual([("bar\n", key2)], lines)
2474
self.assertEqual([], basis.calls)
2476
def test_keys(self):
2479
# all sources are asked for keys:
2480
basis, test = self.get_basis_and_test_knit()
2482
self.assertEqual(set(), set(keys))
2483
self.assertEqual([("keys",)], basis.calls)
2484
# keys from a basis are returned:
2485
basis.add_lines(key1, (), [])
2488
self.assertEqual(set([key1]), set(keys))
2489
self.assertEqual([("keys",)], basis.calls)
2490
# keys in both are not duplicated:
2491
test.add_lines(key2, (), [])
2492
basis.add_lines(key2, (), [])
2495
self.assertEqual(2, len(keys))
2496
self.assertEqual(set([key1, key2]), set(keys))
2497
self.assertEqual([("keys",)], basis.calls)
2499
def test_add_mpdiffs(self):
2500
# records are inserted as normal; add_mpdiff builds on
2501
# add_lines, so a smoke test should be all that's needed:
2503
key_basis = ('bar',)
2504
key_delta = ('zaphod',)
2505
basis, test = self.get_basis_and_test_knit()
2506
source = self.make_test_knit(name='source')
2507
basis.add_lines(key_basis, (), ['foo\n'])
2509
source.add_lines(key_basis, (), ['foo\n'])
2510
source.add_lines(key_delta, (key_basis,), ['bar\n'])
2511
diffs = source.make_mpdiffs([key_delta])
2512
test.add_mpdiffs([(key_delta, (key_basis,),
2513
source.get_sha1s([key_delta])[key_delta], diffs[0])])
2514
self.assertEqual([("get_parent_map", set([key_basis])),
2515
('get_record_stream', [key_basis], 'unordered', True),],
2517
self.assertEqual({key_delta:(key_basis,)},
2518
test.get_parent_map([key_delta]))
2519
self.assertEqual('bar\n', test.get_record_stream([key_delta],
2520
'unordered', True).next().get_bytes_as('fulltext'))
2522
def test_make_mpdiffs(self):
    """make_mpdiffs should fetch parent texts from the stacked-on basis."""
    # Generating an mpdiff across a stacking boundary should detect parent
    # texts from the basis.
    key = ('foo',)
    key_left = ('bar',)
    key_right = ('zaphod',)
    basis, test = self.get_basis_and_test_knit()
    basis.add_lines(key_left, (), ['bar\n'])
    basis.add_lines(key_right, (), ['zaphod\n'])
    # reset recorded calls so only the make_mpdiffs work below is asserted on
    basis.calls = []
    test.add_lines(key, (key_left, key_right),
        ['bar\n', 'foo\n', 'zaphod\n'])
    diffs = test.make_mpdiffs([key])
    # line 0 comes from key_left, 'foo\n' is new, line 2 from key_right
    self.assertEqual([
        multiparent.MultiParent([multiparent.ParentText(0, 0, 0, 1),
            multiparent.NewText(['foo\n']),
            multiparent.ParentText(1, 0, 2, 1)])],
        diffs)
    self.assertEqual(3, len(basis.calls))
    self.assertEqual([
        ("get_parent_map", set([key_left, key_right])),
        ("get_parent_map", set([key_left, key_right])),
        ],
        basis.calls[:-1])
    # The record stream request may list the keys in either order, so
    # check its components individually rather than the whole tuple.
    last_call = basis.calls[-1]
    self.assertEqual('get_record_stream', last_call[0])
    self.assertEqual(set([key_left, key_right]), set(last_call[1]))
    self.assertEqual('topological', last_call[2])
    self.assertEqual(True, last_call[3])
class TestNetworkBehaviour(KnitTests):
    """Tests for getting data out of/into knits over the network."""

    def test_include_delta_closure_generates_a_knit_delta_closure(self):
        """A delta-closure stream serialises as 'knit-delta-closure' bytes."""
        vf = self.make_test_knit(name='test')
        # put in three texts, giving ft, delta, delta
        vf.add_lines(('base',), (), ['base\n', 'content\n'])
        vf.add_lines(('d1',), (('base',),), ['d1\n'])
        vf.add_lines(('d2',), (('d1',),), ['d2\n'])
        # But heuristics could interfere, so check what happened:
        self.assertEqual(['knit-ft-gz', 'knit-delta-gz', 'knit-delta-gz'],
            [record.storage_kind for record in
             vf.get_record_stream([('base',), ('d1',), ('d2',)],
                'topological', False)])
        # generate a stream of just the deltas include_delta_closure=True,
        # serialise to the network, and check that we get a delta closure on the wire.
        stream = vf.get_record_stream([('d1',), ('d2',)], 'topological', True)
        netb = [record.get_bytes_as(record.storage_kind) for record in stream]
        # The first bytes should be a memo from _ContentMapGenerator, and the
        # second bytes should be empty (because its a API proxy not something
        # for wire serialisation.
        self.assertEqual('', netb[1])
        bytes = netb[0]
        kind, line_end = network_bytes_to_kind_and_offset(bytes)
        self.assertEqual('knit-delta-closure', kind)
class TestContentMapGenerator(KnitTests):
    """Tests for ContentMapGenerator"""

    def test_get_record_stream_gives_records(self):
        """get_record_stream yields records whose fulltexts are correct."""
        vf = self.make_test_knit(name='test')
        # put in three texts, giving ft, delta, delta
        vf.add_lines(('base',), (), ['base\n', 'content\n'])
        vf.add_lines(('d1',), (('base',),), ['d1\n'])
        vf.add_lines(('d2',), (('d1',),), ['d2\n'])
        keys = [('d1',), ('d2',)]
        generator = _VFContentMapGenerator(vf, keys,
            global_map=vf.get_parent_map(keys))
        for record in generator.get_record_stream():
            if record.key == ('d1',):
                self.assertEqual('d1\n', record.get_bytes_as('fulltext'))
            else:
                self.assertEqual('d2\n', record.get_bytes_as('fulltext'))

    def test_get_record_stream_kinds_are_raw(self):
        """Records come out with the raw delta-closure storage kinds."""
        vf = self.make_test_knit(name='test')
        # put in three texts, giving ft, delta, delta
        vf.add_lines(('base',), (), ['base\n', 'content\n'])
        vf.add_lines(('d1',), (('base',),), ['d1\n'])
        vf.add_lines(('d2',), (('d1',),), ['d2\n'])
        keys = [('base',), ('d1',), ('d2',)]
        generator = _VFContentMapGenerator(vf, keys,
            global_map=vf.get_parent_map(keys))
        # the first record carries the closure itself; the rest reference it
        kinds = {('base',): 'knit-delta-closure',
            ('d1',): 'knit-delta-closure-ref',
            ('d2',): 'knit-delta-closure-ref',
            }
        for record in generator.get_record_stream():
            self.assertEqual(kinds[record.key], record.storage_kind)