487
355
writer = pack.ContainerWriter(write_data)
489
357
access.set_writer(writer, index, (transport, packname))
490
memos = access.add_raw_records([('key', 10)], '1234567890')
358
memos = access.add_raw_records([10], '1234567890')
492
360
self.assertEqual(['1234567890'], list(access.get_raw_records(memos)))
494
def test_missing_index_raises_retry(self):
    """A memo whose index key is unknown triggers RetryWithNewPacks.

    The retry exception must report that a reload already occurred,
    because an unknown key means our index listing is stale-by-definition.
    """
    memos = self.make_pack_file()
    transport = self.get_transport()
    reload_called, reload_func = self.make_reload_func()
    # Note that the index key has changed from 'foo' to 'bar'
    access = pack_repo._DirectPackAccess({'bar': (transport, 'packname')},
                                         reload_func=reload_func)
    e = self.assertListRaises(errors.RetryWithNewPacks,
                              access.get_raw_records, memos)
    # Because a key was passed in which does not match our index list, we
    # assume that the listing was already reloaded
    self.assertTrue(e.reload_occurred)
    # exc_info carries the original KeyError for diagnostics
    self.assertIsInstance(e.exc_info, tuple)
    self.assertIs(e.exc_info[0], KeyError)
    self.assertIsInstance(e.exc_info[1], KeyError)
510
def test_missing_index_raises_key_error_with_no_reload(self):
    """Without a reload_func, an unknown index key surfaces as KeyError.

    No retry machinery is available, so the raw lookup failure
    propagates to the caller unchanged.
    """
    memos = self.make_pack_file()
    transport = self.get_transport()
    # Note that the index key has changed from 'foo' to 'bar'
    access = pack_repo._DirectPackAccess({'bar': (transport, 'packname')})
    e = self.assertListRaises(KeyError, access.get_raw_records, memos)
517
def test_missing_file_raises_retry(self):
    """A missing pack file triggers RetryWithNewPacks with reload pending.

    The key is known but the file is gone, so reload_occurred is False:
    the caller is expected to reload and retry.
    """
    memos = self.make_pack_file()
    transport = self.get_transport()
    reload_called, reload_func = self.make_reload_func()
    # Note that the 'filename' has been changed to 'different-packname'
    access = pack_repo._DirectPackAccess(
        {'foo': (transport, 'different-packname')},
        reload_func=reload_func)
    e = self.assertListRaises(errors.RetryWithNewPacks,
                              access.get_raw_records, memos)
    # The file has gone missing, so we assume we need to reload
    self.assertFalse(e.reload_occurred)
    # exc_info carries the original NoSuchFile, including the bad path
    self.assertIsInstance(e.exc_info, tuple)
    self.assertIs(e.exc_info[0], errors.NoSuchFile)
    self.assertIsInstance(e.exc_info[1], errors.NoSuchFile)
    self.assertEqual('different-packname', e.exc_info[1].path)
534
def test_missing_file_raises_no_such_file_with_no_reload(self):
    """Without a reload_func, a missing pack file raises NoSuchFile.

    There is no retry path available, so the transport error
    propagates directly.
    """
    memos = self.make_pack_file()
    transport = self.get_transport()
    # Note that the 'filename' has been changed to 'different-packname'
    access = pack_repo._DirectPackAccess(
        {'foo': (transport, 'different-packname')})
    e = self.assertListRaises(errors.NoSuchFile,
                              access.get_raw_records, memos)
543
def test_failing_readv_raises_retry(self):
    """A readv() that fails mid-stream triggers RetryWithNewPacks.

    Single-record requests succeed (the mock only fails on multi-offset
    readv), proving the failure is detected lazily during iteration.
    """
    memos = self.make_pack_file()
    transport = self.get_transport()
    failing_transport = MockReadvFailingTransport(
        [transport.get_bytes('packname')])
    reload_called, reload_func = self.make_reload_func()
    access = pack_repo._DirectPackAccess(
        {'foo': (failing_transport, 'packname')},
        reload_func=reload_func)
    # Asking for a single record will not trigger the Mock failure
    self.assertEqual(['1234567890'],
                     list(access.get_raw_records(memos[:1])))
    self.assertEqual(['12345'],
                     list(access.get_raw_records(memos[1:2])))
    # A multiple offset readv() will fail mid-way through
    e = self.assertListRaises(errors.RetryWithNewPacks,
                              access.get_raw_records, memos)
    # The file has gone missing, so we assume we need to reload
    self.assertFalse(e.reload_occurred)
    # exc_info preserves the underlying NoSuchFile and its path
    self.assertIsInstance(e.exc_info, tuple)
    self.assertIs(e.exc_info[0], errors.NoSuchFile)
    self.assertIsInstance(e.exc_info[1], errors.NoSuchFile)
    self.assertEqual('packname', e.exc_info[1].path)
567
def test_failing_readv_raises_no_such_file_with_no_reload(self):
    """A mid-stream readv() failure raises NoSuchFile when no reload_func.

    Same failure mode as test_failing_readv_raises_retry, but with no
    retry machinery the transport error propagates directly.
    """
    memos = self.make_pack_file()
    transport = self.get_transport()
    failing_transport = MockReadvFailingTransport(
        [transport.get_bytes('packname')])
    reload_called, reload_func = self.make_reload_func()
    access = pack_repo._DirectPackAccess(
        {'foo': (failing_transport, 'packname')})
    # Asking for a single record will not trigger the Mock failure
    self.assertEqual(['1234567890'],
                     list(access.get_raw_records(memos[:1])))
    self.assertEqual(['12345'],
                     list(access.get_raw_records(memos[1:2])))
    # A multiple offset readv() will fail mid-way through
    e = self.assertListRaises(errors.NoSuchFile,
                              access.get_raw_records, memos)
584
def test_reload_or_raise_no_reload(self):
    """reload_or_raise re-raises the original error when reload_func is None."""
    access = pack_repo._DirectPackAccess({}, reload_func=None)
    retry_exc = self.make_retry_exception()
    # Without a reload_func, we will just re-raise the original exception
    self.assertRaises(_TestException, access.reload_or_raise, retry_exc)
590
def test_reload_or_raise_reload_changed(self):
    """reload_or_raise swallows the error whenever reload_func reports changes.

    This holds regardless of whether the retry exception says a reload
    already occurred; each call still invokes reload_func once.
    """
    reload_called, reload_func = self.make_reload_func(return_val=True)
    access = pack_repo._DirectPackAccess({}, reload_func=reload_func)
    retry_exc = self.make_retry_exception()
    access.reload_or_raise(retry_exc)
    self.assertEqual([1], reload_called)
    # Even with reload_occurred set, a changed reload is accepted
    retry_exc.reload_occurred = True
    access.reload_or_raise(retry_exc)
    self.assertEqual([2], reload_called)
600
def test_reload_or_raise_reload_no_change(self):
    """reload_or_raise only re-raises a no-change reload on the first attempt.

    If the retry exception did not come from a prior reload, a
    reload_func returning False means nothing can improve, so the
    original error is re-raised. If a reload had already occurred, a
    further no-change reload is tolerated.
    """
    reload_called, reload_func = self.make_reload_func(return_val=False)
    access = pack_repo._DirectPackAccess({}, reload_func=reload_func)
    retry_exc = self.make_retry_exception()
    # If reload_occurred is False, then we consider it an error to have
    # reload_func() return False (no changes).
    self.assertRaises(_TestException, access.reload_or_raise, retry_exc)
    self.assertEqual([1], reload_called)
    retry_exc.reload_occurred = True
    # If reload_occurred is True, then we assume nothing changed because
    # it had changed earlier, but didn't change again
    access.reload_or_raise(retry_exc)
    self.assertEqual([2], reload_called)
614
def test_annotate_retries(self):
615
vf, reload_counter = self.make_vf_for_retrying()
616
# It is a little bit bogus to annotate the Revision VF, but it works,
617
# as we have ancestry stored there
619
reload_lines = vf.annotate(key)
620
self.assertEqual([1, 1, 0], reload_counter)
621
plain_lines = vf.annotate(key)
622
self.assertEqual([1, 1, 0], reload_counter) # No extra reloading
623
if reload_lines != plain_lines:
624
self.fail('Annotation was not identical with reloading.')
625
# Now delete the packs-in-use, which should trigger another reload, but
626
# this time we just raise an exception because we can't recover
627
for trans, name in vf._access._indices.itervalues():
629
self.assertRaises(errors.NoSuchFile, vf.annotate, key)
630
self.assertEqual([2, 1, 1], reload_counter)
632
def test__get_record_map_retries(self):
633
vf, reload_counter = self.make_vf_for_retrying()
634
keys = [('rev-1',), ('rev-2',), ('rev-3',)]
635
records = vf._get_record_map(keys)
636
self.assertEqual(keys, sorted(records.keys()))
637
self.assertEqual([1, 1, 0], reload_counter)
638
# Now delete the packs-in-use, which should trigger another reload, but
639
# this time we just raise an exception because we can't recover
640
for trans, name in vf._access._indices.itervalues():
642
self.assertRaises(errors.NoSuchFile, vf._get_record_map, keys)
643
self.assertEqual([2, 1, 1], reload_counter)
645
def test_get_record_stream_retries(self):
646
vf, reload_counter = self.make_vf_for_retrying()
647
keys = [('rev-1',), ('rev-2',), ('rev-3',)]
648
record_stream = vf.get_record_stream(keys, 'topological', False)
649
record = record_stream.next()
650
self.assertEqual(('rev-1',), record.key)
651
self.assertEqual([0, 0, 0], reload_counter)
652
record = record_stream.next()
653
self.assertEqual(('rev-2',), record.key)
654
self.assertEqual([1, 1, 0], reload_counter)
655
record = record_stream.next()
656
self.assertEqual(('rev-3',), record.key)
657
self.assertEqual([1, 1, 0], reload_counter)
658
# Now delete all pack files, and see that we raise the right error
659
for trans, name in vf._access._indices.itervalues():
661
self.assertListRaises(errors.NoSuchFile,
662
vf.get_record_stream, keys, 'topological', False)
664
def test_iter_lines_added_or_present_in_keys_retries(self):
665
vf, reload_counter = self.make_vf_for_retrying()
666
keys = [('rev-1',), ('rev-2',), ('rev-3',)]
667
# Unfortunately, iter_lines_added_or_present_in_keys iterates the
668
# result in random order (determined by the iteration order from a
669
# set()), so we don't have any solid way to trigger whether data is
670
# read before or after. However we tried to delete the middle node to
671
# exercise the code well.
672
# What we care about is that all lines are always yielded, but not
675
reload_lines = sorted(vf.iter_lines_added_or_present_in_keys(keys))
676
self.assertEqual([1, 1, 0], reload_counter)
677
# Now do it again, to make sure the result is equivalent
678
plain_lines = sorted(vf.iter_lines_added_or_present_in_keys(keys))
679
self.assertEqual([1, 1, 0], reload_counter) # No extra reloading
680
self.assertEqual(plain_lines, reload_lines)
681
self.assertEqual(21, len(plain_lines))
682
# Now delete all pack files, and see that we raise the right error
683
for trans, name in vf._access._indices.itervalues():
685
self.assertListRaises(errors.NoSuchFile,
686
vf.iter_lines_added_or_present_in_keys, keys)
687
self.assertEqual([2, 1, 1], reload_counter)
689
def test_get_record_stream_yields_disk_sorted_order(self):
690
# if we get 'unordered' pick a semi-optimal order for reading. The
691
# order should be grouped by pack file, and then by position in file
692
repo = self.make_repository('test', format='pack-0.92')
694
self.addCleanup(repo.unlock)
695
repo.start_write_group()
697
vf.add_lines(('f-id', 'rev-5'), [('f-id', 'rev-4')], ['lines\n'])
698
vf.add_lines(('f-id', 'rev-1'), [], ['lines\n'])
699
vf.add_lines(('f-id', 'rev-2'), [('f-id', 'rev-1')], ['lines\n'])
700
repo.commit_write_group()
701
# We inserted them as rev-5, rev-1, rev-2, we should get them back in
703
stream = vf.get_record_stream([('f-id', 'rev-1'), ('f-id', 'rev-5'),
704
('f-id', 'rev-2')], 'unordered', False)
705
keys = [r.key for r in stream]
706
self.assertEqual([('f-id', 'rev-5'), ('f-id', 'rev-1'),
707
('f-id', 'rev-2')], keys)
708
repo.start_write_group()
709
vf.add_lines(('f-id', 'rev-4'), [('f-id', 'rev-3')], ['lines\n'])
710
vf.add_lines(('f-id', 'rev-3'), [('f-id', 'rev-2')], ['lines\n'])
711
vf.add_lines(('f-id', 'rev-6'), [('f-id', 'rev-5')], ['lines\n'])
712
repo.commit_write_group()
713
# Request in random order, to make sure the output order isn't based on
715
request_keys = set(('f-id', 'rev-%d' % i) for i in range(1, 7))
716
stream = vf.get_record_stream(request_keys, 'unordered', False)
717
keys = [r.key for r in stream]
718
# We want to get the keys back in disk order, but it doesn't matter
719
# which pack we read from first. So this can come back in 2 orders
720
alt1 = [('f-id', 'rev-%d' % i) for i in [4, 3, 6, 5, 1, 2]]
721
alt2 = [('f-id', 'rev-%d' % i) for i in [5, 1, 2, 4, 3, 6]]
722
if keys != alt1 and keys != alt2:
723
self.fail('Returned key order did not match either expected order.'
724
' expected %s or %s, not %s'
725
% (alt1, alt2, keys))
728
363
class LowLevelKnitDataTests(TestCase):
877
469
# Change 2 bytes in the middle to \xff
878
470
gz_txt = gz_txt[:10] + '\xff\xff' + gz_txt[12:]
879
471
transport = MockTransport([gz_txt])
880
access = _KnitKeyAccess(transport, ConstantMapper('filename'))
881
knit = KnitVersionedFiles(None, access)
882
records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]
883
self.assertRaises(errors.KnitCorrupt, list,
884
knit._read_records_iter(records))
885
# read_records_iter_raw will barf on bad gz data
886
self.assertRaises(errors.KnitCorrupt, list,
887
knit._read_records_iter_raw(records))
472
access = _KnitAccess(transport, 'filename', None, None, False, False)
473
data = _KnitData(access=access)
474
records = [('rev-id-1', (None, 0, len(gz_txt)))]
476
self.assertRaises(errors.KnitCorrupt, data.read_records, records)
478
# read_records_iter_raw will notice if we request the wrong version.
479
self.assertRaises(errors.KnitCorrupt, list,
480
data.read_records_iter_raw(records))
890
483
class LowLevelKnitIndexTests(TestCase):
892
def get_knit_index(self, transport, name, mode):
893
mapper = ConstantMapper(name)
485
def get_knit_index(self, *args, **kwargs):
486
orig = knit._load_data
488
knit._load_data = orig
489
self.addCleanup(reset)
894
490
from bzrlib._knit_load_data_py import _load_data_py
895
self.overrideAttr(knit, '_load_data', _load_data_py)
896
allow_writes = lambda: 'w' in mode
897
return _KndxIndex(transport, mapper, lambda:None, allow_writes, lambda:True)
491
knit._load_data = _load_data_py
492
return _KnitIndex(*args, **kwargs)
494
def test_no_such_file(self):
495
transport = MockTransport()
497
self.assertRaises(NoSuchFile, self.get_knit_index,
498
transport, "filename", "r")
499
self.assertRaises(NoSuchFile, self.get_knit_index,
500
transport, "filename", "w", create=False)
899
502
def test_create_file(self):
900
503
transport = MockTransport()
901
index = self.get_knit_index(transport, "filename", "w")
903
call = transport.calls.pop(0)
904
# call[1][1] is a StringIO - we can't test it by simple equality.
905
self.assertEqual('put_file_non_atomic', call[0])
906
self.assertEqual('filename.kndx', call[1][0])
907
# With no history, _KndxIndex writes a new index:
908
self.assertEqual(_KndxIndex.HEADER,
909
call[1][1].getvalue())
910
self.assertEqual({'create_parent_dir': True}, call[2])
505
index = self.get_knit_index(transport, "filename", "w",
506
file_mode="wb", create=True)
508
("put_bytes_non_atomic",
509
("filename", index.HEADER), {"mode": "wb"}),
510
transport.calls.pop(0))
512
def test_delay_create_file(self):
513
transport = MockTransport()
515
index = self.get_knit_index(transport, "filename", "w",
516
create=True, file_mode="wb", create_parent_dir=True,
517
delay_create=True, dir_mode=0777)
518
self.assertEqual([], transport.calls)
520
index.add_versions([])
521
name, (filename, f), kwargs = transport.calls.pop(0)
522
self.assertEqual("put_file_non_atomic", name)
524
{"dir_mode": 0777, "create_parent_dir": True, "mode": "wb"},
526
self.assertEqual("filename", filename)
527
self.assertEqual(index.HEADER, f.read())
529
index.add_versions([])
530
self.assertEqual(("append_bytes", ("filename", ""), {}),
531
transport.calls.pop(0))
912
533
def test_read_utf8_version_id(self):
913
534
unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
914
535
utf8_revision_id = unicode_revision_id.encode('utf-8')
915
536
transport = MockTransport([
917
538
'%s option 0 1 :' % (utf8_revision_id,)
919
540
index = self.get_knit_index(transport, "filename", "r")
920
# _KndxIndex is a private class, and deals in utf8 revision_ids, not
541
# _KnitIndex is a private class, and deals in utf8 revision_ids, not
921
542
# Unicode revision_ids.
922
self.assertEqual({(utf8_revision_id,):()},
923
index.get_parent_map(index.keys()))
924
self.assertFalse((unicode_revision_id,) in index.keys())
543
self.assertTrue(index.has_version(utf8_revision_id))
544
self.assertFalse(index.has_version(unicode_revision_id))
926
546
def test_read_utf8_parents(self):
927
547
unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
928
548
utf8_revision_id = unicode_revision_id.encode('utf-8')
929
549
transport = MockTransport([
931
551
"version option 0 1 .%s :" % (utf8_revision_id,)
933
553
index = self.get_knit_index(transport, "filename", "r")
934
self.assertEqual({("version",):((utf8_revision_id,),)},
935
index.get_parent_map(index.keys()))
554
self.assertEqual([utf8_revision_id],
555
index.get_parents_with_ghosts("version"))
937
557
def test_read_ignore_corrupted_lines(self):
938
558
transport = MockTransport([
941
561
"corrupted options 0 1 .b .c ",
942
562
"version options 0 1 :"
944
564
index = self.get_knit_index(transport, "filename", "r")
945
self.assertEqual(1, len(index.keys()))
946
self.assertEqual(set([("version",)]), index.keys())
565
self.assertEqual(1, index.num_versions())
566
self.assertTrue(index.has_version("version"))
948
568
def test_read_corrupted_header(self):
949
569
transport = MockTransport(['not a bzr knit index header\n'])
950
index = self.get_knit_index(transport, "filename", "r")
951
self.assertRaises(KnitHeaderError, index.keys)
570
self.assertRaises(KnitHeaderError,
571
self.get_knit_index, transport, "filename", "r")
953
573
def test_read_duplicate_entries(self):
954
574
transport = MockTransport([
956
576
"parent options 0 1 :",
957
577
"version options1 0 1 0 :",
958
578
"version options2 1 2 .other :",
959
579
"version options3 3 4 0 .other :"
961
581
index = self.get_knit_index(transport, "filename", "r")
962
self.assertEqual(2, len(index.keys()))
582
self.assertEqual(2, index.num_versions())
963
583
# check that the index used is the first one written. (Specific
964
584
# to KnitIndex style indices.
965
self.assertEqual("1", index._dictionary_compress([("version",)]))
966
self.assertEqual((("version",), 3, 4), index.get_position(("version",)))
967
self.assertEqual(["options3"], index.get_options(("version",)))
968
self.assertEqual({("version",):(("parent",), ("other",))},
969
index.get_parent_map([("version",)]))
585
self.assertEqual("1", index._version_list_to_index(["version"]))
586
self.assertEqual((None, 3, 4), index.get_position("version"))
587
self.assertEqual(["options3"], index.get_options("version"))
588
self.assertEqual(["parent", "other"],
589
index.get_parents_with_ghosts("version"))
971
591
def test_read_compressed_parents(self):
972
592
transport = MockTransport([
974
594
"a option 0 1 :",
975
595
"b option 0 1 0 :",
976
596
"c option 0 1 1 0 :",
978
598
index = self.get_knit_index(transport, "filename", "r")
979
self.assertEqual({("b",):(("a",),), ("c",):(("b",), ("a",))},
980
index.get_parent_map([("b",), ("c",)]))
599
self.assertEqual(["a"], index.get_parents("b"))
600
self.assertEqual(["b", "a"], index.get_parents("c"))
982
602
def test_write_utf8_version_id(self):
983
603
unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
984
604
utf8_revision_id = unicode_revision_id.encode('utf-8')
985
605
transport = MockTransport([
988
608
index = self.get_knit_index(transport, "filename", "r")
990
((utf8_revision_id,), ["option"], ((utf8_revision_id,), 0, 1), [])])
991
call = transport.calls.pop(0)
992
# call[1][1] is a StringIO - we can't test it by simple equality.
993
self.assertEqual('put_file_non_atomic', call[0])
994
self.assertEqual('filename.kndx', call[1][0])
995
# With no history, _KndxIndex writes a new index:
996
self.assertEqual(_KndxIndex.HEADER +
997
"\n%s option 0 1 :" % (utf8_revision_id,),
998
call[1][1].getvalue())
999
self.assertEqual({'create_parent_dir': True}, call[2])
609
index.add_version(utf8_revision_id, ["option"], (None, 0, 1), [])
610
self.assertEqual(("append_bytes", ("filename",
611
"\n%s option 0 1 :" % (utf8_revision_id,)),
613
transport.calls.pop(0))
1001
615
def test_write_utf8_parents(self):
1002
616
unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
1003
617
utf8_revision_id = unicode_revision_id.encode('utf-8')
1004
618
transport = MockTransport([
1007
index = self.get_knit_index(transport, "filename", "r")
1009
(("version",), ["option"], (("version",), 0, 1), [(utf8_revision_id,)])])
1010
call = transport.calls.pop(0)
1011
# call[1][1] is a StringIO - we can't test it by simple equality.
1012
self.assertEqual('put_file_non_atomic', call[0])
1013
self.assertEqual('filename.kndx', call[1][0])
1014
# With no history, _KndxIndex writes a new index:
1015
self.assertEqual(_KndxIndex.HEADER +
1016
"\nversion option 0 1 .%s :" % (utf8_revision_id,),
1017
call[1][1].getvalue())
1018
self.assertEqual({'create_parent_dir': True}, call[2])
1020
def test_keys(self):
1021
transport = MockTransport([
1024
index = self.get_knit_index(transport, "filename", "r")
1026
self.assertEqual(set(), index.keys())
1028
index.add_records([(("a",), ["option"], (("a",), 0, 1), [])])
1029
self.assertEqual(set([("a",)]), index.keys())
1031
index.add_records([(("a",), ["option"], (("a",), 0, 1), [])])
1032
self.assertEqual(set([("a",)]), index.keys())
1034
index.add_records([(("b",), ["option"], (("b",), 0, 1), [])])
1035
self.assertEqual(set([("a",), ("b",)]), index.keys())
1037
def add_a_b(self, index, random_id=None):
1039
if random_id is not None:
1040
kwargs["random_id"] = random_id
1042
(("a",), ["option"], (("a",), 0, 1), [("b",)]),
1043
(("a",), ["opt"], (("a",), 1, 2), [("c",)]),
1044
(("b",), ["option"], (("b",), 2, 3), [("a",)])
1047
def assertIndexIsAB(self, index):
1052
index.get_parent_map(index.keys()))
1053
self.assertEqual((("a",), 1, 2), index.get_position(("a",)))
1054
self.assertEqual((("b",), 2, 3), index.get_position(("b",)))
1055
self.assertEqual(["opt"], index.get_options(("a",)))
621
index = self.get_knit_index(transport, "filename", "r")
622
index.add_version("version", ["option"], (None, 0, 1), [utf8_revision_id])
623
self.assertEqual(("append_bytes", ("filename",
624
"\nversion option 0 1 .%s :" % (utf8_revision_id,)),
626
transport.calls.pop(0))
628
def test_get_graph(self):
629
transport = MockTransport()
630
index = self.get_knit_index(transport, "filename", "w", create=True)
631
self.assertEqual([], index.get_graph())
633
index.add_version("a", ["option"], (None, 0, 1), ["b"])
634
self.assertEqual([("a", ["b"])], index.get_graph())
636
index.add_version("c", ["option"], (None, 0, 1), ["d"])
637
self.assertEqual([("a", ["b"]), ("c", ["d"])],
638
sorted(index.get_graph()))
640
def test_get_ancestry(self):
641
transport = MockTransport([
644
"b option 0 1 0 .e :",
645
"c option 0 1 1 0 :",
646
"d option 0 1 2 .f :"
648
index = self.get_knit_index(transport, "filename", "r")
650
self.assertEqual([], index.get_ancestry([]))
651
self.assertEqual(["a"], index.get_ancestry(["a"]))
652
self.assertEqual(["a", "b"], index.get_ancestry(["b"]))
653
self.assertEqual(["a", "b", "c"], index.get_ancestry(["c"]))
654
self.assertEqual(["a", "b", "c", "d"], index.get_ancestry(["d"]))
655
self.assertEqual(["a", "b"], index.get_ancestry(["a", "b"]))
656
self.assertEqual(["a", "b", "c"], index.get_ancestry(["a", "c"]))
658
self.assertRaises(RevisionNotPresent, index.get_ancestry, ["e"])
660
def test_get_ancestry_with_ghosts(self):
661
transport = MockTransport([
664
"b option 0 1 0 .e :",
665
"c option 0 1 0 .f .g :",
666
"d option 0 1 2 .h .j .k :"
668
index = self.get_knit_index(transport, "filename", "r")
670
self.assertEqual([], index.get_ancestry_with_ghosts([]))
671
self.assertEqual(["a"], index.get_ancestry_with_ghosts(["a"]))
672
self.assertEqual(["a", "e", "b"],
673
index.get_ancestry_with_ghosts(["b"]))
674
self.assertEqual(["a", "g", "f", "c"],
675
index.get_ancestry_with_ghosts(["c"]))
676
self.assertEqual(["a", "g", "f", "c", "k", "j", "h", "d"],
677
index.get_ancestry_with_ghosts(["d"]))
678
self.assertEqual(["a", "e", "b"],
679
index.get_ancestry_with_ghosts(["a", "b"]))
680
self.assertEqual(["a", "g", "f", "c"],
681
index.get_ancestry_with_ghosts(["a", "c"]))
683
["a", "g", "f", "c", "e", "b", "k", "j", "h", "d"],
684
index.get_ancestry_with_ghosts(["b", "d"]))
686
self.assertRaises(RevisionNotPresent,
687
index.get_ancestry_with_ghosts, ["e"])
689
def test_iter_parents(self):
690
transport = MockTransport()
691
index = self.get_knit_index(transport, "filename", "w", create=True)
693
index.add_version('r0', ['option'], (None, 0, 1), [])
695
index.add_version('r1', ['option'], (None, 0, 1), ['r0'])
697
index.add_version('r2', ['option'], (None, 0, 1), ['r1', 'r0'])
699
# cases: each sample data individually:
700
self.assertEqual(set([('r0', ())]),
701
set(index.iter_parents(['r0'])))
702
self.assertEqual(set([('r1', ('r0', ))]),
703
set(index.iter_parents(['r1'])))
704
self.assertEqual(set([('r2', ('r1', 'r0'))]),
705
set(index.iter_parents(['r2'])))
706
# no nodes returned for a missing node
707
self.assertEqual(set(),
708
set(index.iter_parents(['missing'])))
709
# 1 node returned with missing nodes skipped
710
self.assertEqual(set([('r1', ('r0', ))]),
711
set(index.iter_parents(['ghost1', 'r1', 'ghost'])))
713
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
714
set(index.iter_parents(['r0', 'r1'])))
715
# 2 nodes returned, missing skipped
716
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
717
set(index.iter_parents(['a', 'r0', 'b', 'r1', 'c'])))
719
def test_num_versions(self):
720
transport = MockTransport([
723
index = self.get_knit_index(transport, "filename", "r")
725
self.assertEqual(0, index.num_versions())
726
self.assertEqual(0, len(index))
728
index.add_version("a", ["option"], (None, 0, 1), [])
729
self.assertEqual(1, index.num_versions())
730
self.assertEqual(1, len(index))
732
index.add_version("a", ["option2"], (None, 1, 2), [])
733
self.assertEqual(1, index.num_versions())
734
self.assertEqual(1, len(index))
736
index.add_version("b", ["option"], (None, 0, 1), [])
737
self.assertEqual(2, index.num_versions())
738
self.assertEqual(2, len(index))
740
def test_get_versions(self):
741
transport = MockTransport([
744
index = self.get_knit_index(transport, "filename", "r")
746
self.assertEqual([], index.get_versions())
748
index.add_version("a", ["option"], (None, 0, 1), [])
749
self.assertEqual(["a"], index.get_versions())
751
index.add_version("a", ["option"], (None, 0, 1), [])
752
self.assertEqual(["a"], index.get_versions())
754
index.add_version("b", ["option"], (None, 0, 1), [])
755
self.assertEqual(["a", "b"], index.get_versions())
757
def test_add_version(self):
758
transport = MockTransport([
761
index = self.get_knit_index(transport, "filename", "r")
763
index.add_version("a", ["option"], (None, 0, 1), ["b"])
764
self.assertEqual(("append_bytes",
765
("filename", "\na option 0 1 .b :"),
766
{}), transport.calls.pop(0))
767
self.assertTrue(index.has_version("a"))
768
self.assertEqual(1, index.num_versions())
769
self.assertEqual((None, 0, 1), index.get_position("a"))
770
self.assertEqual(["option"], index.get_options("a"))
771
self.assertEqual(["b"], index.get_parents_with_ghosts("a"))
773
index.add_version("a", ["opt"], (None, 1, 2), ["c"])
774
self.assertEqual(("append_bytes",
775
("filename", "\na opt 1 2 .c :"),
776
{}), transport.calls.pop(0))
777
self.assertTrue(index.has_version("a"))
778
self.assertEqual(1, index.num_versions())
779
self.assertEqual((None, 1, 2), index.get_position("a"))
780
self.assertEqual(["opt"], index.get_options("a"))
781
self.assertEqual(["c"], index.get_parents_with_ghosts("a"))
783
index.add_version("b", ["option"], (None, 2, 3), ["a"])
784
self.assertEqual(("append_bytes",
785
("filename", "\nb option 2 3 0 :"),
786
{}), transport.calls.pop(0))
787
self.assertTrue(index.has_version("b"))
788
self.assertEqual(2, index.num_versions())
789
self.assertEqual((None, 2, 3), index.get_position("b"))
790
self.assertEqual(["option"], index.get_options("b"))
791
self.assertEqual(["a"], index.get_parents_with_ghosts("b"))
1057
793
def test_add_versions(self):
1058
794
transport = MockTransport([
1061
797
index = self.get_knit_index(transport, "filename", "r")
1064
call = transport.calls.pop(0)
1065
# call[1][1] is a StringIO - we can't test it by simple equality.
1066
self.assertEqual('put_file_non_atomic', call[0])
1067
self.assertEqual('filename.kndx', call[1][0])
1068
# With no history, _KndxIndex writes a new index:
800
("a", ["option"], (None, 0, 1), ["b"]),
801
("a", ["opt"], (None, 1, 2), ["c"]),
802
("b", ["option"], (None, 2, 3), ["a"])
804
self.assertEqual(("append_bytes", ("filename",
1071
805
"\na option 0 1 .b :"
1072
806
"\na opt 1 2 .c :"
1073
"\nb option 2 3 0 :",
1074
call[1][1].getvalue())
1075
self.assertEqual({'create_parent_dir': True}, call[2])
1076
self.assertIndexIsAB(index)
808
), {}), transport.calls.pop(0))
809
self.assertTrue(index.has_version("a"))
810
self.assertTrue(index.has_version("b"))
811
self.assertEqual(2, index.num_versions())
812
self.assertEqual((None, 1, 2), index.get_position("a"))
813
self.assertEqual((None, 2, 3), index.get_position("b"))
814
self.assertEqual(["opt"], index.get_options("a"))
815
self.assertEqual(["option"], index.get_options("b"))
816
self.assertEqual(["c"], index.get_parents_with_ghosts("a"))
817
self.assertEqual(["a"], index.get_parents_with_ghosts("b"))
1078
819
def test_add_versions_random_id_is_accepted(self):
1079
820
transport = MockTransport([
1082
823
index = self.get_knit_index(transport, "filename", "r")
1083
self.add_a_b(index, random_id=True)
826
("a", ["option"], (None, 0, 1), ["b"]),
827
("a", ["opt"], (None, 1, 2), ["c"]),
828
("b", ["option"], (None, 2, 3), ["a"])
1085
831
def test_delay_create_and_add_versions(self):
1086
832
transport = MockTransport()
1088
index = self.get_knit_index(transport, "filename", "w")
834
index = self.get_knit_index(transport, "filename", "w",
835
create=True, file_mode="wb", create_parent_dir=True,
836
delay_create=True, dir_mode=0777)
1090
837
self.assertEqual([], transport.calls)
1093
#[ {"dir_mode": 0777, "create_parent_dir": True, "mode": "wb"},
1095
# Two calls: one during which we load the existing index (and when its
1096
# missing create it), then a second where we write the contents out.
1097
self.assertEqual(2, len(transport.calls))
1098
call = transport.calls.pop(0)
1099
self.assertEqual('put_file_non_atomic', call[0])
1100
self.assertEqual('filename.kndx', call[1][0])
1101
# With no history, _KndxIndex writes a new index:
1102
self.assertEqual(_KndxIndex.HEADER, call[1][1].getvalue())
1103
self.assertEqual({'create_parent_dir': True}, call[2])
1104
call = transport.calls.pop(0)
1105
# call[1][1] is a StringIO - we can't test it by simple equality.
1106
self.assertEqual('put_file_non_atomic', call[0])
1107
self.assertEqual('filename.kndx', call[1][0])
1108
# With no history, _KndxIndex writes a new index:
840
("a", ["option"], (None, 0, 1), ["b"]),
841
("a", ["opt"], (None, 1, 2), ["c"]),
842
("b", ["option"], (None, 2, 3), ["a"])
844
name, (filename, f), kwargs = transport.calls.pop(0)
845
self.assertEqual("put_file_non_atomic", name)
847
{"dir_mode": 0777, "create_parent_dir": True, "mode": "wb"},
849
self.assertEqual("filename", filename)
1111
852
"\na option 0 1 .b :"
1112
853
"\na opt 1 2 .c :"
1113
854
"\nb option 2 3 0 :",
1114
call[1][1].getvalue())
1115
self.assertEqual({'create_parent_dir': True}, call[2])
1117
def assertTotalBuildSize(self, size, keys, positions):
1118
self.assertEqual(size,
1119
knit._get_total_build_size(None, keys, positions))
1121
def test__get_total_build_size(self):
1123
('a',): (('fulltext', False), (('a',), 0, 100), None),
1124
('b',): (('line-delta', False), (('b',), 100, 21), ('a',)),
1125
('c',): (('line-delta', False), (('c',), 121, 35), ('b',)),
1126
('d',): (('line-delta', False), (('d',), 156, 12), ('b',)),
1128
self.assertTotalBuildSize(100, [('a',)], positions)
1129
self.assertTotalBuildSize(121, [('b',)], positions)
1130
# c needs both a & b
1131
self.assertTotalBuildSize(156, [('c',)], positions)
1132
# we shouldn't count 'b' twice
1133
self.assertTotalBuildSize(156, [('b',), ('c',)], positions)
1134
self.assertTotalBuildSize(133, [('d',)], positions)
1135
self.assertTotalBuildSize(168, [('c',), ('d',)], positions)
857
def test_has_version(self):
858
transport = MockTransport([
862
index = self.get_knit_index(transport, "filename", "r")
864
self.assertTrue(index.has_version("a"))
865
self.assertFalse(index.has_version("b"))
1137
867
def test_get_position(self):
1138
868
transport = MockTransport([
1140
870
"a option 0 1 :",
1141
871
"b option 1 2 :"
1143
873
index = self.get_knit_index(transport, "filename", "r")
1145
self.assertEqual((("a",), 0, 1), index.get_position(("a",)))
1146
self.assertEqual((("b",), 1, 2), index.get_position(("b",)))
875
self.assertEqual((None, 0, 1), index.get_position("a"))
876
self.assertEqual((None, 1, 2), index.get_position("b"))
1148
878
def test_get_method(self):
1149
879
transport = MockTransport([
1151
881
"a fulltext,unknown 0 1 :",
1152
882
"b unknown,line-delta 1 2 :",
1261
1024
def test_invalid_size(self):
1262
1025
transport = MockTransport([
1264
1027
"a option 1 1v :",
1266
index = self.get_knit_index(transport, 'filename', 'r')
1268
self.assertRaises(errors.KnitCorrupt, index.keys)
1030
self.assertRaises(errors.KnitCorrupt,
1031
self.get_knit_index, transport, 'filename', 'r')
1269
1032
except TypeError, e:
1270
1033
if (str(e) == ('exceptions must be strings, classes, or instances,'
1271
' not exceptions.ValueError')):
1034
' not exceptions.ValueError')
1035
and sys.version_info[0:2] >= (2,5)):
1272
1036
self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
1273
1037
' raising new style exceptions with python'
1278
def test_scan_unvalidated_index_not_implemented(self):
1279
transport = MockTransport()
1280
index = self.get_knit_index(transport, 'filename', 'r')
1282
NotImplementedError, index.scan_unvalidated_index,
1283
'dummy graph_index')
1285
NotImplementedError, index.get_missing_compression_parents)
1287
1042
def test_short_line(self):
1288
1043
transport = MockTransport([
1290
1045
"a option 0 10 :",
1291
1046
"b option 10 10 0", # This line isn't terminated, ignored
1293
1048
index = self.get_knit_index(transport, "filename", "r")
1294
self.assertEqual(set([('a',)]), index.keys())
1049
self.assertEqual(['a'], index.get_versions())
1296
1051
def test_skip_incomplete_record(self):
1297
1052
# A line with bogus data should just be skipped
1298
1053
transport = MockTransport([
1300
1055
"a option 0 10 :",
1301
1056
"b option 10 10 0", # This line isn't terminated, ignored
1302
1057
"c option 20 10 0 :", # Properly terminated, and starts with '\n'
1304
1059
index = self.get_knit_index(transport, "filename", "r")
1305
self.assertEqual(set([('a',), ('c',)]), index.keys())
1060
self.assertEqual(['a', 'c'], index.get_versions())
1307
1062
def test_trailing_characters(self):
1308
1063
# A line with bogus data should just be skipped
1309
1064
transport = MockTransport([
1311
1066
"a option 0 10 :",
1312
1067
"b option 10 10 0 :a", # This line has extra trailing characters
1313
1068
"c option 20 10 0 :", # Properly terminated, and starts with '\n'
1315
1070
index = self.get_knit_index(transport, "filename", "r")
1316
self.assertEqual(set([('a',), ('c',)]), index.keys())
1071
self.assertEqual(['a', 'c'], index.get_versions())
1319
1074
class LowLevelKnitIndexTests_c(LowLevelKnitIndexTests):
1321
_test_needs_features = [compiled_knit_feature]
1323
def get_knit_index(self, transport, name, mode):
1324
mapper = ConstantMapper(name)
1325
from bzrlib._knit_load_data_pyx import _load_data_c
1326
self.overrideAttr(knit, '_load_data', _load_data_c)
1327
allow_writes = lambda: mode == 'w'
1328
return _KndxIndex(transport, mapper, lambda:None,
1329
allow_writes, lambda:True)
1332
class Test_KnitAnnotator(TestCaseWithMemoryTransport):
1334
def make_annotator(self):
1335
factory = knit.make_pack_factory(True, True, 1)
1336
vf = factory(self.get_transport())
1337
return knit._KnitAnnotator(vf)
1339
def test__expand_fulltext(self):
1340
ann = self.make_annotator()
1341
rev_key = ('rev-id',)
1342
ann._num_compression_children[rev_key] = 1
1343
res = ann._expand_record(rev_key, (('parent-id',),), None,
1344
['line1\n', 'line2\n'], ('fulltext', True))
1345
# The content object and text lines should be cached appropriately
1346
self.assertEqual(['line1\n', 'line2'], res)
1347
content_obj = ann._content_objects[rev_key]
1348
self.assertEqual(['line1\n', 'line2\n'], content_obj._lines)
1349
self.assertEqual(res, content_obj.text())
1350
self.assertEqual(res, ann._text_cache[rev_key])
1352
def test__expand_delta_comp_parent_not_available(self):
1353
# Parent isn't available yet, so we return nothing, but queue up this
1354
# node for later processing
1355
ann = self.make_annotator()
1356
rev_key = ('rev-id',)
1357
parent_key = ('parent-id',)
1358
record = ['0,1,1\n', 'new-line\n']
1359
details = ('line-delta', False)
1360
res = ann._expand_record(rev_key, (parent_key,), parent_key,
1362
self.assertEqual(None, res)
1363
self.assertTrue(parent_key in ann._pending_deltas)
1364
pending = ann._pending_deltas[parent_key]
1365
self.assertEqual(1, len(pending))
1366
self.assertEqual((rev_key, (parent_key,), record, details), pending[0])
1368
def test__expand_record_tracks_num_children(self):
1369
ann = self.make_annotator()
1370
rev_key = ('rev-id',)
1371
rev2_key = ('rev2-id',)
1372
parent_key = ('parent-id',)
1373
record = ['0,1,1\n', 'new-line\n']
1374
details = ('line-delta', False)
1375
ann._num_compression_children[parent_key] = 2
1376
ann._expand_record(parent_key, (), None, ['line1\n', 'line2\n'],
1377
('fulltext', False))
1378
res = ann._expand_record(rev_key, (parent_key,), parent_key,
1380
self.assertEqual({parent_key: 1}, ann._num_compression_children)
1381
# Expanding the second child should remove the content object, and the
1382
# num_compression_children entry
1383
res = ann._expand_record(rev2_key, (parent_key,), parent_key,
1385
self.assertFalse(parent_key in ann._content_objects)
1386
self.assertEqual({}, ann._num_compression_children)
1387
# We should not cache the content_objects for rev2 and rev, because
1388
# they do not have compression children of their own.
1389
self.assertEqual({}, ann._content_objects)
1391
def test__expand_delta_records_blocks(self):
1392
ann = self.make_annotator()
1393
rev_key = ('rev-id',)
1394
parent_key = ('parent-id',)
1395
record = ['0,1,1\n', 'new-line\n']
1396
details = ('line-delta', True)
1397
ann._num_compression_children[parent_key] = 2
1398
ann._expand_record(parent_key, (), None,
1399
['line1\n', 'line2\n', 'line3\n'],
1400
('fulltext', False))
1401
ann._expand_record(rev_key, (parent_key,), parent_key, record, details)
1402
self.assertEqual({(rev_key, parent_key): [(1, 1, 1), (3, 3, 0)]},
1403
ann._matching_blocks)
1404
rev2_key = ('rev2-id',)
1405
record = ['0,1,1\n', 'new-line\n']
1406
details = ('line-delta', False)
1407
ann._expand_record(rev2_key, (parent_key,), parent_key, record, details)
1408
self.assertEqual([(1, 1, 2), (3, 3, 0)],
1409
ann._matching_blocks[(rev2_key, parent_key)])
1411
def test__get_parent_ann_uses_matching_blocks(self):
1412
ann = self.make_annotator()
1413
rev_key = ('rev-id',)
1414
parent_key = ('parent-id',)
1415
parent_ann = [(parent_key,)]*3
1416
block_key = (rev_key, parent_key)
1417
ann._annotations_cache[parent_key] = parent_ann
1418
ann._matching_blocks[block_key] = [(0, 1, 1), (3, 3, 0)]
1419
# We should not try to access any parent_lines content, because we know
1420
# we already have the matching blocks
1421
par_ann, blocks = ann._get_parent_annotations_and_matches(rev_key,
1422
['1\n', '2\n', '3\n'], parent_key)
1423
self.assertEqual(parent_ann, par_ann)
1424
self.assertEqual([(0, 1, 1), (3, 3, 0)], blocks)
1425
self.assertEqual({}, ann._matching_blocks)
1427
def test__process_pending(self):
1428
ann = self.make_annotator()
1429
rev_key = ('rev-id',)
1432
record = ['0,1,1\n', 'new-line\n']
1433
details = ('line-delta', False)
1434
p1_record = ['line1\n', 'line2\n']
1435
ann._num_compression_children[p1_key] = 1
1436
res = ann._expand_record(rev_key, (p1_key,p2_key), p1_key,
1438
self.assertEqual(None, res)
1439
# self.assertTrue(p1_key in ann._pending_deltas)
1440
self.assertEqual({}, ann._pending_annotation)
1441
# Now insert p1, and we should be able to expand the delta
1442
res = ann._expand_record(p1_key, (), None, p1_record,
1443
('fulltext', False))
1444
self.assertEqual(p1_record, res)
1445
ann._annotations_cache[p1_key] = [(p1_key,)]*2
1446
res = ann._process_pending(p1_key)
1447
self.assertEqual([], res)
1448
self.assertFalse(p1_key in ann._pending_deltas)
1449
self.assertTrue(p2_key in ann._pending_annotation)
1450
self.assertEqual({p2_key: [(rev_key, (p1_key, p2_key))]},
1451
ann._pending_annotation)
1452
# Now fill in parent 2, and pending annotation should be satisfied
1453
res = ann._expand_record(p2_key, (), None, [], ('fulltext', False))
1454
ann._annotations_cache[p2_key] = []
1455
res = ann._process_pending(p2_key)
1456
self.assertEqual([rev_key], res)
1457
self.assertEqual({}, ann._pending_annotation)
1458
self.assertEqual({}, ann._pending_deltas)
1460
def test_record_delta_removes_basis(self):
1461
ann = self.make_annotator()
1462
ann._expand_record(('parent-id',), (), None,
1463
['line1\n', 'line2\n'], ('fulltext', False))
1464
ann._num_compression_children['parent-id'] = 2
1466
def test_annotate_special_text(self):
1467
ann = self.make_annotator()
1469
rev1_key = ('rev-1',)
1470
rev2_key = ('rev-2',)
1471
rev3_key = ('rev-3',)
1472
spec_key = ('special:',)
1473
vf.add_lines(rev1_key, [], ['initial content\n'])
1474
vf.add_lines(rev2_key, [rev1_key], ['initial content\n',
1477
vf.add_lines(rev3_key, [rev1_key], ['initial content\n',
1480
spec_text = ('initial content\n'
1484
ann.add_special_text(spec_key, [rev2_key, rev3_key], spec_text)
1485
anns, lines = ann.annotate(spec_key)
1486
self.assertEqual([(rev1_key,),
1487
(rev2_key, rev3_key),
1491
self.assertEqualDiff(spec_text, ''.join(lines))
1076
_test_needs_features = [CompiledKnitFeature]
1078
def get_knit_index(self, *args, **kwargs):
1079
orig = knit._load_data
1081
knit._load_data = orig
1082
self.addCleanup(reset)
1083
from bzrlib._knit_load_data_c import _load_data_c
1084
knit._load_data = _load_data_c
1085
return _KnitIndex(*args, **kwargs)
1494
1089
class KnitTests(TestCaseWithTransport):
1495
1090
"""Class containing knit test helper routines."""
1497
def make_test_knit(self, annotate=False, name='test'):
1498
mapper = ConstantMapper(name)
1499
return make_file_factory(annotate, mapper)(self.get_transport())
1502
class TestBadShaError(KnitTests):
1503
"""Tests for handling of sha errors."""
1505
def test_sha_exception_has_text(self):
1506
# having the failed text included in the error allows for recovery.
1507
source = self.make_test_knit()
1508
target = self.make_test_knit(name="target")
1509
if not source._max_delta_chain:
1510
raise TestNotApplicable(
1511
"cannot get delta-caused sha failures without deltas.")
1514
broken = ('broken',)
1515
source.add_lines(basis, (), ['foo\n'])
1516
source.add_lines(broken, (basis,), ['foo\n', 'bar\n'])
1517
# Seed target with a bad basis text
1518
target.add_lines(basis, (), ['gam\n'])
1519
target.insert_record_stream(
1520
source.get_record_stream([broken], 'unordered', False))
1521
err = self.assertRaises(errors.KnitCorrupt,
1522
target.get_record_stream([broken], 'unordered', True
1523
).next().get_bytes_as, 'chunked')
1524
self.assertEqual(['gam\n', 'bar\n'], err.content)
1525
# Test for formatting with live data
1526
self.assertStartsWith(str(err), "Knit ")
1092
def make_test_knit(self, annotate=False, delay_create=False, index=None,
1095
factory = KnitPlainFactory()
1098
return KnitVersionedFile(name, get_transport('.'), access_mode='w',
1099
factory=factory, create=True,
1100
delay_create=delay_create, index=index)
1102
def assertRecordContentEqual(self, knit, version_id, candidate_content):
1103
"""Assert that some raw record content matches the raw record content
1104
for a particular version_id in the given knit.
1106
index_memo = knit._index.get_position(version_id)
1107
record = (version_id, index_memo)
1108
[(_, expected_content)] = list(knit._data.read_records_iter_raw([record]))
1109
self.assertEqual(expected_content, candidate_content)
1112
class BasicKnitTests(KnitTests):
1114
def add_stock_one_and_one_a(self, k):
1115
k.add_lines('text-1', [], split_lines(TEXT_1))
1116
k.add_lines('text-1a', ['text-1'], split_lines(TEXT_1A))
1118
def test_knit_constructor(self):
1119
"""Construct empty k"""
1120
self.make_test_knit()
1122
def test_make_explicit_index(self):
1123
"""We can supply an index to use."""
1124
knit = KnitVersionedFile('test', get_transport('.'),
1125
index='strangelove')
1126
self.assertEqual(knit._index, 'strangelove')
1128
def test_knit_add(self):
1129
"""Store one text in knit and retrieve"""
1130
k = self.make_test_knit()
1131
k.add_lines('text-1', [], split_lines(TEXT_1))
1132
self.assertTrue(k.has_version('text-1'))
1133
self.assertEqualDiff(''.join(k.get_lines('text-1')), TEXT_1)
1135
def test_knit_reload(self):
1136
# test that the content in a reloaded knit is correct
1137
k = self.make_test_knit()
1138
k.add_lines('text-1', [], split_lines(TEXT_1))
1140
k2 = KnitVersionedFile('test', get_transport('.'), access_mode='r', factory=KnitPlainFactory(), create=True)
1141
self.assertTrue(k2.has_version('text-1'))
1142
self.assertEqualDiff(''.join(k2.get_lines('text-1')), TEXT_1)
1144
def test_knit_several(self):
1145
"""Store several texts in a knit"""
1146
k = self.make_test_knit()
1147
k.add_lines('text-1', [], split_lines(TEXT_1))
1148
k.add_lines('text-2', [], split_lines(TEXT_2))
1149
self.assertEqualDiff(''.join(k.get_lines('text-1')), TEXT_1)
1150
self.assertEqualDiff(''.join(k.get_lines('text-2')), TEXT_2)
1152
def test_repeated_add(self):
1153
"""Knit traps attempt to replace existing version"""
1154
k = self.make_test_knit()
1155
k.add_lines('text-1', [], split_lines(TEXT_1))
1156
self.assertRaises(RevisionAlreadyPresent,
1158
'text-1', [], split_lines(TEXT_1))
1160
def test_empty(self):
1161
k = self.make_test_knit(True)
1162
k.add_lines('text-1', [], [])
1163
self.assertEquals(k.get_lines('text-1'), [])
1165
def test_incomplete(self):
1166
"""Test if texts without a ending line-end can be inserted and
1168
k = KnitVersionedFile('test', get_transport('.'), delta=False, create=True)
1169
k.add_lines('text-1', [], ['a\n', 'b' ])
1170
k.add_lines('text-2', ['text-1'], ['a\rb\n', 'b\n'])
1171
# reopening ensures maximum room for confusion
1172
k = KnitVersionedFile('test', get_transport('.'), delta=False, create=True)
1173
self.assertEquals(k.get_lines('text-1'), ['a\n', 'b' ])
1174
self.assertEquals(k.get_lines('text-2'), ['a\rb\n', 'b\n'])
1176
def test_delta(self):
1177
"""Expression of knit delta as lines"""
1178
k = self.make_test_knit()
1179
td = list(line_delta(TEXT_1.splitlines(True),
1180
TEXT_1A.splitlines(True)))
1181
self.assertEqualDiff(''.join(td), delta_1_1a)
1182
out = apply_line_delta(TEXT_1.splitlines(True), td)
1183
self.assertEqualDiff(''.join(out), TEXT_1A)
1185
def test_add_with_parents(self):
1186
"""Store in knit with parents"""
1187
k = self.make_test_knit()
1188
self.add_stock_one_and_one_a(k)
1189
self.assertEquals(k.get_parents('text-1'), [])
1190
self.assertEquals(k.get_parents('text-1a'), ['text-1'])
1192
def test_ancestry(self):
1193
"""Store in knit with parents"""
1194
k = self.make_test_knit()
1195
self.add_stock_one_and_one_a(k)
1196
self.assertEquals(set(k.get_ancestry(['text-1a'])), set(['text-1a', 'text-1']))
1198
def test_add_delta(self):
1199
"""Store in knit with parents"""
1200
k = KnitVersionedFile('test', get_transport('.'), factory=KnitPlainFactory(),
1201
delta=True, create=True)
1202
self.add_stock_one_and_one_a(k)
1204
self.assertEqualDiff(''.join(k.get_lines('text-1a')), TEXT_1A)
1206
def test_add_delta_knit_graph_index(self):
1207
"""Does adding work with a KnitGraphIndex."""
1208
index = InMemoryGraphIndex(2)
1209
knit_index = KnitGraphIndex(index, add_callback=index.add_nodes,
1211
k = KnitVersionedFile('test', get_transport('.'),
1212
delta=True, create=True, index=knit_index)
1213
self.add_stock_one_and_one_a(k)
1215
self.assertEqualDiff(''.join(k.get_lines('text-1a')), TEXT_1A)
1216
# check the index had the right data added.
1217
self.assertEqual(set([
1218
(index, ('text-1', ), ' 0 127', ((), ())),
1219
(index, ('text-1a', ), ' 127 140', ((('text-1', ),), (('text-1', ),))),
1220
]), set(index.iter_all_entries()))
1221
# we should not have a .kndx file
1222
self.assertFalse(get_transport('.').has('test.kndx'))
1224
def test_annotate(self):
1226
k = KnitVersionedFile('knit', get_transport('.'), factory=KnitAnnotateFactory(),
1227
delta=True, create=True)
1228
self.insert_and_test_small_annotate(k)
1230
def insert_and_test_small_annotate(self, k):
1231
"""test annotation with k works correctly."""
1232
k.add_lines('text-1', [], ['a\n', 'b\n'])
1233
k.add_lines('text-2', ['text-1'], ['a\n', 'c\n'])
1235
origins = k.annotate('text-2')
1236
self.assertEquals(origins[0], ('text-1', 'a\n'))
1237
self.assertEquals(origins[1], ('text-2', 'c\n'))
1239
def test_annotate_fulltext(self):
1241
k = KnitVersionedFile('knit', get_transport('.'), factory=KnitAnnotateFactory(),
1242
delta=False, create=True)
1243
self.insert_and_test_small_annotate(k)
1245
def test_annotate_merge_1(self):
1246
k = self.make_test_knit(True)
1247
k.add_lines('text-a1', [], ['a\n', 'b\n'])
1248
k.add_lines('text-a2', [], ['d\n', 'c\n'])
1249
k.add_lines('text-am', ['text-a1', 'text-a2'], ['d\n', 'b\n'])
1250
origins = k.annotate('text-am')
1251
self.assertEquals(origins[0], ('text-a2', 'd\n'))
1252
self.assertEquals(origins[1], ('text-a1', 'b\n'))
1254
def test_annotate_merge_2(self):
1255
k = self.make_test_knit(True)
1256
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1257
k.add_lines('text-a2', [], ['x\n', 'y\n', 'z\n'])
1258
k.add_lines('text-am', ['text-a1', 'text-a2'], ['a\n', 'y\n', 'c\n'])
1259
origins = k.annotate('text-am')
1260
self.assertEquals(origins[0], ('text-a1', 'a\n'))
1261
self.assertEquals(origins[1], ('text-a2', 'y\n'))
1262
self.assertEquals(origins[2], ('text-a1', 'c\n'))
1264
def test_annotate_merge_9(self):
1265
k = self.make_test_knit(True)
1266
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1267
k.add_lines('text-a2', [], ['x\n', 'y\n', 'z\n'])
1268
k.add_lines('text-am', ['text-a1', 'text-a2'], ['k\n', 'y\n', 'c\n'])
1269
origins = k.annotate('text-am')
1270
self.assertEquals(origins[0], ('text-am', 'k\n'))
1271
self.assertEquals(origins[1], ('text-a2', 'y\n'))
1272
self.assertEquals(origins[2], ('text-a1', 'c\n'))
1274
def test_annotate_merge_3(self):
1275
k = self.make_test_knit(True)
1276
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1277
k.add_lines('text-a2', [] ,['x\n', 'y\n', 'z\n'])
1278
k.add_lines('text-am', ['text-a1', 'text-a2'], ['k\n', 'y\n', 'z\n'])
1279
origins = k.annotate('text-am')
1280
self.assertEquals(origins[0], ('text-am', 'k\n'))
1281
self.assertEquals(origins[1], ('text-a2', 'y\n'))
1282
self.assertEquals(origins[2], ('text-a2', 'z\n'))
1284
def test_annotate_merge_4(self):
1285
k = self.make_test_knit(True)
1286
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1287
k.add_lines('text-a2', [], ['x\n', 'y\n', 'z\n'])
1288
k.add_lines('text-a3', ['text-a1'], ['a\n', 'b\n', 'p\n'])
1289
k.add_lines('text-am', ['text-a2', 'text-a3'], ['a\n', 'b\n', 'z\n'])
1290
origins = k.annotate('text-am')
1291
self.assertEquals(origins[0], ('text-a1', 'a\n'))
1292
self.assertEquals(origins[1], ('text-a1', 'b\n'))
1293
self.assertEquals(origins[2], ('text-a2', 'z\n'))
1295
def test_annotate_merge_5(self):
1296
k = self.make_test_knit(True)
1297
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1298
k.add_lines('text-a2', [], ['d\n', 'e\n', 'f\n'])
1299
k.add_lines('text-a3', [], ['x\n', 'y\n', 'z\n'])
1300
k.add_lines('text-am',
1301
['text-a1', 'text-a2', 'text-a3'],
1302
['a\n', 'e\n', 'z\n'])
1303
origins = k.annotate('text-am')
1304
self.assertEquals(origins[0], ('text-a1', 'a\n'))
1305
self.assertEquals(origins[1], ('text-a2', 'e\n'))
1306
self.assertEquals(origins[2], ('text-a3', 'z\n'))
1308
def test_annotate_file_cherry_pick(self):
1309
k = self.make_test_knit(True)
1310
k.add_lines('text-1', [], ['a\n', 'b\n', 'c\n'])
1311
k.add_lines('text-2', ['text-1'], ['d\n', 'e\n', 'f\n'])
1312
k.add_lines('text-3', ['text-2', 'text-1'], ['a\n', 'b\n', 'c\n'])
1313
origins = k.annotate('text-3')
1314
self.assertEquals(origins[0], ('text-1', 'a\n'))
1315
self.assertEquals(origins[1], ('text-1', 'b\n'))
1316
self.assertEquals(origins[2], ('text-1', 'c\n'))
1318
def _test_join_with_factories(self, k1_factory, k2_factory):
1319
k1 = KnitVersionedFile('test1', get_transport('.'), factory=k1_factory, create=True)
1320
k1.add_lines('text-a', [], ['a1\n', 'a2\n', 'a3\n'])
1321
k1.add_lines('text-b', ['text-a'], ['a1\n', 'b2\n', 'a3\n'])
1322
k1.add_lines('text-c', [], ['c1\n', 'c2\n', 'c3\n'])
1323
k1.add_lines('text-d', ['text-c'], ['c1\n', 'd2\n', 'd3\n'])
1324
k1.add_lines('text-m', ['text-b', 'text-d'], ['a1\n', 'b2\n', 'd3\n'])
1325
k2 = KnitVersionedFile('test2', get_transport('.'), factory=k2_factory, create=True)
1326
count = k2.join(k1, version_ids=['text-m'])
1327
self.assertEquals(count, 5)
1328
self.assertTrue(k2.has_version('text-a'))
1329
self.assertTrue(k2.has_version('text-c'))
1330
origins = k2.annotate('text-m')
1331
self.assertEquals(origins[0], ('text-a', 'a1\n'))
1332
self.assertEquals(origins[1], ('text-b', 'b2\n'))
1333
self.assertEquals(origins[2], ('text-d', 'd3\n'))
1335
def test_knit_join_plain_to_plain(self):
1336
"""Test joining a plain knit with a plain knit."""
1337
self._test_join_with_factories(KnitPlainFactory(), KnitPlainFactory())
1339
def test_knit_join_anno_to_anno(self):
1340
"""Test joining an annotated knit with an annotated knit."""
1341
self._test_join_with_factories(None, None)
1343
def test_knit_join_anno_to_plain(self):
1344
"""Test joining an annotated knit with a plain knit."""
1345
self._test_join_with_factories(None, KnitPlainFactory())
1347
def test_knit_join_plain_to_anno(self):
1348
"""Test joining a plain knit with an annotated knit."""
1349
self._test_join_with_factories(KnitPlainFactory(), None)
1351
def test_reannotate(self):
1352
k1 = KnitVersionedFile('knit1', get_transport('.'),
1353
factory=KnitAnnotateFactory(), create=True)
1355
k1.add_lines('text-a', [], ['a\n', 'b\n'])
1357
k1.add_lines('text-b', ['text-a'], ['a\n', 'c\n'])
1359
k2 = KnitVersionedFile('test2', get_transport('.'),
1360
factory=KnitAnnotateFactory(), create=True)
1361
k2.join(k1, version_ids=['text-b'])
1364
k1.add_lines('text-X', ['text-b'], ['a\n', 'b\n'])
1366
k2.add_lines('text-c', ['text-b'], ['z\n', 'c\n'])
1368
k2.add_lines('text-Y', ['text-b'], ['b\n', 'c\n'])
1370
# test-c will have index 3
1371
k1.join(k2, version_ids=['text-c'])
1373
lines = k1.get_lines('text-c')
1374
self.assertEquals(lines, ['z\n', 'c\n'])
1376
origins = k1.annotate('text-c')
1377
self.assertEquals(origins[0], ('text-c', 'z\n'))
1378
self.assertEquals(origins[1], ('text-b', 'c\n'))
1380
def test_get_line_delta_texts(self):
1381
"""Make sure we can call get_texts on text with reused line deltas"""
1382
k1 = KnitVersionedFile('test1', get_transport('.'),
1383
factory=KnitPlainFactory(), create=True)
1388
parents = ['%d' % (t-1)]
1389
k1.add_lines('%d' % t, parents, ['hello\n'] * t)
1390
k1.get_texts(('%d' % t) for t in range(3))
1392
def test_iter_lines_reads_in_order(self):
1393
instrumented_t = get_transport('trace+memory:///')
1394
k1 = KnitVersionedFile('id', instrumented_t, create=True, delta=True)
1395
self.assertEqual([('get', 'id.kndx',)], instrumented_t._activity)
1396
# add texts with no required ordering
1397
k1.add_lines('base', [], ['text\n'])
1398
k1.add_lines('base2', [], ['text2\n'])
1400
# clear the logged activity, but preserve the list instance in case of
1401
# clones pointing at it.
1402
del instrumented_t._activity[:]
1403
# request a last-first iteration
1404
results = list(k1.iter_lines_added_or_present_in_versions(
1407
[('readv', 'id.knit', [(0, 87), (87, 89)], False, None)],
1408
instrumented_t._activity)
1409
self.assertEqual(['text\n', 'text2\n'], results)
1411
def test_create_empty_annotated(self):
1412
k1 = self.make_test_knit(True)
1414
k1.add_lines('text-a', [], ['a\n', 'b\n'])
1415
k2 = k1.create_empty('t', MemoryTransport())
1416
self.assertTrue(isinstance(k2.factory, KnitAnnotateFactory))
1417
self.assertEqual(k1.delta, k2.delta)
1418
# the generic test checks for empty content and file class
1420
def test_knit_format(self):
1421
# this tests that a new knit index file has the expected content
1422
# and that is writes the data we expect as records are added.
1423
knit = self.make_test_knit(True)
1424
# Now knit files are not created until we first add data to them
1425
self.assertFileEqual("# bzr knit index 8\n", 'test.kndx')
1426
knit.add_lines_with_ghosts('revid', ['a_ghost'], ['a\n'])
1427
self.assertFileEqual(
1428
"# bzr knit index 8\n"
1430
"revid fulltext 0 84 .a_ghost :",
1432
knit.add_lines_with_ghosts('revid2', ['revid'], ['a\n'])
1433
self.assertFileEqual(
1434
"# bzr knit index 8\n"
1435
"\nrevid fulltext 0 84 .a_ghost :"
1436
"\nrevid2 line-delta 84 82 0 :",
1438
# we should be able to load this file again
1439
knit = KnitVersionedFile('test', get_transport('.'), access_mode='r')
1440
self.assertEqual(['revid', 'revid2'], knit.versions())
1441
# write a short write to the file and ensure that its ignored
1442
indexfile = file('test.kndx', 'ab')
1443
indexfile.write('\nrevid3 line-delta 166 82 1 2 3 4 5 .phwoar:demo ')
1445
# we should be able to load this file again
1446
knit = KnitVersionedFile('test', get_transport('.'), access_mode='w')
1447
self.assertEqual(['revid', 'revid2'], knit.versions())
1448
# and add a revision with the same id the failed write had
1449
knit.add_lines('revid3', ['revid2'], ['a\n'])
1450
# and when reading it revid3 should now appear.
1451
knit = KnitVersionedFile('test', get_transport('.'), access_mode='r')
1452
self.assertEqual(['revid', 'revid2', 'revid3'], knit.versions())
1453
self.assertEqual(['revid2'], knit.get_parents('revid3'))
1455
def test_delay_create(self):
1456
"""Test that passing delay_create=True creates files late"""
1457
knit = self.make_test_knit(annotate=True, delay_create=True)
1458
self.failIfExists('test.knit')
1459
self.failIfExists('test.kndx')
1460
knit.add_lines_with_ghosts('revid', ['a_ghost'], ['a\n'])
1461
self.failUnlessExists('test.knit')
1462
self.assertFileEqual(
1463
"# bzr knit index 8\n"
1465
"revid fulltext 0 84 .a_ghost :",
1468
def test_create_parent_dir(self):
1469
"""create_parent_dir can create knits in nonexistant dirs"""
1470
# Has no effect if we don't set 'delay_create'
1471
trans = get_transport('.')
1472
self.assertRaises(NoSuchFile, KnitVersionedFile, 'dir/test',
1473
trans, access_mode='w', factory=None,
1474
create=True, create_parent_dir=True)
1475
# Nothing should have changed yet
1476
knit = KnitVersionedFile('dir/test', trans, access_mode='w',
1477
factory=None, create=True,
1478
create_parent_dir=True,
1480
self.failIfExists('dir/test.knit')
1481
self.failIfExists('dir/test.kndx')
1482
self.failIfExists('dir')
1483
knit.add_lines('revid', [], ['a\n'])
1484
self.failUnlessExists('dir')
1485
self.failUnlessExists('dir/test.knit')
1486
self.assertFileEqual(
1487
"# bzr knit index 8\n"
1489
"revid fulltext 0 84 :",
1492
def test_create_mode_700(self):
1493
trans = get_transport('.')
1494
if not trans._can_roundtrip_unix_modebits():
1495
# Can't roundtrip, so no need to run this test
1497
knit = KnitVersionedFile('dir/test', trans, access_mode='w',
1498
factory=None, create=True,
1499
create_parent_dir=True,
1503
knit.add_lines('revid', [], ['a\n'])
1504
self.assertTransportMode(trans, 'dir', 0700)
1505
self.assertTransportMode(trans, 'dir/test.knit', 0600)
1506
self.assertTransportMode(trans, 'dir/test.kndx', 0600)
1508
def test_create_mode_770(self):
1509
trans = get_transport('.')
1510
if not trans._can_roundtrip_unix_modebits():
1511
# Can't roundtrip, so no need to run this test
1513
knit = KnitVersionedFile('dir/test', trans, access_mode='w',
1514
factory=None, create=True,
1515
create_parent_dir=True,
1519
knit.add_lines('revid', [], ['a\n'])
1520
self.assertTransportMode(trans, 'dir', 0770)
1521
self.assertTransportMode(trans, 'dir/test.knit', 0660)
1522
self.assertTransportMode(trans, 'dir/test.kndx', 0660)
1524
def test_create_mode_777(self):
1525
trans = get_transport('.')
1526
if not trans._can_roundtrip_unix_modebits():
1527
# Can't roundtrip, so no need to run this test
1529
knit = KnitVersionedFile('dir/test', trans, access_mode='w',
1530
factory=None, create=True,
1531
create_parent_dir=True,
1535
knit.add_lines('revid', [], ['a\n'])
1536
self.assertTransportMode(trans, 'dir', 0777)
1537
self.assertTransportMode(trans, 'dir/test.knit', 0666)
1538
self.assertTransportMode(trans, 'dir/test.kndx', 0666)
1540
def test_plan_merge(self):
1541
my_knit = self.make_test_knit(annotate=True)
1542
my_knit.add_lines('text1', [], split_lines(TEXT_1))
1543
my_knit.add_lines('text1a', ['text1'], split_lines(TEXT_1A))
1544
my_knit.add_lines('text1b', ['text1'], split_lines(TEXT_1B))
1545
plan = list(my_knit.plan_merge('text1a', 'text1b'))
1546
for plan_line, expected_line in zip(plan, AB_MERGE):
1547
self.assertEqual(plan_line, expected_line)
1549
def test_get_stream_empty(self):
1550
"""Get a data stream for an empty knit file."""
1551
k1 = self.make_test_knit()
1552
format, data_list, reader_callable = k1.get_data_stream([])
1553
self.assertEqual('knit-plain', format)
1554
self.assertEqual([], data_list)
1555
content = reader_callable(None)
1556
self.assertEqual('', content)
1557
self.assertIsInstance(content, str)
1559
def test_get_stream_one_version(self):
1560
"""Get a data stream for a single record out of a knit containing just
1563
k1 = self.make_test_knit()
1565
('text-a', [], TEXT_1),
1567
expected_data_list = [
1568
# version, options, length, parents
1569
('text-a', ['fulltext'], 122, []),
1571
for version_id, parents, lines in test_data:
1572
k1.add_lines(version_id, parents, split_lines(lines))
1574
format, data_list, reader_callable = k1.get_data_stream(['text-a'])
1575
self.assertEqual('knit-plain', format)
1576
self.assertEqual(expected_data_list, data_list)
1577
# There's only one record in the knit, so the content should be the
1578
# entire knit data file's contents.
1579
self.assertEqual(k1.transport.get_bytes(k1._data._access._filename),
1580
reader_callable(None))
1582
def test_get_stream_get_one_version_of_many(self):
1583
"""Get a data stream for just one version out of a knit containing many
1586
k1 = self.make_test_knit()
1587
# Insert the same data as test_knit_join, as they seem to cover a range
1588
# of cases (no parents, one parent, multiple parents).
1590
('text-a', [], TEXT_1),
1591
('text-b', ['text-a'], TEXT_1),
1592
('text-c', [], TEXT_1),
1593
('text-d', ['text-c'], TEXT_1),
1594
('text-m', ['text-b', 'text-d'], TEXT_1),
1596
expected_data_list = [
1597
# version, options, length, parents
1598
('text-m', ['line-delta'], 84, ['text-b', 'text-d']),
1600
for version_id, parents, lines in test_data:
1601
k1.add_lines(version_id, parents, split_lines(lines))
1603
format, data_list, reader_callable = k1.get_data_stream(['text-m'])
1604
self.assertEqual('knit-plain', format)
1605
self.assertEqual(expected_data_list, data_list)
1606
self.assertRecordContentEqual(k1, 'text-m', reader_callable(None))
1608
def test_get_stream_ghost_parent(self):
1609
"""Get a data stream for a version with a ghost parent."""
1610
k1 = self.make_test_knit()
1612
k1.add_lines('text-a', [], split_lines(TEXT_1))
1613
k1.add_lines_with_ghosts('text-b', ['text-a', 'text-ghost'],
1614
split_lines(TEXT_1))
1616
expected_data_list = [
1617
# version, options, length, parents
1618
('text-b', ['line-delta'], 84, ['text-a', 'text-ghost']),
1621
format, data_list, reader_callable = k1.get_data_stream(['text-b'])
1622
self.assertEqual('knit-plain', format)
1623
self.assertEqual(expected_data_list, data_list)
1624
self.assertRecordContentEqual(k1, 'text-b', reader_callable(None))
1626
def test_get_stream_get_multiple_records(self):
1627
"""Get a stream for multiple records of a knit."""
1628
k1 = self.make_test_knit()
1629
# Insert the same data as test_knit_join, as they seem to cover a range
1630
# of cases (no parents, one parent, multiple parents).
1632
('text-a', [], TEXT_1),
1633
('text-b', ['text-a'], TEXT_1),
1634
('text-c', [], TEXT_1),
1635
('text-d', ['text-c'], TEXT_1),
1636
('text-m', ['text-b', 'text-d'], TEXT_1),
1638
expected_data_list = [
1639
# version, options, length, parents
1640
('text-b', ['line-delta'], 84, ['text-a']),
1641
('text-d', ['line-delta'], 84, ['text-c']),
1643
for version_id, parents, lines in test_data:
1644
k1.add_lines(version_id, parents, split_lines(lines))
1646
# Note that even though we request the revision IDs in a particular
1647
# order, the data stream may return them in any order it likes. In this
1648
# case, they'll be in the order they were inserted into the knit.
1649
format, data_list, reader_callable = k1.get_data_stream(
1650
['text-d', 'text-b'])
1651
self.assertEqual('knit-plain', format)
1652
self.assertEqual(expected_data_list, data_list)
1653
self.assertRecordContentEqual(k1, 'text-b', reader_callable(84))
1654
self.assertRecordContentEqual(k1, 'text-d', reader_callable(84))
1655
self.assertEqual('', reader_callable(None),
1656
"There should be no more bytes left to read.")
1658
def test_get_stream_all(self):
1659
"""Get a data stream for all the records in a knit.
1661
This exercises fulltext records, line-delta records, records with
1662
various numbers of parents, and reading multiple records out of the
1663
callable. These cases ought to all be exercised individually by the
1664
other test_get_stream_* tests; this test is basically just paranoia.
1666
k1 = self.make_test_knit()
1667
# Insert the same data as test_knit_join, as they seem to cover a range
1668
# of cases (no parents, one parent, multiple parents).
1670
('text-a', [], TEXT_1),
1671
('text-b', ['text-a'], TEXT_1),
1672
('text-c', [], TEXT_1),
1673
('text-d', ['text-c'], TEXT_1),
1674
('text-m', ['text-b', 'text-d'], TEXT_1),
1676
expected_data_list = [
1677
# version, options, length, parents
1678
('text-a', ['fulltext'], 122, []),
1679
('text-b', ['line-delta'], 84, ['text-a']),
1680
('text-c', ['fulltext'], 121, []),
1681
('text-d', ['line-delta'], 84, ['text-c']),
1682
('text-m', ['line-delta'], 84, ['text-b', 'text-d']),
1684
for version_id, parents, lines in test_data:
1685
k1.add_lines(version_id, parents, split_lines(lines))
1687
format, data_list, reader_callable = k1.get_data_stream(
1688
['text-a', 'text-b', 'text-c', 'text-d', 'text-m'])
1689
self.assertEqual('knit-plain', format)
1690
self.assertEqual(expected_data_list, data_list)
1691
for version_id, options, length, parents in expected_data_list:
1692
bytes = reader_callable(length)
1693
self.assertRecordContentEqual(k1, version_id, bytes)
1695
def assertKnitFilesEqual(self, knit1, knit2):
1696
"""Assert that the contents of the index and data files of two knits are
1700
knit1.transport.get_bytes(knit1._data._access._filename),
1701
knit2.transport.get_bytes(knit2._data._access._filename))
1703
knit1.transport.get_bytes(knit1._index._filename),
1704
knit2.transport.get_bytes(knit2._index._filename))
1706
def test_insert_data_stream_empty(self):
1707
"""Inserting a data stream with no records should not put any data into
1710
k1 = self.make_test_knit()
1711
k1.insert_data_stream(
1712
(k1.get_format_signature(), [], lambda ignored: ''))
1713
self.assertEqual('', k1.transport.get_bytes(k1._data._access._filename),
1714
"The .knit should be completely empty.")
1715
self.assertEqual(k1._index.HEADER,
1716
k1.transport.get_bytes(k1._index._filename),
1717
"The .kndx should have nothing apart from the header.")
1719
def test_insert_data_stream_one_record(self):
1720
"""Inserting a data stream with one record from a knit with one record
1721
results in byte-identical files.
1723
source = self.make_test_knit(name='source')
1724
source.add_lines('text-a', [], split_lines(TEXT_1))
1725
data_stream = source.get_data_stream(['text-a'])
1727
target = self.make_test_knit(name='target')
1728
target.insert_data_stream(data_stream)
1730
self.assertKnitFilesEqual(source, target)
1732
def test_insert_data_stream_records_already_present(self):
1733
"""Insert a data stream where some records are alreday present in the
1734
target, and some not. Only the new records are inserted.
1736
source = self.make_test_knit(name='source')
1737
target = self.make_test_knit(name='target')
1738
# Insert 'text-a' into both source and target
1739
source.add_lines('text-a', [], split_lines(TEXT_1))
1740
target.insert_data_stream(source.get_data_stream(['text-a']))
1741
# Insert 'text-b' into just the source.
1742
source.add_lines('text-b', ['text-a'], split_lines(TEXT_1))
1743
# Get a data stream of both text-a and text-b, and insert it.
1744
data_stream = source.get_data_stream(['text-a', 'text-b'])
1745
target.insert_data_stream(data_stream)
1746
# The source and target will now be identical. This means the text-a
1747
# record was not added a second time.
1748
self.assertKnitFilesEqual(source, target)
1750
def test_insert_data_stream_multiple_records(self):
1751
"""Inserting a data stream of all records from a knit with multiple
1752
records results in byte-identical files.
1754
source = self.make_test_knit(name='source')
1755
source.add_lines('text-a', [], split_lines(TEXT_1))
1756
source.add_lines('text-b', ['text-a'], split_lines(TEXT_1))
1757
source.add_lines('text-c', [], split_lines(TEXT_1))
1758
data_stream = source.get_data_stream(['text-a', 'text-b', 'text-c'])
1760
target = self.make_test_knit(name='target')
1761
target.insert_data_stream(data_stream)
1763
self.assertKnitFilesEqual(source, target)
1765
def test_insert_data_stream_ghost_parent(self):
1766
"""Insert a data stream with a record that has a ghost parent."""
1767
# Make a knit with a record, text-a, that has a ghost parent.
1768
source = self.make_test_knit(name='source')
1769
source.add_lines_with_ghosts('text-a', ['text-ghost'],
1770
split_lines(TEXT_1))
1771
data_stream = source.get_data_stream(['text-a'])
1773
target = self.make_test_knit(name='target')
1774
target.insert_data_stream(data_stream)
1776
self.assertKnitFilesEqual(source, target)
1778
# The target knit object is in a consistent state, i.e. the record we
1779
# just added is immediately visible.
1780
self.assertTrue(target.has_version('text-a'))
1781
self.assertTrue(target.has_ghost('text-ghost'))
1782
self.assertEqual(split_lines(TEXT_1), target.get_lines('text-a'))
1784
def test_insert_data_stream_inconsistent_version_lines(self):
1785
"""Inserting a data stream which has different content for a version_id
1786
than already exists in the knit will raise KnitCorrupt.
1788
source = self.make_test_knit(name='source')
1789
target = self.make_test_knit(name='target')
1790
# Insert a different 'text-a' into both source and target
1791
source.add_lines('text-a', [], split_lines(TEXT_1))
1792
target.add_lines('text-a', [], split_lines(TEXT_2))
1793
# Insert a data stream with conflicting content into the target
1794
data_stream = source.get_data_stream(['text-a'])
1796
errors.KnitCorrupt, target.insert_data_stream, data_stream)
1798
def test_insert_data_stream_inconsistent_version_parents(self):
1799
"""Inserting a data stream which has different parents for a version_id
1800
than already exists in the knit will raise KnitCorrupt.
1802
source = self.make_test_knit(name='source')
1803
target = self.make_test_knit(name='target')
1804
# Insert a different 'text-a' into both source and target. They differ
1805
# only by the parents list, the content is the same.
1806
source.add_lines_with_ghosts('text-a', [], split_lines(TEXT_1))
1807
target.add_lines_with_ghosts('text-a', ['a-ghost'], split_lines(TEXT_1))
1808
# Insert a data stream with conflicting content into the target
1809
data_stream = source.get_data_stream(['text-a'])
1811
errors.KnitCorrupt, target.insert_data_stream, data_stream)
1813
def test_insert_data_stream_incompatible_format(self):
1814
"""A data stream in a different format to the target knit cannot be
1817
It will raise KnitDataStreamIncompatible.
1819
data_stream = ('fake-format-signature', [], lambda _: '')
1820
target = self.make_test_knit(name='target')
1822
errors.KnitDataStreamIncompatible,
1823
target.insert_data_stream, data_stream)
1825
# * test that a stream of "already present version, then new version"
1826
# inserts correctly.
1837
Banana cup cake recipe
1843
- self-raising flour
1847
Banana cup cake recipe
1849
- bananas (do not use plantains!!!)
1856
Banana cup cake recipe
1859
- self-raising flour
1872
AB_MERGE_TEXT="""unchanged|Banana cup cake recipe
1877
new-b|- bananas (do not use plantains!!!)
1878
unchanged|- broken tea cups
1879
new-a|- self-raising flour
1882
AB_MERGE=[tuple(l.split('|')) for l in AB_MERGE_TEXT.splitlines(True)]
1885
def line_delta(from_lines, to_lines):
1886
"""Generate line-based delta from one text to another"""
1887
s = difflib.SequenceMatcher(None, from_lines, to_lines)
1888
for op in s.get_opcodes():
1889
if op[0] == 'equal':
1891
yield '%d,%d,%d\n' % (op[1], op[2], op[4]-op[3])
1892
for i in range(op[3], op[4]):
1896
def apply_line_delta(basis_lines, delta_lines):
1897
"""Apply a line-based perfect diff
1899
basis_lines -- text to apply the patch to
1900
delta_lines -- diff instructions and content
1902
out = basis_lines[:]
1905
while i < len(delta_lines):
1907
a, b, c = map(long, l.split(','))
1909
out[offset+a:offset+b] = delta_lines[i:i+c]
1911
offset = offset + (b - a) + c
1915
class TestWeaveToKnit(KnitTests):
1917
def test_weave_to_knit_matches(self):
1918
# check that the WeaveToKnit is_compatible function
1919
# registers True for a Weave to a Knit.
1921
k = self.make_test_knit()
1922
self.failUnless(WeaveToKnit.is_compatible(w, k))
1923
self.failIf(WeaveToKnit.is_compatible(k, w))
1924
self.failIf(WeaveToKnit.is_compatible(w, w))
1925
self.failIf(WeaveToKnit.is_compatible(k, k))
1928
class TestKnitCaching(KnitTests):
1930
def create_knit(self):
1931
k = self.make_test_knit(True)
1932
k.add_lines('text-1', [], split_lines(TEXT_1))
1933
k.add_lines('text-2', [], split_lines(TEXT_2))
1936
def test_no_caching(self):
1937
k = self.create_knit()
1938
# Nothing should be cached without setting 'enable_cache'
1939
self.assertEqual({}, k._data._cache)
1941
def test_cache_data_read_raw(self):
1942
k = self.create_knit()
1944
# Now cache and read
1947
def read_one_raw(version):
1948
pos_map = k._get_components_positions([version])
1949
method, index_memo, next = pos_map[version]
1950
lst = list(k._data.read_records_iter_raw([(version, index_memo)]))
1951
self.assertEqual(1, len(lst))
1954
val = read_one_raw('text-1')
1955
self.assertEqual({'text-1':val[1]}, k._data._cache)
1958
# After clear, new reads are not cached
1959
self.assertEqual({}, k._data._cache)
1961
val2 = read_one_raw('text-1')
1962
self.assertEqual(val, val2)
1963
self.assertEqual({}, k._data._cache)
1965
def test_cache_data_read(self):
1966
k = self.create_knit()
1968
def read_one(version):
1969
pos_map = k._get_components_positions([version])
1970
method, index_memo, next = pos_map[version]
1971
lst = list(k._data.read_records_iter([(version, index_memo)]))
1972
self.assertEqual(1, len(lst))
1975
# Now cache and read
1978
val = read_one('text-2')
1979
self.assertEqual(['text-2'], k._data._cache.keys())
1980
self.assertEqual('text-2', val[0])
1981
content, digest = k._data._parse_record('text-2',
1982
k._data._cache['text-2'])
1983
self.assertEqual(content, val[1])
1984
self.assertEqual(digest, val[2])
1987
self.assertEqual({}, k._data._cache)
1989
val2 = read_one('text-2')
1990
self.assertEqual(val, val2)
1991
self.assertEqual({}, k._data._cache)
1993
def test_cache_read(self):
1994
k = self.create_knit()
1997
text = k.get_text('text-1')
1998
self.assertEqual(TEXT_1, text)
1999
self.assertEqual(['text-1'], k._data._cache.keys())
2002
self.assertEqual({}, k._data._cache)
2004
text = k.get_text('text-1')
2005
self.assertEqual(TEXT_1, text)
2006
self.assertEqual({}, k._data._cache)
1529
2009
class TestKnitIndex(KnitTests):
1657
2128
add_callback = self.catch_add
1659
2130
add_callback = None
1660
return _KnitGraphIndex(combined_index, lambda:True, deltas=deltas,
2131
return KnitGraphIndex(combined_index, deltas=deltas,
1661
2132
add_callback=add_callback)
1663
def test_keys(self):
1664
index = self.two_graph_index()
1665
self.assertEqual(set([('tail',), ('tip',), ('parent',), ('separate',)]),
2134
def test_get_graph(self):
2135
index = self.two_graph_index()
2136
self.assertEqual(set([
2137
('tip', ('parent', )),
2139
('parent', ('tail', 'ghost')),
2141
]), set(index.get_graph()))
2143
def test_get_ancestry(self):
2144
# get_ancestry is defined as eliding ghosts, not erroring.
2145
index = self.two_graph_index()
2146
self.assertEqual([], index.get_ancestry([]))
2147
self.assertEqual(['separate'], index.get_ancestry(['separate']))
2148
self.assertEqual(['tail'], index.get_ancestry(['tail']))
2149
self.assertEqual(['tail', 'parent'], index.get_ancestry(['parent']))
2150
self.assertEqual(['tail', 'parent', 'tip'], index.get_ancestry(['tip']))
2151
self.assertTrue(index.get_ancestry(['tip', 'separate']) in
2152
(['tail', 'parent', 'tip', 'separate'],
2153
['separate', 'tail', 'parent', 'tip'],
2155
# and without topo_sort
2156
self.assertEqual(set(['separate']),
2157
set(index.get_ancestry(['separate'], topo_sorted=False)))
2158
self.assertEqual(set(['tail']),
2159
set(index.get_ancestry(['tail'], topo_sorted=False)))
2160
self.assertEqual(set(['tail', 'parent']),
2161
set(index.get_ancestry(['parent'], topo_sorted=False)))
2162
self.assertEqual(set(['tail', 'parent', 'tip']),
2163
set(index.get_ancestry(['tip'], topo_sorted=False)))
2164
self.assertEqual(set(['separate', 'tail', 'parent', 'tip']),
2165
set(index.get_ancestry(['tip', 'separate'])))
2166
# asking for a ghost makes it go boom.
2167
self.assertRaises(errors.RevisionNotPresent, index.get_ancestry, ['ghost'])
2169
def test_get_ancestry_with_ghosts(self):
2170
index = self.two_graph_index()
2171
self.assertEqual([], index.get_ancestry_with_ghosts([]))
2172
self.assertEqual(['separate'], index.get_ancestry_with_ghosts(['separate']))
2173
self.assertEqual(['tail'], index.get_ancestry_with_ghosts(['tail']))
2174
self.assertTrue(index.get_ancestry_with_ghosts(['parent']) in
2175
(['tail', 'ghost', 'parent'],
2176
['ghost', 'tail', 'parent'],
2178
self.assertTrue(index.get_ancestry_with_ghosts(['tip']) in
2179
(['tail', 'ghost', 'parent', 'tip'],
2180
['ghost', 'tail', 'parent', 'tip'],
2182
self.assertTrue(index.get_ancestry_with_ghosts(['tip', 'separate']) in
2183
(['tail', 'ghost', 'parent', 'tip', 'separate'],
2184
['ghost', 'tail', 'parent', 'tip', 'separate'],
2185
['separate', 'tail', 'ghost', 'parent', 'tip'],
2186
['separate', 'ghost', 'tail', 'parent', 'tip'],
2188
# asking for a ghost makes it go boom.
2189
self.assertRaises(errors.RevisionNotPresent, index.get_ancestry_with_ghosts, ['ghost'])
2191
def test_num_versions(self):
2192
index = self.two_graph_index()
2193
self.assertEqual(4, index.num_versions())
2195
def test_get_versions(self):
2196
index = self.two_graph_index()
2197
self.assertEqual(set(['tail', 'tip', 'parent', 'separate']),
2198
set(index.get_versions()))
2200
def test_has_version(self):
2201
index = self.two_graph_index()
2202
self.assertTrue(index.has_version('tail'))
2203
self.assertFalse(index.has_version('ghost'))
1668
2205
def test_get_position(self):
1669
2206
index = self.two_graph_index()
1670
self.assertEqual((index._graph_index._indices[0], 0, 100), index.get_position(('tip',)))
1671
self.assertEqual((index._graph_index._indices[1], 100, 78), index.get_position(('parent',)))
2207
self.assertEqual((index._graph_index._indices[0], 0, 100), index.get_position('tip'))
2208
self.assertEqual((index._graph_index._indices[1], 100, 78), index.get_position('parent'))
1673
2210
def test_get_method_deltas(self):
1674
2211
index = self.two_graph_index(deltas=True)
1675
self.assertEqual('fulltext', index.get_method(('tip',)))
1676
self.assertEqual('line-delta', index.get_method(('parent',)))
2212
self.assertEqual('fulltext', index.get_method('tip'))
2213
self.assertEqual('line-delta', index.get_method('parent'))
1678
2215
def test_get_method_no_deltas(self):
1679
2216
# check that the parent-history lookup is ignored with deltas=False.
1680
2217
index = self.two_graph_index(deltas=False)
1681
self.assertEqual('fulltext', index.get_method(('tip',)))
1682
self.assertEqual('fulltext', index.get_method(('parent',)))
2218
self.assertEqual('fulltext', index.get_method('tip'))
2219
self.assertEqual('fulltext', index.get_method('parent'))
1684
2221
def test_get_options_deltas(self):
1685
2222
index = self.two_graph_index(deltas=True)
1686
self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
1687
self.assertEqual(['line-delta'], index.get_options(('parent',)))
2223
self.assertEqual(['fulltext', 'no-eol'], index.get_options('tip'))
2224
self.assertEqual(['line-delta'], index.get_options('parent'))
1689
2226
def test_get_options_no_deltas(self):
1690
2227
# check that the parent-history lookup is ignored with deltas=False.
1691
2228
index = self.two_graph_index(deltas=False)
1692
self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
1693
self.assertEqual(['fulltext'], index.get_options(('parent',)))
1695
def test_get_parent_map(self):
1696
index = self.two_graph_index()
1697
self.assertEqual({('parent',):(('tail',), ('ghost',))},
1698
index.get_parent_map([('parent',), ('ghost',)]))
2229
self.assertEqual(['fulltext', 'no-eol'], index.get_options('tip'))
2230
self.assertEqual(['fulltext'], index.get_options('parent'))
2232
def test_get_parents(self):
2233
# get_parents ignores ghosts
2234
index = self.two_graph_index()
2235
self.assertEqual(('tail', ), index.get_parents('parent'))
2236
# and errors on ghosts.
2237
self.assertRaises(errors.RevisionNotPresent,
2238
index.get_parents, 'ghost')
2240
def test_get_parents_with_ghosts(self):
2241
index = self.two_graph_index()
2242
self.assertEqual(('tail', 'ghost'), index.get_parents_with_ghosts('parent'))
2243
# and errors on ghosts.
2244
self.assertRaises(errors.RevisionNotPresent,
2245
index.get_parents_with_ghosts, 'ghost')
2247
def test_check_versions_present(self):
2248
# ghosts should not be considered present
2249
index = self.two_graph_index()
2250
self.assertRaises(RevisionNotPresent, index.check_versions_present,
2252
self.assertRaises(RevisionNotPresent, index.check_versions_present,
2254
index.check_versions_present(['tail', 'separate'])
1700
2256
def catch_add(self, entries):
1701
2257
self.caught_entries.append(entries)
1703
2259
def test_add_no_callback_errors(self):
1704
2260
index = self.two_graph_index()
1705
self.assertRaises(errors.ReadOnlyError, index.add_records,
1706
[(('new',), 'fulltext,no-eol', (None, 50, 60), ['separate'])])
2261
self.assertRaises(errors.ReadOnlyError, index.add_version,
2262
'new', 'fulltext,no-eol', (None, 50, 60), ['separate'])
1708
2264
def test_add_version_smoke(self):
1709
2265
index = self.two_graph_index(catch_adds=True)
1710
index.add_records([(('new',), 'fulltext,no-eol', (None, 50, 60),
2266
index.add_version('new', 'fulltext,no-eol', (None, 50, 60), ['separate'])
1712
2267
self.assertEqual([[(('new', ), 'N50 60', ((('separate',),),))]],
1713
2268
self.caught_entries)
1715
2270
def test_add_version_delta_not_delta_index(self):
1716
2271
index = self.two_graph_index(catch_adds=True)
1717
self.assertRaises(errors.KnitCorrupt, index.add_records,
1718
[(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
2272
self.assertRaises(errors.KnitCorrupt, index.add_version,
2273
'new', 'no-eol,line-delta', (None, 0, 100), ['parent'])
1719
2274
self.assertEqual([], self.caught_entries)
1721
2276
def test_add_version_same_dup(self):
1722
2277
index = self.two_graph_index(catch_adds=True)
1723
2278
# options can be spelt two different ways
1724
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
1725
index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [('parent',)])])
1726
# position/length are ignored (because each pack could have fulltext or
1727
# delta, and be at a different position.
1728
index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100),
1730
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000),
1732
# but neither should have added data:
1733
self.assertEqual([[], [], [], []], self.caught_entries)
2279
index.add_version('tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])
2280
index.add_version('tip', 'no-eol,fulltext', (None, 0, 100), ['parent'])
2281
# but neither should have added data.
2282
self.assertEqual([[], []], self.caught_entries)
1735
2284
def test_add_version_different_dup(self):
1736
2285
index = self.two_graph_index(deltas=True, catch_adds=True)
1737
2286
# change options
1738
self.assertRaises(errors.KnitCorrupt, index.add_records,
1739
[(('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
1740
self.assertRaises(errors.KnitCorrupt, index.add_records,
1741
[(('tip',), 'fulltext', (None, 0, 100), [('parent',)])])
2287
self.assertRaises(errors.KnitCorrupt, index.add_version,
2288
'tip', 'no-eol,line-delta', (None, 0, 100), ['parent'])
2289
self.assertRaises(errors.KnitCorrupt, index.add_version,
2290
'tip', 'line-delta,no-eol', (None, 0, 100), ['parent'])
2291
self.assertRaises(errors.KnitCorrupt, index.add_version,
2292
'tip', 'fulltext', (None, 0, 100), ['parent'])
2294
self.assertRaises(errors.KnitCorrupt, index.add_version,
2295
'tip', 'fulltext,no-eol', (None, 50, 100), ['parent'])
2296
self.assertRaises(errors.KnitCorrupt, index.add_version,
2297
'tip', 'fulltext,no-eol', (None, 0, 1000), ['parent'])
1743
self.assertRaises(errors.KnitCorrupt, index.add_records,
1744
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
2299
self.assertRaises(errors.KnitCorrupt, index.add_version,
2300
'tip', 'fulltext,no-eol', (None, 0, 100), [])
1745
2301
self.assertEqual([], self.caught_entries)
1747
2303
def test_add_versions_nodeltas(self):
1748
2304
index = self.two_graph_index(catch_adds=True)
1750
(('new',), 'fulltext,no-eol', (None, 50, 60), [('separate',)]),
1751
(('new2',), 'fulltext', (None, 0, 6), [('new',)]),
2305
index.add_versions([
2306
('new', 'fulltext,no-eol', (None, 50, 60), ['separate']),
2307
('new2', 'fulltext', (None, 0, 6), ['new']),
1753
2309
self.assertEqual([(('new', ), 'N50 60', ((('separate',),),)),
1754
2310
(('new2', ), ' 0 6', ((('new',),),))],
1769
2325
def test_add_versions_delta_not_delta_index(self):
1770
2326
index = self.two_graph_index(catch_adds=True)
1771
self.assertRaises(errors.KnitCorrupt, index.add_records,
1772
[(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
2327
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2328
[('new', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
1773
2329
self.assertEqual([], self.caught_entries)
1775
2331
def test_add_versions_random_id_accepted(self):
1776
2332
index = self.two_graph_index(catch_adds=True)
1777
index.add_records([], random_id=True)
2333
index.add_versions([], random_id=True)
1779
2335
def test_add_versions_same_dup(self):
1780
2336
index = self.two_graph_index(catch_adds=True)
1781
2337
# options can be spelt two different ways
1782
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100),
1784
index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100),
1786
# position/length are ignored (because each pack could have fulltext or
1787
# delta, and be at a different position.
1788
index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100),
1790
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000),
2338
index.add_versions([('tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])])
2339
index.add_versions([('tip', 'no-eol,fulltext', (None, 0, 100), ['parent'])])
1792
2340
# but neither should have added data.
1793
self.assertEqual([[], [], [], []], self.caught_entries)
2341
self.assertEqual([[], []], self.caught_entries)
1795
2343
def test_add_versions_different_dup(self):
1796
2344
index = self.two_graph_index(deltas=True, catch_adds=True)
1797
2345
# change options
1798
self.assertRaises(errors.KnitCorrupt, index.add_records,
1799
[(('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
1800
self.assertRaises(errors.KnitCorrupt, index.add_records,
1801
[(('tip',), 'fulltext', (None, 0, 100), [('parent',)])])
2346
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2347
[('tip', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
2348
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2349
[('tip', 'line-delta,no-eol', (None, 0, 100), ['parent'])])
2350
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2351
[('tip', 'fulltext', (None, 0, 100), ['parent'])])
2353
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2354
[('tip', 'fulltext,no-eol', (None, 50, 100), ['parent'])])
2355
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2356
[('tip', 'fulltext,no-eol', (None, 0, 1000), ['parent'])])
1803
self.assertRaises(errors.KnitCorrupt, index.add_records,
1804
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
2358
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2359
[('tip', 'fulltext,no-eol', (None, 0, 100), [])])
1805
2360
# change options in the second record
1806
self.assertRaises(errors.KnitCorrupt, index.add_records,
1807
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)]),
1808
(('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
2361
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2362
[('tip', 'fulltext,no-eol', (None, 0, 100), ['parent']),
2363
('tip', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
1809
2364
self.assertEqual([], self.caught_entries)
1811
def make_g_index_missing_compression_parent(self):
1812
graph_index = self.make_g_index('missing_comp', 2,
1813
[(('tip', ), ' 100 78',
1814
([('missing-parent', ), ('ghost', )], [('missing-parent', )]))])
1817
def make_g_index_missing_parent(self):
1818
graph_index = self.make_g_index('missing_parent', 2,
1819
[(('parent', ), ' 100 78', ([], [])),
1820
(('tip', ), ' 100 78',
1821
([('parent', ), ('missing-parent', )], [('parent', )])),
1825
def make_g_index_no_external_refs(self):
1826
graph_index = self.make_g_index('no_external_refs', 2,
1827
[(('rev', ), ' 100 78',
1828
([('parent', ), ('ghost', )], []))])
1831
def test_add_good_unvalidated_index(self):
1832
unvalidated = self.make_g_index_no_external_refs()
1833
combined = CombinedGraphIndex([unvalidated])
1834
index = _KnitGraphIndex(combined, lambda: True, deltas=True)
1835
index.scan_unvalidated_index(unvalidated)
1836
self.assertEqual(frozenset(), index.get_missing_compression_parents())
1838
def test_add_missing_compression_parent_unvalidated_index(self):
1839
unvalidated = self.make_g_index_missing_compression_parent()
1840
combined = CombinedGraphIndex([unvalidated])
1841
index = _KnitGraphIndex(combined, lambda: True, deltas=True)
1842
index.scan_unvalidated_index(unvalidated)
1843
# This also checks that its only the compression parent that is
1844
# examined, otherwise 'ghost' would also be reported as a missing
1847
frozenset([('missing-parent',)]),
1848
index.get_missing_compression_parents())
1850
def test_add_missing_noncompression_parent_unvalidated_index(self):
1851
unvalidated = self.make_g_index_missing_parent()
1852
combined = CombinedGraphIndex([unvalidated])
1853
index = _KnitGraphIndex(combined, lambda: True, deltas=True,
1854
track_external_parent_refs=True)
1855
index.scan_unvalidated_index(unvalidated)
1857
frozenset([('missing-parent',)]), index.get_missing_parents())
1859
def test_track_external_parent_refs(self):
1860
g_index = self.make_g_index('empty', 2, [])
1861
combined = CombinedGraphIndex([g_index])
1862
index = _KnitGraphIndex(combined, lambda: True, deltas=True,
1863
add_callback=self.catch_add, track_external_parent_refs=True)
1864
self.caught_entries = []
1866
(('new-key',), 'fulltext,no-eol', (None, 50, 60),
1867
[('parent-1',), ('parent-2',)])])
1869
frozenset([('parent-1',), ('parent-2',)]),
1870
index.get_missing_parents())
1872
def test_add_unvalidated_index_with_present_external_references(self):
1873
index = self.two_graph_index(deltas=True)
1874
# Ugly hack to get at one of the underlying GraphIndex objects that
1875
# two_graph_index built.
1876
unvalidated = index._graph_index._indices[1]
1877
# 'parent' is an external ref of _indices[1] (unvalidated), but is
1878
# present in _indices[0].
1879
index.scan_unvalidated_index(unvalidated)
1880
self.assertEqual(frozenset(), index.get_missing_compression_parents())
1882
def make_new_missing_parent_g_index(self, name):
1883
missing_parent = name + '-missing-parent'
1884
graph_index = self.make_g_index(name, 2,
1885
[((name + 'tip', ), ' 100 78',
1886
([(missing_parent, ), ('ghost', )], [(missing_parent, )]))])
1889
def test_add_mulitiple_unvalidated_indices_with_missing_parents(self):
1890
g_index_1 = self.make_new_missing_parent_g_index('one')
1891
g_index_2 = self.make_new_missing_parent_g_index('two')
1892
combined = CombinedGraphIndex([g_index_1, g_index_2])
1893
index = _KnitGraphIndex(combined, lambda: True, deltas=True)
1894
index.scan_unvalidated_index(g_index_1)
1895
index.scan_unvalidated_index(g_index_2)
1897
frozenset([('one-missing-parent',), ('two-missing-parent',)]),
1898
index.get_missing_compression_parents())
1900
def test_add_mulitiple_unvalidated_indices_with_mutual_dependencies(self):
1901
graph_index_a = self.make_g_index('one', 2,
1902
[(('parent-one', ), ' 100 78', ([('non-compression-parent',)], [])),
1903
(('child-of-two', ), ' 100 78',
1904
([('parent-two',)], [('parent-two',)]))])
1905
graph_index_b = self.make_g_index('two', 2,
1906
[(('parent-two', ), ' 100 78', ([('non-compression-parent',)], [])),
1907
(('child-of-one', ), ' 100 78',
1908
([('parent-one',)], [('parent-one',)]))])
1909
combined = CombinedGraphIndex([graph_index_a, graph_index_b])
1910
index = _KnitGraphIndex(combined, lambda: True, deltas=True)
1911
index.scan_unvalidated_index(graph_index_a)
1912
index.scan_unvalidated_index(graph_index_b)
1914
frozenset([]), index.get_missing_compression_parents())
2366
def test_iter_parents(self):
2367
index1 = self.make_g_index('1', 1, [
2369
(('r0', ), 'N0 100', ([], )),
2371
(('r1', ), '', ([('r0', )], ))])
2372
index2 = self.make_g_index('2', 1, [
2374
(('r2', ), 'N0 100', ([('r1', ), ('r0', )], )),
2376
combined_index = CombinedGraphIndex([index1, index2])
2377
index = KnitGraphIndex(combined_index)
2379
# cases: each sample data individually:
2380
self.assertEqual(set([('r0', ())]),
2381
set(index.iter_parents(['r0'])))
2382
self.assertEqual(set([('r1', ('r0', ))]),
2383
set(index.iter_parents(['r1'])))
2384
self.assertEqual(set([('r2', ('r1', 'r0'))]),
2385
set(index.iter_parents(['r2'])))
2386
# no nodes returned for a missing node
2387
self.assertEqual(set(),
2388
set(index.iter_parents(['missing'])))
2389
# 1 node returned with missing nodes skipped
2390
self.assertEqual(set([('r1', ('r0', ))]),
2391
set(index.iter_parents(['ghost1', 'r1', 'ghost'])))
2393
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
2394
set(index.iter_parents(['r0', 'r1'])))
2395
# 2 nodes returned, missing skipped
2396
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
2397
set(index.iter_parents(['a', 'r0', 'b', 'r1', 'c'])))
1917
2400
class TestNoParentsGraphIndexKnit(KnitTests):
1918
"""Tests for knits using _KnitGraphIndex with no parents."""
2401
"""Tests for knits using KnitGraphIndex with no parents."""
1920
2403
def make_g_index(self, name, ref_lists=0, nodes=[]):
1921
2404
builder = GraphIndexBuilder(ref_lists)
1959
2434
add_callback = self.catch_add
1961
2436
add_callback = None
1962
return _KnitGraphIndex(combined_index, lambda:True, parents=False,
2437
return KnitGraphIndex(combined_index, parents=False,
1963
2438
add_callback=add_callback)
1965
def test_keys(self):
1966
index = self.two_graph_index()
1967
self.assertEqual(set([('tail',), ('tip',), ('parent',), ('separate',)]),
2440
def test_get_graph(self):
2441
index = self.two_graph_index()
2442
self.assertEqual(set([
2447
]), set(index.get_graph()))
2449
def test_get_ancestry(self):
2450
# with no parents, ancestry is always just the key.
2451
index = self.two_graph_index()
2452
self.assertEqual([], index.get_ancestry([]))
2453
self.assertEqual(['separate'], index.get_ancestry(['separate']))
2454
self.assertEqual(['tail'], index.get_ancestry(['tail']))
2455
self.assertEqual(['parent'], index.get_ancestry(['parent']))
2456
self.assertEqual(['tip'], index.get_ancestry(['tip']))
2457
self.assertTrue(index.get_ancestry(['tip', 'separate']) in
2458
(['tip', 'separate'],
2459
['separate', 'tip'],
2461
# asking for a ghost makes it go boom.
2462
self.assertRaises(errors.RevisionNotPresent, index.get_ancestry, ['ghost'])
2464
def test_get_ancestry_with_ghosts(self):
2465
index = self.two_graph_index()
2466
self.assertEqual([], index.get_ancestry_with_ghosts([]))
2467
self.assertEqual(['separate'], index.get_ancestry_with_ghosts(['separate']))
2468
self.assertEqual(['tail'], index.get_ancestry_with_ghosts(['tail']))
2469
self.assertEqual(['parent'], index.get_ancestry_with_ghosts(['parent']))
2470
self.assertEqual(['tip'], index.get_ancestry_with_ghosts(['tip']))
2471
self.assertTrue(index.get_ancestry_with_ghosts(['tip', 'separate']) in
2472
(['tip', 'separate'],
2473
['separate', 'tip'],
2475
# asking for a ghost makes it go boom.
2476
self.assertRaises(errors.RevisionNotPresent, index.get_ancestry_with_ghosts, ['ghost'])
2478
def test_num_versions(self):
2479
index = self.two_graph_index()
2480
self.assertEqual(4, index.num_versions())
2482
def test_get_versions(self):
2483
index = self.two_graph_index()
2484
self.assertEqual(set(['tail', 'tip', 'parent', 'separate']),
2485
set(index.get_versions()))
2487
def test_has_version(self):
2488
index = self.two_graph_index()
2489
self.assertTrue(index.has_version('tail'))
2490
self.assertFalse(index.has_version('ghost'))
1970
2492
def test_get_position(self):
1971
2493
index = self.two_graph_index()
1972
self.assertEqual((index._graph_index._indices[0], 0, 100),
1973
index.get_position(('tip',)))
1974
self.assertEqual((index._graph_index._indices[1], 100, 78),
1975
index.get_position(('parent',)))
2494
self.assertEqual((index._graph_index._indices[0], 0, 100), index.get_position('tip'))
2495
self.assertEqual((index._graph_index._indices[1], 100, 78), index.get_position('parent'))
1977
2497
def test_get_method(self):
1978
2498
index = self.two_graph_index()
1979
self.assertEqual('fulltext', index.get_method(('tip',)))
1980
self.assertEqual(['fulltext'], index.get_options(('parent',)))
2499
self.assertEqual('fulltext', index.get_method('tip'))
2500
self.assertEqual(['fulltext'], index.get_options('parent'))
1982
2502
def test_get_options(self):
1983
2503
index = self.two_graph_index()
1984
self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
1985
self.assertEqual(['fulltext'], index.get_options(('parent',)))
1987
def test_get_parent_map(self):
1988
index = self.two_graph_index()
1989
self.assertEqual({('parent',):None},
1990
index.get_parent_map([('parent',), ('ghost',)]))
2504
self.assertEqual(['fulltext', 'no-eol'], index.get_options('tip'))
2505
self.assertEqual(['fulltext'], index.get_options('parent'))
2507
def test_get_parents(self):
2508
index = self.two_graph_index()
2509
self.assertEqual((), index.get_parents('parent'))
2510
# and errors on ghosts.
2511
self.assertRaises(errors.RevisionNotPresent,
2512
index.get_parents, 'ghost')
2514
def test_get_parents_with_ghosts(self):
2515
index = self.two_graph_index()
2516
self.assertEqual((), index.get_parents_with_ghosts('parent'))
2517
# and errors on ghosts.
2518
self.assertRaises(errors.RevisionNotPresent,
2519
index.get_parents_with_ghosts, 'ghost')
2521
def test_check_versions_present(self):
2522
index = self.two_graph_index()
2523
self.assertRaises(RevisionNotPresent, index.check_versions_present,
2525
self.assertRaises(RevisionNotPresent, index.check_versions_present,
2526
['tail', 'missing'])
2527
index.check_versions_present(['tail', 'separate'])
1992
2529
def catch_add(self, entries):
1993
2530
self.caught_entries.append(entries)
1995
2532
def test_add_no_callback_errors(self):
1996
2533
index = self.two_graph_index()
1997
self.assertRaises(errors.ReadOnlyError, index.add_records,
1998
[(('new',), 'fulltext,no-eol', (None, 50, 60), [('separate',)])])
2534
self.assertRaises(errors.ReadOnlyError, index.add_version,
2535
'new', 'fulltext,no-eol', (None, 50, 60), ['separate'])
2000
2537
def test_add_version_smoke(self):
2001
2538
index = self.two_graph_index(catch_adds=True)
2002
index.add_records([(('new',), 'fulltext,no-eol', (None, 50, 60), [])])
2539
index.add_version('new', 'fulltext,no-eol', (None, 50, 60), [])
2003
2540
self.assertEqual([[(('new', ), 'N50 60')]],
2004
2541
self.caught_entries)
2006
2543
def test_add_version_delta_not_delta_index(self):
2007
2544
index = self.two_graph_index(catch_adds=True)
2008
self.assertRaises(errors.KnitCorrupt, index.add_records,
2009
[(('new',), 'no-eol,line-delta', (None, 0, 100), [])])
2545
self.assertRaises(errors.KnitCorrupt, index.add_version,
2546
'new', 'no-eol,line-delta', (None, 0, 100), [])
2010
2547
self.assertEqual([], self.caught_entries)
2012
2549
def test_add_version_same_dup(self):
2013
2550
index = self.two_graph_index(catch_adds=True)
2014
2551
# options can be spelt two different ways
2015
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
2016
index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [])])
2017
# position/length are ignored (because each pack could have fulltext or
2018
# delta, and be at a different position.
2019
index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100), [])])
2020
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000), [])])
2552
index.add_version('tip', 'fulltext,no-eol', (None, 0, 100), [])
2553
index.add_version('tip', 'no-eol,fulltext', (None, 0, 100), [])
2021
2554
# but neither should have added data.
2022
self.assertEqual([[], [], [], []], self.caught_entries)
2555
self.assertEqual([[], []], self.caught_entries)
2024
2557
def test_add_version_different_dup(self):
2025
2558
index = self.two_graph_index(catch_adds=True)
2026
2559
# change options
2027
self.assertRaises(errors.KnitCorrupt, index.add_records,
2028
[(('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
2029
self.assertRaises(errors.KnitCorrupt, index.add_records,
2030
[(('tip',), 'line-delta,no-eol', (None, 0, 100), [])])
2031
self.assertRaises(errors.KnitCorrupt, index.add_records,
2032
[(('tip',), 'fulltext', (None, 0, 100), [])])
2560
self.assertRaises(errors.KnitCorrupt, index.add_version,
2561
'tip', 'no-eol,line-delta', (None, 0, 100), [])
2562
self.assertRaises(errors.KnitCorrupt, index.add_version,
2563
'tip', 'line-delta,no-eol', (None, 0, 100), [])
2564
self.assertRaises(errors.KnitCorrupt, index.add_version,
2565
'tip', 'fulltext', (None, 0, 100), [])
2567
self.assertRaises(errors.KnitCorrupt, index.add_version,
2568
'tip', 'fulltext,no-eol', (None, 50, 100), [])
2569
self.assertRaises(errors.KnitCorrupt, index.add_version,
2570
'tip', 'fulltext,no-eol', (None, 0, 1000), [])
2034
self.assertRaises(errors.KnitCorrupt, index.add_records,
2035
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
2572
self.assertRaises(errors.KnitCorrupt, index.add_version,
2573
'tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])
2036
2574
self.assertEqual([], self.caught_entries)
2038
2576
def test_add_versions(self):
2039
2577
index = self.two_graph_index(catch_adds=True)
2041
(('new',), 'fulltext,no-eol', (None, 50, 60), []),
2042
(('new2',), 'fulltext', (None, 0, 6), []),
2578
index.add_versions([
2579
('new', 'fulltext,no-eol', (None, 50, 60), []),
2580
('new2', 'fulltext', (None, 0, 6), []),
2044
2582
self.assertEqual([(('new', ), 'N50 60'), (('new2', ), ' 0 6')],
2045
2583
sorted(self.caught_entries[0]))
2048
2586
def test_add_versions_delta_not_delta_index(self):
2049
2587
index = self.two_graph_index(catch_adds=True)
2050
self.assertRaises(errors.KnitCorrupt, index.add_records,
2051
[(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
2588
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2589
[('new', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
2052
2590
self.assertEqual([], self.caught_entries)
2054
2592
def test_add_versions_parents_not_parents_index(self):
2055
2593
index = self.two_graph_index(catch_adds=True)
2056
self.assertRaises(errors.KnitCorrupt, index.add_records,
2057
[(('new',), 'no-eol,fulltext', (None, 0, 100), [('parent',)])])
2594
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2595
[('new', 'no-eol,fulltext', (None, 0, 100), ['parent'])])
2058
2596
self.assertEqual([], self.caught_entries)
2060
2598
def test_add_versions_random_id_accepted(self):
2061
2599
index = self.two_graph_index(catch_adds=True)
2062
index.add_records([], random_id=True)
2600
index.add_versions([], random_id=True)
2064
2602
def test_add_versions_same_dup(self):
2065
2603
index = self.two_graph_index(catch_adds=True)
2066
2604
# options can be spelt two different ways
2067
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
2068
index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [])])
2069
# position/length are ignored (because each pack could have fulltext or
2070
# delta, and be at a different position.
2071
index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100), [])])
2072
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000), [])])
2605
index.add_versions([('tip', 'fulltext,no-eol', (None, 0, 100), [])])
2606
index.add_versions([('tip', 'no-eol,fulltext', (None, 0, 100), [])])
2073
2607
# but neither should have added data.
2074
self.assertEqual([[], [], [], []], self.caught_entries)
2608
self.assertEqual([[], []], self.caught_entries)
2076
2610
def test_add_versions_different_dup(self):
2077
2611
index = self.two_graph_index(catch_adds=True)
2078
2612
# change options
2079
self.assertRaises(errors.KnitCorrupt, index.add_records,
2080
[(('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
2081
self.assertRaises(errors.KnitCorrupt, index.add_records,
2082
[(('tip',), 'line-delta,no-eol', (None, 0, 100), [])])
2083
self.assertRaises(errors.KnitCorrupt, index.add_records,
2084
[(('tip',), 'fulltext', (None, 0, 100), [])])
2613
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2614
[('tip', 'no-eol,line-delta', (None, 0, 100), [])])
2615
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2616
[('tip', 'line-delta,no-eol', (None, 0, 100), [])])
2617
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2618
[('tip', 'fulltext', (None, 0, 100), [])])
2620
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2621
[('tip', 'fulltext,no-eol', (None, 50, 100), [])])
2622
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2623
[('tip', 'fulltext,no-eol', (None, 0, 1000), [])])
2086
self.assertRaises(errors.KnitCorrupt, index.add_records,
2087
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
2625
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2626
[('tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])])
2088
2627
# change options in the second record
2089
self.assertRaises(errors.KnitCorrupt, index.add_records,
2090
[(('tip',), 'fulltext,no-eol', (None, 0, 100), []),
2091
(('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
2628
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2629
[('tip', 'fulltext,no-eol', (None, 0, 100), []),
2630
('tip', 'no-eol,line-delta', (None, 0, 100), [])])
2092
2631
self.assertEqual([], self.caught_entries)
2095
class TestKnitVersionedFiles(KnitTests):
2097
def assertGroupKeysForIo(self, exp_groups, keys, non_local_keys,
2098
positions, _min_buffer_size=None):
2099
kvf = self.make_test_knit()
2100
if _min_buffer_size is None:
2101
_min_buffer_size = knit._STREAM_MIN_BUFFER_SIZE
2102
self.assertEqual(exp_groups, kvf._group_keys_for_io(keys,
2103
non_local_keys, positions,
2104
_min_buffer_size=_min_buffer_size))
2106
def assertSplitByPrefix(self, expected_map, expected_prefix_order,
2108
split, prefix_order = KnitVersionedFiles._split_by_prefix(keys)
2109
self.assertEqual(expected_map, split)
2110
self.assertEqual(expected_prefix_order, prefix_order)
2112
def test__group_keys_for_io(self):
2113
ft_detail = ('fulltext', False)
2114
ld_detail = ('line-delta', False)
2122
f_a: (ft_detail, (f_a, 0, 100), None),
2123
f_b: (ld_detail, (f_b, 100, 21), f_a),
2124
f_c: (ld_detail, (f_c, 180, 15), f_b),
2125
g_a: (ft_detail, (g_a, 121, 35), None),
2126
g_b: (ld_detail, (g_b, 156, 12), g_a),
2127
g_c: (ld_detail, (g_c, 195, 13), g_a),
2129
self.assertGroupKeysForIo([([f_a], set())],
2130
[f_a], [], positions)
2131
self.assertGroupKeysForIo([([f_a], set([f_a]))],
2132
[f_a], [f_a], positions)
2133
self.assertGroupKeysForIo([([f_a, f_b], set([]))],
2134
[f_a, f_b], [], positions)
2135
self.assertGroupKeysForIo([([f_a, f_b], set([f_b]))],
2136
[f_a, f_b], [f_b], positions)
2137
self.assertGroupKeysForIo([([f_a, f_b, g_a, g_b], set())],
2138
[f_a, g_a, f_b, g_b], [], positions)
2139
self.assertGroupKeysForIo([([f_a, f_b, g_a, g_b], set())],
2140
[f_a, g_a, f_b, g_b], [], positions,
2141
_min_buffer_size=150)
2142
self.assertGroupKeysForIo([([f_a, f_b], set()), ([g_a, g_b], set())],
2143
[f_a, g_a, f_b, g_b], [], positions,
2144
_min_buffer_size=100)
2145
self.assertGroupKeysForIo([([f_c], set()), ([g_b], set())],
2146
[f_c, g_b], [], positions,
2147
_min_buffer_size=125)
2148
self.assertGroupKeysForIo([([g_b, f_c], set())],
2149
[g_b, f_c], [], positions,
2150
_min_buffer_size=125)
2152
def test__split_by_prefix(self):
2153
self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
2154
'g': [('g', 'b'), ('g', 'a')],
2156
[('f', 'a'), ('g', 'b'),
2157
('g', 'a'), ('f', 'b')])
2159
self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
2160
'g': [('g', 'b'), ('g', 'a')],
2162
[('f', 'a'), ('f', 'b'),
2163
('g', 'b'), ('g', 'a')])
2165
self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
2166
'g': [('g', 'b'), ('g', 'a')],
2168
[('f', 'a'), ('f', 'b'),
2169
('g', 'b'), ('g', 'a')])
2171
self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
2172
'g': [('g', 'b'), ('g', 'a')],
2173
'': [('a',), ('b',)]
2175
[('f', 'a'), ('g', 'b'),
2177
('g', 'a'), ('f', 'b')])
2180
class TestStacking(KnitTests):
2182
def get_basis_and_test_knit(self):
2183
basis = self.make_test_knit(name='basis')
2184
basis = RecordingVersionedFilesDecorator(basis)
2185
test = self.make_test_knit(name='test')
2186
test.add_fallback_versioned_files(basis)
2189
def test_add_fallback_versioned_files(self):
2190
basis = self.make_test_knit(name='basis')
2191
test = self.make_test_knit(name='test')
2192
# It must not error; other tests test that the fallback is referred to
2193
# when accessing data.
2194
test.add_fallback_versioned_files(basis)
2196
def test_add_lines(self):
2197
# lines added to the test are not added to the basis
2198
basis, test = self.get_basis_and_test_knit()
2200
key_basis = ('bar',)
2201
key_cross_border = ('quux',)
2202
key_delta = ('zaphod',)
2203
test.add_lines(key, (), ['foo\n'])
2204
self.assertEqual({}, basis.get_parent_map([key]))
2205
# lines added to the test that reference across the stack do a
2207
basis.add_lines(key_basis, (), ['foo\n'])
2209
test.add_lines(key_cross_border, (key_basis,), ['foo\n'])
2210
self.assertEqual('fulltext', test._index.get_method(key_cross_border))
2211
# we don't even need to look at the basis to see that this should be
2212
# stored as a fulltext
2213
self.assertEqual([], basis.calls)
2214
# Subsequent adds do delta.
2216
test.add_lines(key_delta, (key_cross_border,), ['foo\n'])
2217
self.assertEqual('line-delta', test._index.get_method(key_delta))
2218
self.assertEqual([], basis.calls)
2220
def test_annotate(self):
2221
# annotations from the test knit are answered without asking the basis
2222
basis, test = self.get_basis_and_test_knit()
2224
key_basis = ('bar',)
2225
key_missing = ('missing',)
2226
test.add_lines(key, (), ['foo\n'])
2227
details = test.annotate(key)
2228
self.assertEqual([(key, 'foo\n')], details)
2229
self.assertEqual([], basis.calls)
2230
# But texts that are not in the test knit are looked for in the basis
2232
basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2234
details = test.annotate(key_basis)
2235
self.assertEqual([(key_basis, 'foo\n'), (key_basis, 'bar\n')], details)
2236
# Not optimised to date:
2237
# self.assertEqual([("annotate", key_basis)], basis.calls)
2238
self.assertEqual([('get_parent_map', set([key_basis])),
2239
('get_parent_map', set([key_basis])),
2240
('get_record_stream', [key_basis], 'topological', True)],
2243
def test_check(self):
2244
# At the moment checking a stacked knit does implicitly check the
2246
basis, test = self.get_basis_and_test_knit()
2249
def test_get_parent_map(self):
2250
# parents in the test knit are answered without asking the basis
2251
basis, test = self.get_basis_and_test_knit()
2253
key_basis = ('bar',)
2254
key_missing = ('missing',)
2255
test.add_lines(key, (), [])
2256
parent_map = test.get_parent_map([key])
2257
self.assertEqual({key: ()}, parent_map)
2258
self.assertEqual([], basis.calls)
2259
# But parents that are not in the test knit are looked for in the basis
2260
basis.add_lines(key_basis, (), [])
2262
parent_map = test.get_parent_map([key, key_basis, key_missing])
2263
self.assertEqual({key: (),
2264
key_basis: ()}, parent_map)
2265
self.assertEqual([("get_parent_map", set([key_basis, key_missing]))],
2268
def test_get_record_stream_unordered_fulltexts(self):
2269
# records from the test knit are answered without asking the basis:
2270
basis, test = self.get_basis_and_test_knit()
2272
key_basis = ('bar',)
2273
key_missing = ('missing',)
2274
test.add_lines(key, (), ['foo\n'])
2275
records = list(test.get_record_stream([key], 'unordered', True))
2276
self.assertEqual(1, len(records))
2277
self.assertEqual([], basis.calls)
2278
# Missing (from test knit) objects are retrieved from the basis:
2279
basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2281
records = list(test.get_record_stream([key_basis, key_missing],
2283
self.assertEqual(2, len(records))
2284
calls = list(basis.calls)
2285
for record in records:
2286
self.assertSubset([record.key], (key_basis, key_missing))
2287
if record.key == key_missing:
2288
self.assertIsInstance(record, AbsentContentFactory)
2290
reference = list(basis.get_record_stream([key_basis],
2291
'unordered', True))[0]
2292
self.assertEqual(reference.key, record.key)
2293
self.assertEqual(reference.sha1, record.sha1)
2294
self.assertEqual(reference.storage_kind, record.storage_kind)
2295
self.assertEqual(reference.get_bytes_as(reference.storage_kind),
2296
record.get_bytes_as(record.storage_kind))
2297
self.assertEqual(reference.get_bytes_as('fulltext'),
2298
record.get_bytes_as('fulltext'))
2299
# It's not strictly minimal, but it seems reasonable for now for it to
2300
# ask which fallbacks have which parents.
2302
("get_parent_map", set([key_basis, key_missing])),
2303
("get_record_stream", [key_basis], 'unordered', True)],
2306
def test_get_record_stream_ordered_fulltexts(self):
2307
# ordering is preserved down into the fallback store.
2308
basis, test = self.get_basis_and_test_knit()
2310
key_basis = ('bar',)
2311
key_basis_2 = ('quux',)
2312
key_missing = ('missing',)
2313
test.add_lines(key, (key_basis,), ['foo\n'])
2314
# Missing (from test knit) objects are retrieved from the basis:
2315
basis.add_lines(key_basis, (key_basis_2,), ['foo\n', 'bar\n'])
2316
basis.add_lines(key_basis_2, (), ['quux\n'])
2318
# ask for in non-topological order
2319
records = list(test.get_record_stream(
2320
[key, key_basis, key_missing, key_basis_2], 'topological', True))
2321
self.assertEqual(4, len(records))
2323
for record in records:
2324
self.assertSubset([record.key],
2325
(key_basis, key_missing, key_basis_2, key))
2326
if record.key == key_missing:
2327
self.assertIsInstance(record, AbsentContentFactory)
2329
results.append((record.key, record.sha1, record.storage_kind,
2330
record.get_bytes_as('fulltext')))
2331
calls = list(basis.calls)
2332
order = [record[0] for record in results]
2333
self.assertEqual([key_basis_2, key_basis, key], order)
2334
for result in results:
2335
if result[0] == key:
2339
record = source.get_record_stream([result[0]], 'unordered',
2341
self.assertEqual(record.key, result[0])
2342
self.assertEqual(record.sha1, result[1])
2343
# We used to check that the storage kind matched, but actually it
2344
# depends on whether it was sourced from the basis, or in a single
2345
# group, because asking for full texts returns proxy objects to a
2346
# _ContentMapGenerator object; so checking the kind is unneeded.
2347
self.assertEqual(record.get_bytes_as('fulltext'), result[3])
2348
# It's not strictly minimal, but it seems reasonable for now for it to
2349
# ask which fallbacks have which parents.
2351
("get_parent_map", set([key_basis, key_basis_2, key_missing])),
2352
# topological is requested from the fallback, because that is what
2353
# was requested at the top level.
2354
("get_record_stream", [key_basis_2, key_basis], 'topological', True)],
2357
def test_get_record_stream_unordered_deltas(self):
2358
# records from the test knit are answered without asking the basis:
2359
basis, test = self.get_basis_and_test_knit()
2361
key_basis = ('bar',)
2362
key_missing = ('missing',)
2363
test.add_lines(key, (), ['foo\n'])
2364
records = list(test.get_record_stream([key], 'unordered', False))
2365
self.assertEqual(1, len(records))
2366
self.assertEqual([], basis.calls)
2367
# Missing (from test knit) objects are retrieved from the basis:
2368
basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2370
records = list(test.get_record_stream([key_basis, key_missing],
2371
'unordered', False))
2372
self.assertEqual(2, len(records))
2373
calls = list(basis.calls)
2374
for record in records:
2375
self.assertSubset([record.key], (key_basis, key_missing))
2376
if record.key == key_missing:
2377
self.assertIsInstance(record, AbsentContentFactory)
2379
reference = list(basis.get_record_stream([key_basis],
2380
'unordered', False))[0]
2381
self.assertEqual(reference.key, record.key)
2382
self.assertEqual(reference.sha1, record.sha1)
2383
self.assertEqual(reference.storage_kind, record.storage_kind)
2384
self.assertEqual(reference.get_bytes_as(reference.storage_kind),
2385
record.get_bytes_as(record.storage_kind))
2386
# It's not strictly minimal, but it seems reasonable for now for it to
2387
# ask which fallbacks have which parents.
2389
("get_parent_map", set([key_basis, key_missing])),
2390
("get_record_stream", [key_basis], 'unordered', False)],
2393
def test_get_record_stream_ordered_deltas(self):
2394
# ordering is preserved down into the fallback store.
2395
basis, test = self.get_basis_and_test_knit()
2397
key_basis = ('bar',)
2398
key_basis_2 = ('quux',)
2399
key_missing = ('missing',)
2400
test.add_lines(key, (key_basis,), ['foo\n'])
2401
# Missing (from test knit) objects are retrieved from the basis:
2402
basis.add_lines(key_basis, (key_basis_2,), ['foo\n', 'bar\n'])
2403
basis.add_lines(key_basis_2, (), ['quux\n'])
2405
# ask for in non-topological order
2406
records = list(test.get_record_stream(
2407
[key, key_basis, key_missing, key_basis_2], 'topological', False))
2408
self.assertEqual(4, len(records))
2410
for record in records:
2411
self.assertSubset([record.key],
2412
(key_basis, key_missing, key_basis_2, key))
2413
if record.key == key_missing:
2414
self.assertIsInstance(record, AbsentContentFactory)
2416
results.append((record.key, record.sha1, record.storage_kind,
2417
record.get_bytes_as(record.storage_kind)))
2418
calls = list(basis.calls)
2419
order = [record[0] for record in results]
2420
self.assertEqual([key_basis_2, key_basis, key], order)
2421
for result in results:
2422
if result[0] == key:
2426
record = source.get_record_stream([result[0]], 'unordered',
2428
self.assertEqual(record.key, result[0])
2429
self.assertEqual(record.sha1, result[1])
2430
self.assertEqual(record.storage_kind, result[2])
2431
self.assertEqual(record.get_bytes_as(record.storage_kind), result[3])
2432
# It's not strictly minimal, but it seems reasonable for now for it to
2433
# ask which fallbacks have which parents.
2435
("get_parent_map", set([key_basis, key_basis_2, key_missing])),
2436
("get_record_stream", [key_basis_2, key_basis], 'topological', False)],
2439
def test_get_sha1s(self):
2440
# sha1's in the test knit are answered without asking the basis
2441
basis, test = self.get_basis_and_test_knit()
2443
key_basis = ('bar',)
2444
key_missing = ('missing',)
2445
test.add_lines(key, (), ['foo\n'])
2446
key_sha1sum = osutils.sha_string('foo\n')
2447
sha1s = test.get_sha1s([key])
2448
self.assertEqual({key: key_sha1sum}, sha1s)
2449
self.assertEqual([], basis.calls)
2450
# But texts that are not in the test knit are looked for in the basis
2451
# directly (rather than via text reconstruction) so that remote servers
2452
# etc don't have to answer with full content.
2453
basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2454
basis_sha1sum = osutils.sha_string('foo\nbar\n')
2456
sha1s = test.get_sha1s([key, key_missing, key_basis])
2457
self.assertEqual({key: key_sha1sum,
2458
key_basis: basis_sha1sum}, sha1s)
2459
self.assertEqual([("get_sha1s", set([key_basis, key_missing]))],
2462
def test_insert_record_stream(self):
2463
# records are inserted as normal; insert_record_stream builds on
2464
# add_lines, so a smoke test should be all that's needed:
2466
key_basis = ('bar',)
2467
key_delta = ('zaphod',)
2468
basis, test = self.get_basis_and_test_knit()
2469
source = self.make_test_knit(name='source')
2470
basis.add_lines(key_basis, (), ['foo\n'])
2472
source.add_lines(key_basis, (), ['foo\n'])
2473
source.add_lines(key_delta, (key_basis,), ['bar\n'])
2474
stream = source.get_record_stream([key_delta], 'unordered', False)
2475
test.insert_record_stream(stream)
2476
# XXX: this does somewhat too many calls in making sure of whether it
2477
# has to recreate the full text.
2478
self.assertEqual([("get_parent_map", set([key_basis])),
2479
('get_parent_map', set([key_basis])),
2480
('get_record_stream', [key_basis], 'unordered', True)],
2482
self.assertEqual({key_delta:(key_basis,)},
2483
test.get_parent_map([key_delta]))
2484
self.assertEqual('bar\n', test.get_record_stream([key_delta],
2485
'unordered', True).next().get_bytes_as('fulltext'))
2487
def test_iter_lines_added_or_present_in_keys(self):
2488
# Lines from the basis are returned, and lines for a given key are only
2492
# all sources are asked for keys:
2493
basis, test = self.get_basis_and_test_knit()
2494
basis.add_lines(key1, (), ["foo"])
2496
lines = list(test.iter_lines_added_or_present_in_keys([key1]))
2497
self.assertEqual([("foo\n", key1)], lines)
2498
self.assertEqual([("iter_lines_added_or_present_in_keys", set([key1]))],
2500
# keys in both are not duplicated:
2501
test.add_lines(key2, (), ["bar\n"])
2502
basis.add_lines(key2, (), ["bar\n"])
2504
lines = list(test.iter_lines_added_or_present_in_keys([key2]))
2505
self.assertEqual([("bar\n", key2)], lines)
2506
self.assertEqual([], basis.calls)
2508
def test_keys(self):
2511
# all sources are asked for keys:
2512
basis, test = self.get_basis_and_test_knit()
2514
self.assertEqual(set(), set(keys))
2515
self.assertEqual([("keys",)], basis.calls)
2516
# keys from a basis are returned:
2517
basis.add_lines(key1, (), [])
2520
self.assertEqual(set([key1]), set(keys))
2521
self.assertEqual([("keys",)], basis.calls)
2522
# keys in both are not duplicated:
2523
test.add_lines(key2, (), [])
2524
basis.add_lines(key2, (), [])
2527
self.assertEqual(2, len(keys))
2528
self.assertEqual(set([key1, key2]), set(keys))
2529
self.assertEqual([("keys",)], basis.calls)
2531
def test_add_mpdiffs(self):
2532
# records are inserted as normal; add_mpdiff builds on
2533
# add_lines, so a smoke test should be all that's needed:
2535
key_basis = ('bar',)
2536
key_delta = ('zaphod',)
2537
basis, test = self.get_basis_and_test_knit()
2538
source = self.make_test_knit(name='source')
2539
basis.add_lines(key_basis, (), ['foo\n'])
2541
source.add_lines(key_basis, (), ['foo\n'])
2542
source.add_lines(key_delta, (key_basis,), ['bar\n'])
2543
diffs = source.make_mpdiffs([key_delta])
2544
test.add_mpdiffs([(key_delta, (key_basis,),
2545
source.get_sha1s([key_delta])[key_delta], diffs[0])])
2546
self.assertEqual([("get_parent_map", set([key_basis])),
2547
('get_record_stream', [key_basis], 'unordered', True),],
2549
self.assertEqual({key_delta:(key_basis,)},
2550
test.get_parent_map([key_delta]))
2551
self.assertEqual('bar\n', test.get_record_stream([key_delta],
2552
'unordered', True).next().get_bytes_as('fulltext'))
2554
def test_make_mpdiffs(self):
2555
# Generating an mpdiff across a stacking boundary should detect parent
2559
key_right = ('zaphod',)
2560
basis, test = self.get_basis_and_test_knit()
2561
basis.add_lines(key_left, (), ['bar\n'])
2562
basis.add_lines(key_right, (), ['zaphod\n'])
2564
test.add_lines(key, (key_left, key_right),
2565
['bar\n', 'foo\n', 'zaphod\n'])
2566
diffs = test.make_mpdiffs([key])
2568
multiparent.MultiParent([multiparent.ParentText(0, 0, 0, 1),
2569
multiparent.NewText(['foo\n']),
2570
multiparent.ParentText(1, 0, 2, 1)])],
2572
self.assertEqual(3, len(basis.calls))
2574
("get_parent_map", set([key_left, key_right])),
2575
("get_parent_map", set([key_left, key_right])),
2578
last_call = basis.calls[-1]
2579
self.assertEqual('get_record_stream', last_call[0])
2580
self.assertEqual(set([key_left, key_right]), set(last_call[1]))
2581
self.assertEqual('topological', last_call[2])
2582
self.assertEqual(True, last_call[3])
2585
class TestNetworkBehaviour(KnitTests):
2586
"""Tests for getting data out of/into knits over the network."""
2588
def test_include_delta_closure_generates_a_knit_delta_closure(self):
2589
vf = self.make_test_knit(name='test')
2590
# put in three texts, giving ft, delta, delta
2591
vf.add_lines(('base',), (), ['base\n', 'content\n'])
2592
vf.add_lines(('d1',), (('base',),), ['d1\n'])
2593
vf.add_lines(('d2',), (('d1',),), ['d2\n'])
2594
# But heuristics could interfere, so check what happened:
2595
self.assertEqual(['knit-ft-gz', 'knit-delta-gz', 'knit-delta-gz'],
2596
[record.storage_kind for record in
2597
vf.get_record_stream([('base',), ('d1',), ('d2',)],
2598
'topological', False)])
2599
# generate a stream of just the deltas include_delta_closure=True,
2600
# serialise to the network, and check that we get a delta closure on the wire.
2601
stream = vf.get_record_stream([('d1',), ('d2',)], 'topological', True)
2602
netb = [record.get_bytes_as(record.storage_kind) for record in stream]
2603
# The first bytes should be a memo from _ContentMapGenerator, and the
2604
# second bytes should be empty (because its a API proxy not something
2605
# for wire serialisation.
2606
self.assertEqual('', netb[1])
2608
kind, line_end = network_bytes_to_kind_and_offset(bytes)
2609
self.assertEqual('knit-delta-closure', kind)
2612
class TestContentMapGenerator(KnitTests):
2613
"""Tests for ContentMapGenerator"""
2615
def test_get_record_stream_gives_records(self):
2616
vf = self.make_test_knit(name='test')
2617
# put in three texts, giving ft, delta, delta
2618
vf.add_lines(('base',), (), ['base\n', 'content\n'])
2619
vf.add_lines(('d1',), (('base',),), ['d1\n'])
2620
vf.add_lines(('d2',), (('d1',),), ['d2\n'])
2621
keys = [('d1',), ('d2',)]
2622
generator = _VFContentMapGenerator(vf, keys,
2623
global_map=vf.get_parent_map(keys))
2624
for record in generator.get_record_stream():
2625
if record.key == ('d1',):
2626
self.assertEqual('d1\n', record.get_bytes_as('fulltext'))
2628
self.assertEqual('d2\n', record.get_bytes_as('fulltext'))
2630
def test_get_record_stream_kinds_are_raw(self):
2631
vf = self.make_test_knit(name='test')
2632
# put in three texts, giving ft, delta, delta
2633
vf.add_lines(('base',), (), ['base\n', 'content\n'])
2634
vf.add_lines(('d1',), (('base',),), ['d1\n'])
2635
vf.add_lines(('d2',), (('d1',),), ['d2\n'])
2636
keys = [('base',), ('d1',), ('d2',)]
2637
generator = _VFContentMapGenerator(vf, keys,
2638
global_map=vf.get_parent_map(keys))
2639
kinds = {('base',): 'knit-delta-closure',
2640
('d1',): 'knit-delta-closure-ref',
2641
('d2',): 'knit-delta-closure-ref',
2643
for record in generator.get_record_stream():
2644
self.assertEqual(kinds[record.key], record.storage_kind)
2633
def test_iter_parents(self):
2634
index = self.two_graph_index()
2635
self.assertEqual(set([
2636
('tip', ()), ('tail', ()), ('parent', ()), ('separate', ())
2638
set(index.iter_parents(['tip', 'tail', 'ghost', 'parent', 'separate'])))
2639
self.assertEqual(set([('tip', ())]),
2640
set(index.iter_parents(['tip'])))
2641
self.assertEqual(set(),
2642
set(index.iter_parents([])))