487
358
writer = pack.ContainerWriter(write_data)
489
360
access.set_writer(writer, index, (transport, packname))
490
memos = access.add_raw_records([('key', 10)], '1234567890')
361
memos = access.add_raw_records([10], '1234567890')
492
363
self.assertEqual(['1234567890'], list(access.get_raw_records(memos)))
494
def test_missing_index_raises_retry(self):
495
memos = self.make_pack_file()
496
transport = self.get_transport()
497
reload_called, reload_func = self.make_reload_func()
498
# Note that the index key has changed from 'foo' to 'bar'
499
access = pack_repo._DirectPackAccess({'bar':(transport, 'packname')},
500
reload_func=reload_func)
501
e = self.assertListRaises(errors.RetryWithNewPacks,
502
access.get_raw_records, memos)
503
# Because a key was passed in which does not match our index list, we
504
# assume that the listing was already reloaded
505
self.assertTrue(e.reload_occurred)
506
self.assertIsInstance(e.exc_info, tuple)
507
self.assertIs(e.exc_info[0], KeyError)
508
self.assertIsInstance(e.exc_info[1], KeyError)
510
def test_missing_index_raises_key_error_with_no_reload(self):
511
memos = self.make_pack_file()
512
transport = self.get_transport()
513
# Note that the index key has changed from 'foo' to 'bar'
514
access = pack_repo._DirectPackAccess({'bar':(transport, 'packname')})
515
e = self.assertListRaises(KeyError, access.get_raw_records, memos)
517
def test_missing_file_raises_retry(self):
518
memos = self.make_pack_file()
519
transport = self.get_transport()
520
reload_called, reload_func = self.make_reload_func()
521
# Note that the 'filename' has been changed to 'different-packname'
522
access = pack_repo._DirectPackAccess(
523
{'foo':(transport, 'different-packname')},
524
reload_func=reload_func)
525
e = self.assertListRaises(errors.RetryWithNewPacks,
526
access.get_raw_records, memos)
527
# The file has gone missing, so we assume we need to reload
528
self.assertFalse(e.reload_occurred)
529
self.assertIsInstance(e.exc_info, tuple)
530
self.assertIs(e.exc_info[0], errors.NoSuchFile)
531
self.assertIsInstance(e.exc_info[1], errors.NoSuchFile)
532
self.assertEqual('different-packname', e.exc_info[1].path)
534
def test_missing_file_raises_no_such_file_with_no_reload(self):
535
memos = self.make_pack_file()
536
transport = self.get_transport()
537
# Note that the 'filename' has been changed to 'different-packname'
538
access = pack_repo._DirectPackAccess(
539
{'foo': (transport, 'different-packname')})
540
e = self.assertListRaises(errors.NoSuchFile,
541
access.get_raw_records, memos)
543
def test_failing_readv_raises_retry(self):
544
memos = self.make_pack_file()
545
transport = self.get_transport()
546
failing_transport = MockReadvFailingTransport(
547
[transport.get_bytes('packname')])
548
reload_called, reload_func = self.make_reload_func()
549
access = pack_repo._DirectPackAccess(
550
{'foo': (failing_transport, 'packname')},
551
reload_func=reload_func)
552
# Asking for a single record will not trigger the Mock failure
553
self.assertEqual(['1234567890'],
554
list(access.get_raw_records(memos[:1])))
555
self.assertEqual(['12345'],
556
list(access.get_raw_records(memos[1:2])))
557
# A multiple offset readv() will fail mid-way through
558
e = self.assertListRaises(errors.RetryWithNewPacks,
559
access.get_raw_records, memos)
560
# The file has gone missing, so we assume we need to reload
561
self.assertFalse(e.reload_occurred)
562
self.assertIsInstance(e.exc_info, tuple)
563
self.assertIs(e.exc_info[0], errors.NoSuchFile)
564
self.assertIsInstance(e.exc_info[1], errors.NoSuchFile)
565
self.assertEqual('packname', e.exc_info[1].path)
567
def test_failing_readv_raises_no_such_file_with_no_reload(self):
568
memos = self.make_pack_file()
569
transport = self.get_transport()
570
failing_transport = MockReadvFailingTransport(
571
[transport.get_bytes('packname')])
572
reload_called, reload_func = self.make_reload_func()
573
access = pack_repo._DirectPackAccess(
574
{'foo':(failing_transport, 'packname')})
575
# Asking for a single record will not trigger the Mock failure
576
self.assertEqual(['1234567890'],
577
list(access.get_raw_records(memos[:1])))
578
self.assertEqual(['12345'],
579
list(access.get_raw_records(memos[1:2])))
580
# A multiple offset readv() will fail mid-way through
581
e = self.assertListRaises(errors.NoSuchFile,
582
access.get_raw_records, memos)
584
def test_reload_or_raise_no_reload(self):
585
access = pack_repo._DirectPackAccess({}, reload_func=None)
586
retry_exc = self.make_retry_exception()
587
# Without a reload_func, we will just re-raise the original exception
588
self.assertRaises(_TestException, access.reload_or_raise, retry_exc)
590
def test_reload_or_raise_reload_changed(self):
591
reload_called, reload_func = self.make_reload_func(return_val=True)
592
access = pack_repo._DirectPackAccess({}, reload_func=reload_func)
593
retry_exc = self.make_retry_exception()
594
access.reload_or_raise(retry_exc)
595
self.assertEqual([1], reload_called)
596
retry_exc.reload_occurred=True
597
access.reload_or_raise(retry_exc)
598
self.assertEqual([2], reload_called)
600
def test_reload_or_raise_reload_no_change(self):
601
reload_called, reload_func = self.make_reload_func(return_val=False)
602
access = pack_repo._DirectPackAccess({}, reload_func=reload_func)
603
retry_exc = self.make_retry_exception()
604
# If reload_occurred is False, then we consider it an error to have
605
# reload_func() return False (no changes).
606
self.assertRaises(_TestException, access.reload_or_raise, retry_exc)
607
self.assertEqual([1], reload_called)
608
retry_exc.reload_occurred=True
609
# If reload_occurred is True, then we assume nothing changed because
610
# it had changed earlier, but didn't change again
611
access.reload_or_raise(retry_exc)
612
self.assertEqual([2], reload_called)
614
def test_annotate_retries(self):
615
vf, reload_counter = self.make_vf_for_retrying()
616
# It is a little bit bogus to annotate the Revision VF, but it works,
617
# as we have ancestry stored there
619
reload_lines = vf.annotate(key)
620
self.assertEqual([1, 1, 0], reload_counter)
621
plain_lines = vf.annotate(key)
622
self.assertEqual([1, 1, 0], reload_counter) # No extra reloading
623
if reload_lines != plain_lines:
624
self.fail('Annotation was not identical with reloading.')
625
# Now delete the packs-in-use, which should trigger another reload, but
626
# this time we just raise an exception because we can't recover
627
for trans, name in vf._access._indices.itervalues():
629
self.assertRaises(errors.NoSuchFile, vf.annotate, key)
630
self.assertEqual([2, 1, 1], reload_counter)
632
def test__get_record_map_retries(self):
633
vf, reload_counter = self.make_vf_for_retrying()
634
keys = [('rev-1',), ('rev-2',), ('rev-3',)]
635
records = vf._get_record_map(keys)
636
self.assertEqual(keys, sorted(records.keys()))
637
self.assertEqual([1, 1, 0], reload_counter)
638
# Now delete the packs-in-use, which should trigger another reload, but
639
# this time we just raise an exception because we can't recover
640
for trans, name in vf._access._indices.itervalues():
642
self.assertRaises(errors.NoSuchFile, vf._get_record_map, keys)
643
self.assertEqual([2, 1, 1], reload_counter)
645
def test_get_record_stream_retries(self):
646
vf, reload_counter = self.make_vf_for_retrying()
647
keys = [('rev-1',), ('rev-2',), ('rev-3',)]
648
record_stream = vf.get_record_stream(keys, 'topological', False)
649
record = record_stream.next()
650
self.assertEqual(('rev-1',), record.key)
651
self.assertEqual([0, 0, 0], reload_counter)
652
record = record_stream.next()
653
self.assertEqual(('rev-2',), record.key)
654
self.assertEqual([1, 1, 0], reload_counter)
655
record = record_stream.next()
656
self.assertEqual(('rev-3',), record.key)
657
self.assertEqual([1, 1, 0], reload_counter)
658
# Now delete all pack files, and see that we raise the right error
659
for trans, name in vf._access._indices.itervalues():
661
self.assertListRaises(errors.NoSuchFile,
662
vf.get_record_stream, keys, 'topological', False)
664
def test_iter_lines_added_or_present_in_keys_retries(self):
665
vf, reload_counter = self.make_vf_for_retrying()
666
keys = [('rev-1',), ('rev-2',), ('rev-3',)]
667
# Unfortunately, iter_lines_added_or_present_in_keys iterates the
668
# result in random order (determined by the iteration order from a
669
# set()), so we don't have any solid way to trigger whether data is
670
# read before or after. However we tried to delete the middle node to
671
# exercise the code well.
672
# What we care about is that all lines are always yielded, but not
675
reload_lines = sorted(vf.iter_lines_added_or_present_in_keys(keys))
676
self.assertEqual([1, 1, 0], reload_counter)
677
# Now do it again, to make sure the result is equivalent
678
plain_lines = sorted(vf.iter_lines_added_or_present_in_keys(keys))
679
self.assertEqual([1, 1, 0], reload_counter) # No extra reloading
680
self.assertEqual(plain_lines, reload_lines)
681
self.assertEqual(21, len(plain_lines))
682
# Now delete all pack files, and see that we raise the right error
683
for trans, name in vf._access._indices.itervalues():
685
self.assertListRaises(errors.NoSuchFile,
686
vf.iter_lines_added_or_present_in_keys, keys)
687
self.assertEqual([2, 1, 1], reload_counter)
689
def test_get_record_stream_yields_disk_sorted_order(self):
690
# if we get 'unordered' pick a semi-optimal order for reading. The
691
# order should be grouped by pack file, and then by position in file
692
repo = self.make_repository('test', format='pack-0.92')
694
self.addCleanup(repo.unlock)
695
repo.start_write_group()
697
vf.add_lines(('f-id', 'rev-5'), [('f-id', 'rev-4')], ['lines\n'])
698
vf.add_lines(('f-id', 'rev-1'), [], ['lines\n'])
699
vf.add_lines(('f-id', 'rev-2'), [('f-id', 'rev-1')], ['lines\n'])
700
repo.commit_write_group()
701
# We inserted them as rev-5, rev-1, rev-2, we should get them back in
703
stream = vf.get_record_stream([('f-id', 'rev-1'), ('f-id', 'rev-5'),
704
('f-id', 'rev-2')], 'unordered', False)
705
keys = [r.key for r in stream]
706
self.assertEqual([('f-id', 'rev-5'), ('f-id', 'rev-1'),
707
('f-id', 'rev-2')], keys)
708
repo.start_write_group()
709
vf.add_lines(('f-id', 'rev-4'), [('f-id', 'rev-3')], ['lines\n'])
710
vf.add_lines(('f-id', 'rev-3'), [('f-id', 'rev-2')], ['lines\n'])
711
vf.add_lines(('f-id', 'rev-6'), [('f-id', 'rev-5')], ['lines\n'])
712
repo.commit_write_group()
713
# Request in random order, to make sure the output order isn't based on
715
request_keys = set(('f-id', 'rev-%d' % i) for i in range(1, 7))
716
stream = vf.get_record_stream(request_keys, 'unordered', False)
717
keys = [r.key for r in stream]
718
# We want to get the keys back in disk order, but it doesn't matter
719
# which pack we read from first. So this can come back in 2 orders
720
alt1 = [('f-id', 'rev-%d' % i) for i in [4, 3, 6, 5, 1, 2]]
721
alt2 = [('f-id', 'rev-%d' % i) for i in [5, 1, 2, 4, 3, 6]]
722
if keys != alt1 and keys != alt2:
723
self.fail('Returned key order did not match either expected order.'
724
' expected %s or %s, not %s'
725
% (alt1, alt2, keys))
728
366
class LowLevelKnitDataTests(TestCase):
877
472
# Change 2 bytes in the middle to \xff
878
473
gz_txt = gz_txt[:10] + '\xff\xff' + gz_txt[12:]
879
474
transport = MockTransport([gz_txt])
880
access = _KnitKeyAccess(transport, ConstantMapper('filename'))
881
knit = KnitVersionedFiles(None, access)
882
records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]
883
self.assertRaises(errors.KnitCorrupt, list,
884
knit._read_records_iter(records))
885
# read_records_iter_raw will barf on bad gz data
886
self.assertRaises(errors.KnitCorrupt, list,
887
knit._read_records_iter_raw(records))
475
access = _KnitAccess(transport, 'filename', None, None, False, False)
476
data = _KnitData(access=access)
477
records = [('rev-id-1', (None, 0, len(gz_txt)))]
479
self.assertRaises(errors.KnitCorrupt, data.read_records, records)
481
# read_records_iter_raw will notice if we request the wrong version.
482
self.assertRaises(errors.KnitCorrupt, list,
483
data.read_records_iter_raw(records))
890
486
class LowLevelKnitIndexTests(TestCase):
892
def get_knit_index(self, transport, name, mode):
893
mapper = ConstantMapper(name)
488
def get_knit_index(self, *args, **kwargs):
489
orig = knit._load_data
491
knit._load_data = orig
492
self.addCleanup(reset)
894
493
from bzrlib._knit_load_data_py import _load_data_py
895
self.overrideAttr(knit, '_load_data', _load_data_py)
896
allow_writes = lambda: 'w' in mode
897
return _KndxIndex(transport, mapper, lambda:None, allow_writes, lambda:True)
494
knit._load_data = _load_data_py
495
return _KnitIndex(*args, **kwargs)
497
def test_no_such_file(self):
498
transport = MockTransport()
500
self.assertRaises(NoSuchFile, self.get_knit_index,
501
transport, "filename", "r")
502
self.assertRaises(NoSuchFile, self.get_knit_index,
503
transport, "filename", "w", create=False)
899
505
def test_create_file(self):
900
506
transport = MockTransport()
901
index = self.get_knit_index(transport, "filename", "w")
903
call = transport.calls.pop(0)
904
# call[1][1] is a StringIO - we can't test it by simple equality.
905
self.assertEqual('put_file_non_atomic', call[0])
906
self.assertEqual('filename.kndx', call[1][0])
907
# With no history, _KndxIndex writes a new index:
908
self.assertEqual(_KndxIndex.HEADER,
909
call[1][1].getvalue())
910
self.assertEqual({'create_parent_dir': True}, call[2])
508
index = self.get_knit_index(transport, "filename", "w",
509
file_mode="wb", create=True)
511
("put_bytes_non_atomic",
512
("filename", index.HEADER), {"mode": "wb"}),
513
transport.calls.pop(0))
515
def test_delay_create_file(self):
516
transport = MockTransport()
518
index = self.get_knit_index(transport, "filename", "w",
519
create=True, file_mode="wb", create_parent_dir=True,
520
delay_create=True, dir_mode=0777)
521
self.assertEqual([], transport.calls)
523
index.add_versions([])
524
name, (filename, f), kwargs = transport.calls.pop(0)
525
self.assertEqual("put_file_non_atomic", name)
527
{"dir_mode": 0777, "create_parent_dir": True, "mode": "wb"},
529
self.assertEqual("filename", filename)
530
self.assertEqual(index.HEADER, f.read())
532
index.add_versions([])
533
self.assertEqual(("append_bytes", ("filename", ""), {}),
534
transport.calls.pop(0))
912
536
def test_read_utf8_version_id(self):
913
537
unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
914
538
utf8_revision_id = unicode_revision_id.encode('utf-8')
915
539
transport = MockTransport([
917
541
'%s option 0 1 :' % (utf8_revision_id,)
919
543
index = self.get_knit_index(transport, "filename", "r")
920
# _KndxIndex is a private class, and deals in utf8 revision_ids, not
544
# _KnitIndex is a private class, and deals in utf8 revision_ids, not
921
545
# Unicode revision_ids.
922
self.assertEqual({(utf8_revision_id,):()},
923
index.get_parent_map(index.keys()))
924
self.assertFalse((unicode_revision_id,) in index.keys())
546
self.assertTrue(index.has_version(utf8_revision_id))
547
self.assertFalse(index.has_version(unicode_revision_id))
926
549
def test_read_utf8_parents(self):
927
550
unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
928
551
utf8_revision_id = unicode_revision_id.encode('utf-8')
929
552
transport = MockTransport([
931
554
"version option 0 1 .%s :" % (utf8_revision_id,)
933
556
index = self.get_knit_index(transport, "filename", "r")
934
self.assertEqual({("version",):((utf8_revision_id,),)},
935
index.get_parent_map(index.keys()))
557
self.assertEqual([utf8_revision_id],
558
index.get_parents_with_ghosts("version"))
937
560
def test_read_ignore_corrupted_lines(self):
938
561
transport = MockTransport([
941
564
"corrupted options 0 1 .b .c ",
942
565
"version options 0 1 :"
944
567
index = self.get_knit_index(transport, "filename", "r")
945
self.assertEqual(1, len(index.keys()))
946
self.assertEqual(set([("version",)]), index.keys())
568
self.assertEqual(1, index.num_versions())
569
self.assertTrue(index.has_version("version"))
948
571
def test_read_corrupted_header(self):
949
572
transport = MockTransport(['not a bzr knit index header\n'])
950
index = self.get_knit_index(transport, "filename", "r")
951
self.assertRaises(KnitHeaderError, index.keys)
573
self.assertRaises(KnitHeaderError,
574
self.get_knit_index, transport, "filename", "r")
953
576
def test_read_duplicate_entries(self):
954
577
transport = MockTransport([
956
579
"parent options 0 1 :",
957
580
"version options1 0 1 0 :",
958
581
"version options2 1 2 .other :",
959
582
"version options3 3 4 0 .other :"
961
584
index = self.get_knit_index(transport, "filename", "r")
962
self.assertEqual(2, len(index.keys()))
585
self.assertEqual(2, index.num_versions())
963
586
# check that the index used is the first one written. (Specific
964
587
# to KnitIndex style indices.
965
self.assertEqual("1", index._dictionary_compress([("version",)]))
966
self.assertEqual((("version",), 3, 4), index.get_position(("version",)))
967
self.assertEqual(["options3"], index.get_options(("version",)))
968
self.assertEqual({("version",):(("parent",), ("other",))},
969
index.get_parent_map([("version",)]))
588
self.assertEqual("1", index._version_list_to_index(["version"]))
589
self.assertEqual((None, 3, 4), index.get_position("version"))
590
self.assertEqual(["options3"], index.get_options("version"))
591
self.assertEqual(["parent", "other"],
592
index.get_parents_with_ghosts("version"))
971
594
def test_read_compressed_parents(self):
972
595
transport = MockTransport([
974
597
"a option 0 1 :",
975
598
"b option 0 1 0 :",
976
599
"c option 0 1 1 0 :",
978
601
index = self.get_knit_index(transport, "filename", "r")
979
self.assertEqual({("b",):(("a",),), ("c",):(("b",), ("a",))},
980
index.get_parent_map([("b",), ("c",)]))
602
self.assertEqual(["a"], index.get_parents("b"))
603
self.assertEqual(["b", "a"], index.get_parents("c"))
982
605
def test_write_utf8_version_id(self):
983
606
unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
984
607
utf8_revision_id = unicode_revision_id.encode('utf-8')
985
608
transport = MockTransport([
988
611
index = self.get_knit_index(transport, "filename", "r")
990
((utf8_revision_id,), ["option"], ((utf8_revision_id,), 0, 1), [])])
991
call = transport.calls.pop(0)
992
# call[1][1] is a StringIO - we can't test it by simple equality.
993
self.assertEqual('put_file_non_atomic', call[0])
994
self.assertEqual('filename.kndx', call[1][0])
995
# With no history, _KndxIndex writes a new index:
996
self.assertEqual(_KndxIndex.HEADER +
997
"\n%s option 0 1 :" % (utf8_revision_id,),
998
call[1][1].getvalue())
999
self.assertEqual({'create_parent_dir': True}, call[2])
612
index.add_version(utf8_revision_id, ["option"], (None, 0, 1), [])
613
self.assertEqual(("append_bytes", ("filename",
614
"\n%s option 0 1 :" % (utf8_revision_id,)),
616
transport.calls.pop(0))
1001
618
def test_write_utf8_parents(self):
1002
619
unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
1003
620
utf8_revision_id = unicode_revision_id.encode('utf-8')
1004
621
transport = MockTransport([
1007
index = self.get_knit_index(transport, "filename", "r")
1009
(("version",), ["option"], (("version",), 0, 1), [(utf8_revision_id,)])])
1010
call = transport.calls.pop(0)
1011
# call[1][1] is a StringIO - we can't test it by simple equality.
1012
self.assertEqual('put_file_non_atomic', call[0])
1013
self.assertEqual('filename.kndx', call[1][0])
1014
# With no history, _KndxIndex writes a new index:
1015
self.assertEqual(_KndxIndex.HEADER +
1016
"\nversion option 0 1 .%s :" % (utf8_revision_id,),
1017
call[1][1].getvalue())
1018
self.assertEqual({'create_parent_dir': True}, call[2])
1020
def test_keys(self):
1021
transport = MockTransport([
1024
index = self.get_knit_index(transport, "filename", "r")
1026
self.assertEqual(set(), index.keys())
1028
index.add_records([(("a",), ["option"], (("a",), 0, 1), [])])
1029
self.assertEqual(set([("a",)]), index.keys())
1031
index.add_records([(("a",), ["option"], (("a",), 0, 1), [])])
1032
self.assertEqual(set([("a",)]), index.keys())
1034
index.add_records([(("b",), ["option"], (("b",), 0, 1), [])])
1035
self.assertEqual(set([("a",), ("b",)]), index.keys())
1037
def add_a_b(self, index, random_id=None):
1039
if random_id is not None:
1040
kwargs["random_id"] = random_id
1042
(("a",), ["option"], (("a",), 0, 1), [("b",)]),
1043
(("a",), ["opt"], (("a",), 1, 2), [("c",)]),
1044
(("b",), ["option"], (("b",), 2, 3), [("a",)])
1047
def assertIndexIsAB(self, index):
1052
index.get_parent_map(index.keys()))
1053
self.assertEqual((("a",), 1, 2), index.get_position(("a",)))
1054
self.assertEqual((("b",), 2, 3), index.get_position(("b",)))
1055
self.assertEqual(["opt"], index.get_options(("a",)))
624
index = self.get_knit_index(transport, "filename", "r")
625
index.add_version("version", ["option"], (None, 0, 1), [utf8_revision_id])
626
self.assertEqual(("append_bytes", ("filename",
627
"\nversion option 0 1 .%s :" % (utf8_revision_id,)),
629
transport.calls.pop(0))
631
def test_get_graph(self):
632
transport = MockTransport()
633
index = self.get_knit_index(transport, "filename", "w", create=True)
634
self.assertEqual([], index.get_graph())
636
index.add_version("a", ["option"], (None, 0, 1), ["b"])
637
self.assertEqual([("a", ["b"])], index.get_graph())
639
index.add_version("c", ["option"], (None, 0, 1), ["d"])
640
self.assertEqual([("a", ["b"]), ("c", ["d"])],
641
sorted(index.get_graph()))
643
def test_get_ancestry(self):
644
transport = MockTransport([
647
"b option 0 1 0 .e :",
648
"c option 0 1 1 0 :",
649
"d option 0 1 2 .f :"
651
index = self.get_knit_index(transport, "filename", "r")
653
self.assertEqual([], index.get_ancestry([]))
654
self.assertEqual(["a"], index.get_ancestry(["a"]))
655
self.assertEqual(["a", "b"], index.get_ancestry(["b"]))
656
self.assertEqual(["a", "b", "c"], index.get_ancestry(["c"]))
657
self.assertEqual(["a", "b", "c", "d"], index.get_ancestry(["d"]))
658
self.assertEqual(["a", "b"], index.get_ancestry(["a", "b"]))
659
self.assertEqual(["a", "b", "c"], index.get_ancestry(["a", "c"]))
661
self.assertRaises(RevisionNotPresent, index.get_ancestry, ["e"])
663
def test_get_ancestry_with_ghosts(self):
664
transport = MockTransport([
667
"b option 0 1 0 .e :",
668
"c option 0 1 0 .f .g :",
669
"d option 0 1 2 .h .j .k :"
671
index = self.get_knit_index(transport, "filename", "r")
673
self.assertEqual([], index.get_ancestry_with_ghosts([]))
674
self.assertEqual(["a"], index.get_ancestry_with_ghosts(["a"]))
675
self.assertEqual(["a", "e", "b"],
676
index.get_ancestry_with_ghosts(["b"]))
677
self.assertEqual(["a", "g", "f", "c"],
678
index.get_ancestry_with_ghosts(["c"]))
679
self.assertEqual(["a", "g", "f", "c", "k", "j", "h", "d"],
680
index.get_ancestry_with_ghosts(["d"]))
681
self.assertEqual(["a", "e", "b"],
682
index.get_ancestry_with_ghosts(["a", "b"]))
683
self.assertEqual(["a", "g", "f", "c"],
684
index.get_ancestry_with_ghosts(["a", "c"]))
686
["a", "g", "f", "c", "e", "b", "k", "j", "h", "d"],
687
index.get_ancestry_with_ghosts(["b", "d"]))
689
self.assertRaises(RevisionNotPresent,
690
index.get_ancestry_with_ghosts, ["e"])
692
def test_iter_parents(self):
693
transport = MockTransport()
694
index = self.get_knit_index(transport, "filename", "w", create=True)
696
index.add_version('r0', ['option'], (None, 0, 1), [])
698
index.add_version('r1', ['option'], (None, 0, 1), ['r0'])
700
index.add_version('r2', ['option'], (None, 0, 1), ['r1', 'r0'])
702
# cases: each sample data individually:
703
self.assertEqual(set([('r0', ())]),
704
set(index.iter_parents(['r0'])))
705
self.assertEqual(set([('r1', ('r0', ))]),
706
set(index.iter_parents(['r1'])))
707
self.assertEqual(set([('r2', ('r1', 'r0'))]),
708
set(index.iter_parents(['r2'])))
709
# no nodes returned for a missing node
710
self.assertEqual(set(),
711
set(index.iter_parents(['missing'])))
712
# 1 node returned with missing nodes skipped
713
self.assertEqual(set([('r1', ('r0', ))]),
714
set(index.iter_parents(['ghost1', 'r1', 'ghost'])))
716
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
717
set(index.iter_parents(['r0', 'r1'])))
718
# 2 nodes returned, missing skipped
719
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
720
set(index.iter_parents(['a', 'r0', 'b', 'r1', 'c'])))
722
def test_num_versions(self):
723
transport = MockTransport([
726
index = self.get_knit_index(transport, "filename", "r")
728
self.assertEqual(0, index.num_versions())
729
self.assertEqual(0, len(index))
731
index.add_version("a", ["option"], (None, 0, 1), [])
732
self.assertEqual(1, index.num_versions())
733
self.assertEqual(1, len(index))
735
index.add_version("a", ["option2"], (None, 1, 2), [])
736
self.assertEqual(1, index.num_versions())
737
self.assertEqual(1, len(index))
739
index.add_version("b", ["option"], (None, 0, 1), [])
740
self.assertEqual(2, index.num_versions())
741
self.assertEqual(2, len(index))
743
def test_get_versions(self):
744
transport = MockTransport([
747
index = self.get_knit_index(transport, "filename", "r")
749
self.assertEqual([], index.get_versions())
751
index.add_version("a", ["option"], (None, 0, 1), [])
752
self.assertEqual(["a"], index.get_versions())
754
index.add_version("a", ["option"], (None, 0, 1), [])
755
self.assertEqual(["a"], index.get_versions())
757
index.add_version("b", ["option"], (None, 0, 1), [])
758
self.assertEqual(["a", "b"], index.get_versions())
760
def test_add_version(self):
761
transport = MockTransport([
764
index = self.get_knit_index(transport, "filename", "r")
766
index.add_version("a", ["option"], (None, 0, 1), ["b"])
767
self.assertEqual(("append_bytes",
768
("filename", "\na option 0 1 .b :"),
769
{}), transport.calls.pop(0))
770
self.assertTrue(index.has_version("a"))
771
self.assertEqual(1, index.num_versions())
772
self.assertEqual((None, 0, 1), index.get_position("a"))
773
self.assertEqual(["option"], index.get_options("a"))
774
self.assertEqual(["b"], index.get_parents_with_ghosts("a"))
776
index.add_version("a", ["opt"], (None, 1, 2), ["c"])
777
self.assertEqual(("append_bytes",
778
("filename", "\na opt 1 2 .c :"),
779
{}), transport.calls.pop(0))
780
self.assertTrue(index.has_version("a"))
781
self.assertEqual(1, index.num_versions())
782
self.assertEqual((None, 1, 2), index.get_position("a"))
783
self.assertEqual(["opt"], index.get_options("a"))
784
self.assertEqual(["c"], index.get_parents_with_ghosts("a"))
786
index.add_version("b", ["option"], (None, 2, 3), ["a"])
787
self.assertEqual(("append_bytes",
788
("filename", "\nb option 2 3 0 :"),
789
{}), transport.calls.pop(0))
790
self.assertTrue(index.has_version("b"))
791
self.assertEqual(2, index.num_versions())
792
self.assertEqual((None, 2, 3), index.get_position("b"))
793
self.assertEqual(["option"], index.get_options("b"))
794
self.assertEqual(["a"], index.get_parents_with_ghosts("b"))
1057
796
def test_add_versions(self):
1058
797
transport = MockTransport([
1061
800
index = self.get_knit_index(transport, "filename", "r")
1064
call = transport.calls.pop(0)
1065
# call[1][1] is a StringIO - we can't test it by simple equality.
1066
self.assertEqual('put_file_non_atomic', call[0])
1067
self.assertEqual('filename.kndx', call[1][0])
1068
# With no history, _KndxIndex writes a new index:
803
("a", ["option"], (None, 0, 1), ["b"]),
804
("a", ["opt"], (None, 1, 2), ["c"]),
805
("b", ["option"], (None, 2, 3), ["a"])
807
self.assertEqual(("append_bytes", ("filename",
1071
808
"\na option 0 1 .b :"
1072
809
"\na opt 1 2 .c :"
1073
"\nb option 2 3 0 :",
1074
call[1][1].getvalue())
1075
self.assertEqual({'create_parent_dir': True}, call[2])
1076
self.assertIndexIsAB(index)
811
), {}), transport.calls.pop(0))
812
self.assertTrue(index.has_version("a"))
813
self.assertTrue(index.has_version("b"))
814
self.assertEqual(2, index.num_versions())
815
self.assertEqual((None, 1, 2), index.get_position("a"))
816
self.assertEqual((None, 2, 3), index.get_position("b"))
817
self.assertEqual(["opt"], index.get_options("a"))
818
self.assertEqual(["option"], index.get_options("b"))
819
self.assertEqual(["c"], index.get_parents_with_ghosts("a"))
820
self.assertEqual(["a"], index.get_parents_with_ghosts("b"))
1078
822
def test_add_versions_random_id_is_accepted(self):
1079
823
transport = MockTransport([
1082
826
index = self.get_knit_index(transport, "filename", "r")
1083
self.add_a_b(index, random_id=True)
829
("a", ["option"], (None, 0, 1), ["b"]),
830
("a", ["opt"], (None, 1, 2), ["c"]),
831
("b", ["option"], (None, 2, 3), ["a"])
1085
834
def test_delay_create_and_add_versions(self):
1086
835
transport = MockTransport()
1088
index = self.get_knit_index(transport, "filename", "w")
837
index = self.get_knit_index(transport, "filename", "w",
838
create=True, file_mode="wb", create_parent_dir=True,
839
delay_create=True, dir_mode=0777)
1090
840
self.assertEqual([], transport.calls)
1093
#[ {"dir_mode": 0777, "create_parent_dir": True, "mode": "wb"},
1095
# Two calls: one during which we load the existing index (and when its
1096
# missing create it), then a second where we write the contents out.
1097
self.assertEqual(2, len(transport.calls))
1098
call = transport.calls.pop(0)
1099
self.assertEqual('put_file_non_atomic', call[0])
1100
self.assertEqual('filename.kndx', call[1][0])
1101
# With no history, _KndxIndex writes a new index:
1102
self.assertEqual(_KndxIndex.HEADER, call[1][1].getvalue())
1103
self.assertEqual({'create_parent_dir': True}, call[2])
1104
call = transport.calls.pop(0)
1105
# call[1][1] is a StringIO - we can't test it by simple equality.
1106
self.assertEqual('put_file_non_atomic', call[0])
1107
self.assertEqual('filename.kndx', call[1][0])
1108
# With no history, _KndxIndex writes a new index:
843
("a", ["option"], (None, 0, 1), ["b"]),
844
("a", ["opt"], (None, 1, 2), ["c"]),
845
("b", ["option"], (None, 2, 3), ["a"])
847
name, (filename, f), kwargs = transport.calls.pop(0)
848
self.assertEqual("put_file_non_atomic", name)
850
{"dir_mode": 0777, "create_parent_dir": True, "mode": "wb"},
852
self.assertEqual("filename", filename)
1111
855
"\na option 0 1 .b :"
1112
856
"\na opt 1 2 .c :"
1113
857
"\nb option 2 3 0 :",
1114
call[1][1].getvalue())
1115
self.assertEqual({'create_parent_dir': True}, call[2])
1117
def assertTotalBuildSize(self, size, keys, positions):
1118
self.assertEqual(size,
1119
knit._get_total_build_size(None, keys, positions))
1121
def test__get_total_build_size(self):
1123
('a',): (('fulltext', False), (('a',), 0, 100), None),
1124
('b',): (('line-delta', False), (('b',), 100, 21), ('a',)),
1125
('c',): (('line-delta', False), (('c',), 121, 35), ('b',)),
1126
('d',): (('line-delta', False), (('d',), 156, 12), ('b',)),
1128
self.assertTotalBuildSize(100, [('a',)], positions)
1129
self.assertTotalBuildSize(121, [('b',)], positions)
1130
# c needs both a & b
1131
self.assertTotalBuildSize(156, [('c',)], positions)
1132
# we shouldn't count 'b' twice
1133
self.assertTotalBuildSize(156, [('b',), ('c',)], positions)
1134
self.assertTotalBuildSize(133, [('d',)], positions)
1135
self.assertTotalBuildSize(168, [('c',), ('d',)], positions)
860
def test_has_version(self):
861
transport = MockTransport([
865
index = self.get_knit_index(transport, "filename", "r")
867
self.assertTrue(index.has_version("a"))
868
self.assertFalse(index.has_version("b"))
1137
870
def test_get_position(self):
1138
871
transport = MockTransport([
1140
873
"a option 0 1 :",
1141
874
"b option 1 2 :"
1143
876
index = self.get_knit_index(transport, "filename", "r")
1145
self.assertEqual((("a",), 0, 1), index.get_position(("a",)))
1146
self.assertEqual((("b",), 1, 2), index.get_position(("b",)))
878
self.assertEqual((None, 0, 1), index.get_position("a"))
879
self.assertEqual((None, 1, 2), index.get_position("b"))
1148
881
def test_get_method(self):
1149
882
transport = MockTransport([
1151
884
"a fulltext,unknown 0 1 :",
1152
885
"b unknown,line-delta 1 2 :",
1261
1027
def test_invalid_size(self):
1262
1028
transport = MockTransport([
1264
1030
"a option 1 1v :",
1266
index = self.get_knit_index(transport, 'filename', 'r')
1268
self.assertRaises(errors.KnitCorrupt, index.keys)
1033
self.assertRaises(errors.KnitCorrupt,
1034
self.get_knit_index, transport, 'filename', 'r')
1269
1035
except TypeError, e:
1270
1036
if (str(e) == ('exceptions must be strings, classes, or instances,'
1271
' not exceptions.ValueError')):
1037
' not exceptions.ValueError')
1038
and sys.version_info[0:2] >= (2,5)):
1272
1039
self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
1273
1040
' raising new style exceptions with python'
1278
def test_scan_unvalidated_index_not_implemented(self):
1279
transport = MockTransport()
1280
index = self.get_knit_index(transport, 'filename', 'r')
1282
NotImplementedError, index.scan_unvalidated_index,
1283
'dummy graph_index')
1285
NotImplementedError, index.get_missing_compression_parents)
1287
1045
def test_short_line(self):
1288
1046
transport = MockTransport([
1290
1048
"a option 0 10 :",
1291
1049
"b option 10 10 0", # This line isn't terminated, ignored
1293
1051
index = self.get_knit_index(transport, "filename", "r")
1294
self.assertEqual(set([('a',)]), index.keys())
1052
self.assertEqual(['a'], index.get_versions())
1296
1054
def test_skip_incomplete_record(self):
1297
1055
# A line with bogus data should just be skipped
1298
1056
transport = MockTransport([
1300
1058
"a option 0 10 :",
1301
1059
"b option 10 10 0", # This line isn't terminated, ignored
1302
1060
"c option 20 10 0 :", # Properly terminated, and starts with '\n'
1304
1062
index = self.get_knit_index(transport, "filename", "r")
1305
self.assertEqual(set([('a',), ('c',)]), index.keys())
1063
self.assertEqual(['a', 'c'], index.get_versions())
1307
1065
def test_trailing_characters(self):
1308
1066
# A line with bogus data should just be skipped
1309
1067
transport = MockTransport([
1311
1069
"a option 0 10 :",
1312
1070
"b option 10 10 0 :a", # This line has extra trailing characters
1313
1071
"c option 20 10 0 :", # Properly terminated, and starts with '\n'
1315
1073
index = self.get_knit_index(transport, "filename", "r")
1316
self.assertEqual(set([('a',), ('c',)]), index.keys())
1074
self.assertEqual(['a', 'c'], index.get_versions())
1319
1077
class LowLevelKnitIndexTests_c(LowLevelKnitIndexTests):
1321
_test_needs_features = [compiled_knit_feature]
1323
def get_knit_index(self, transport, name, mode):
1324
mapper = ConstantMapper(name)
1325
from bzrlib._knit_load_data_pyx import _load_data_c
1326
self.overrideAttr(knit, '_load_data', _load_data_c)
1327
allow_writes = lambda: mode == 'w'
1328
return _KndxIndex(transport, mapper, lambda:None,
1329
allow_writes, lambda:True)
1332
class Test_KnitAnnotator(TestCaseWithMemoryTransport):
1334
def make_annotator(self):
1335
factory = knit.make_pack_factory(True, True, 1)
1336
vf = factory(self.get_transport())
1337
return knit._KnitAnnotator(vf)
1339
def test__expand_fulltext(self):
1340
ann = self.make_annotator()
1341
rev_key = ('rev-id',)
1342
ann._num_compression_children[rev_key] = 1
1343
res = ann._expand_record(rev_key, (('parent-id',),), None,
1344
['line1\n', 'line2\n'], ('fulltext', True))
1345
# The content object and text lines should be cached appropriately
1346
self.assertEqual(['line1\n', 'line2'], res)
1347
content_obj = ann._content_objects[rev_key]
1348
self.assertEqual(['line1\n', 'line2\n'], content_obj._lines)
1349
self.assertEqual(res, content_obj.text())
1350
self.assertEqual(res, ann._text_cache[rev_key])
1352
def test__expand_delta_comp_parent_not_available(self):
1353
# Parent isn't available yet, so we return nothing, but queue up this
1354
# node for later processing
1355
ann = self.make_annotator()
1356
rev_key = ('rev-id',)
1357
parent_key = ('parent-id',)
1358
record = ['0,1,1\n', 'new-line\n']
1359
details = ('line-delta', False)
1360
res = ann._expand_record(rev_key, (parent_key,), parent_key,
1362
self.assertEqual(None, res)
1363
self.assertTrue(parent_key in ann._pending_deltas)
1364
pending = ann._pending_deltas[parent_key]
1365
self.assertEqual(1, len(pending))
1366
self.assertEqual((rev_key, (parent_key,), record, details), pending[0])
1368
def test__expand_record_tracks_num_children(self):
1369
ann = self.make_annotator()
1370
rev_key = ('rev-id',)
1371
rev2_key = ('rev2-id',)
1372
parent_key = ('parent-id',)
1373
record = ['0,1,1\n', 'new-line\n']
1374
details = ('line-delta', False)
1375
ann._num_compression_children[parent_key] = 2
1376
ann._expand_record(parent_key, (), None, ['line1\n', 'line2\n'],
1377
('fulltext', False))
1378
res = ann._expand_record(rev_key, (parent_key,), parent_key,
1380
self.assertEqual({parent_key: 1}, ann._num_compression_children)
1381
# Expanding the second child should remove the content object, and the
1382
# num_compression_children entry
1383
res = ann._expand_record(rev2_key, (parent_key,), parent_key,
1385
self.assertFalse(parent_key in ann._content_objects)
1386
self.assertEqual({}, ann._num_compression_children)
1387
# We should not cache the content_objects for rev2 and rev, because
1388
# they do not have compression children of their own.
1389
self.assertEqual({}, ann._content_objects)
1391
def test__expand_delta_records_blocks(self):
1392
ann = self.make_annotator()
1393
rev_key = ('rev-id',)
1394
parent_key = ('parent-id',)
1395
record = ['0,1,1\n', 'new-line\n']
1396
details = ('line-delta', True)
1397
ann._num_compression_children[parent_key] = 2
1398
ann._expand_record(parent_key, (), None,
1399
['line1\n', 'line2\n', 'line3\n'],
1400
('fulltext', False))
1401
ann._expand_record(rev_key, (parent_key,), parent_key, record, details)
1402
self.assertEqual({(rev_key, parent_key): [(1, 1, 1), (3, 3, 0)]},
1403
ann._matching_blocks)
1404
rev2_key = ('rev2-id',)
1405
record = ['0,1,1\n', 'new-line\n']
1406
details = ('line-delta', False)
1407
ann._expand_record(rev2_key, (parent_key,), parent_key, record, details)
1408
self.assertEqual([(1, 1, 2), (3, 3, 0)],
1409
ann._matching_blocks[(rev2_key, parent_key)])
1411
def test__get_parent_ann_uses_matching_blocks(self):
1412
ann = self.make_annotator()
1413
rev_key = ('rev-id',)
1414
parent_key = ('parent-id',)
1415
parent_ann = [(parent_key,)]*3
1416
block_key = (rev_key, parent_key)
1417
ann._annotations_cache[parent_key] = parent_ann
1418
ann._matching_blocks[block_key] = [(0, 1, 1), (3, 3, 0)]
1419
# We should not try to access any parent_lines content, because we know
1420
# we already have the matching blocks
1421
par_ann, blocks = ann._get_parent_annotations_and_matches(rev_key,
1422
['1\n', '2\n', '3\n'], parent_key)
1423
self.assertEqual(parent_ann, par_ann)
1424
self.assertEqual([(0, 1, 1), (3, 3, 0)], blocks)
1425
self.assertEqual({}, ann._matching_blocks)
1427
def test__process_pending(self):
1428
ann = self.make_annotator()
1429
rev_key = ('rev-id',)
1432
record = ['0,1,1\n', 'new-line\n']
1433
details = ('line-delta', False)
1434
p1_record = ['line1\n', 'line2\n']
1435
ann._num_compression_children[p1_key] = 1
1436
res = ann._expand_record(rev_key, (p1_key,p2_key), p1_key,
1438
self.assertEqual(None, res)
1439
# self.assertTrue(p1_key in ann._pending_deltas)
1440
self.assertEqual({}, ann._pending_annotation)
1441
# Now insert p1, and we should be able to expand the delta
1442
res = ann._expand_record(p1_key, (), None, p1_record,
1443
('fulltext', False))
1444
self.assertEqual(p1_record, res)
1445
ann._annotations_cache[p1_key] = [(p1_key,)]*2
1446
res = ann._process_pending(p1_key)
1447
self.assertEqual([], res)
1448
self.assertFalse(p1_key in ann._pending_deltas)
1449
self.assertTrue(p2_key in ann._pending_annotation)
1450
self.assertEqual({p2_key: [(rev_key, (p1_key, p2_key))]},
1451
ann._pending_annotation)
1452
# Now fill in parent 2, and pending annotation should be satisfied
1453
res = ann._expand_record(p2_key, (), None, [], ('fulltext', False))
1454
ann._annotations_cache[p2_key] = []
1455
res = ann._process_pending(p2_key)
1456
self.assertEqual([rev_key], res)
1457
self.assertEqual({}, ann._pending_annotation)
1458
self.assertEqual({}, ann._pending_deltas)
1460
def test_record_delta_removes_basis(self):
1461
ann = self.make_annotator()
1462
ann._expand_record(('parent-id',), (), None,
1463
['line1\n', 'line2\n'], ('fulltext', False))
1464
ann._num_compression_children['parent-id'] = 2
1466
def test_annotate_special_text(self):
1467
ann = self.make_annotator()
1469
rev1_key = ('rev-1',)
1470
rev2_key = ('rev-2',)
1471
rev3_key = ('rev-3',)
1472
spec_key = ('special:',)
1473
vf.add_lines(rev1_key, [], ['initial content\n'])
1474
vf.add_lines(rev2_key, [rev1_key], ['initial content\n',
1477
vf.add_lines(rev3_key, [rev1_key], ['initial content\n',
1480
spec_text = ('initial content\n'
1484
ann.add_special_text(spec_key, [rev2_key, rev3_key], spec_text)
1485
anns, lines = ann.annotate(spec_key)
1486
self.assertEqual([(rev1_key,),
1487
(rev2_key, rev3_key),
1491
self.assertEqualDiff(spec_text, ''.join(lines))
1079
_test_needs_features = [CompiledKnitFeature]
1081
def get_knit_index(self, *args, **kwargs):
1082
orig = knit._load_data
1084
knit._load_data = orig
1085
self.addCleanup(reset)
1086
from bzrlib._knit_load_data_c import _load_data_c
1087
knit._load_data = _load_data_c
1088
return _KnitIndex(*args, **kwargs)
1494
1092
class KnitTests(TestCaseWithTransport):
1495
1093
"""Class containing knit test helper routines."""
1497
def make_test_knit(self, annotate=False, name='test'):
1498
mapper = ConstantMapper(name)
1499
return make_file_factory(annotate, mapper)(self.get_transport())
1502
class TestBadShaError(KnitTests):
1503
"""Tests for handling of sha errors."""
1505
def test_sha_exception_has_text(self):
1506
# having the failed text included in the error allows for recovery.
1507
source = self.make_test_knit()
1508
target = self.make_test_knit(name="target")
1509
if not source._max_delta_chain:
1510
raise TestNotApplicable(
1511
"cannot get delta-caused sha failures without deltas.")
1514
broken = ('broken',)
1515
source.add_lines(basis, (), ['foo\n'])
1516
source.add_lines(broken, (basis,), ['foo\n', 'bar\n'])
1517
# Seed target with a bad basis text
1518
target.add_lines(basis, (), ['gam\n'])
1519
target.insert_record_stream(
1520
source.get_record_stream([broken], 'unordered', False))
1521
err = self.assertRaises(errors.KnitCorrupt,
1522
target.get_record_stream([broken], 'unordered', True
1523
).next().get_bytes_as, 'chunked')
1524
self.assertEqual(['gam\n', 'bar\n'], err.content)
1525
# Test for formatting with live data
1526
self.assertStartsWith(str(err), "Knit ")
1095
def make_test_knit(self, annotate=False, delay_create=False, index=None,
1098
factory = KnitPlainFactory()
1101
return KnitVersionedFile(name, get_transport('.'), access_mode='w',
1102
factory=factory, create=True,
1103
delay_create=delay_create, index=index)
1105
def assertRecordContentEqual(self, knit, version_id, candidate_content):
1106
"""Assert that some raw record content matches the raw record content
1107
for a particular version_id in the given knit.
1109
index_memo = knit._index.get_position(version_id)
1110
record = (version_id, index_memo)
1111
[(_, expected_content)] = list(knit._data.read_records_iter_raw([record]))
1112
self.assertEqual(expected_content, candidate_content)
1115
class BasicKnitTests(KnitTests):
1117
def add_stock_one_and_one_a(self, k):
1118
k.add_lines('text-1', [], split_lines(TEXT_1))
1119
k.add_lines('text-1a', ['text-1'], split_lines(TEXT_1A))
1121
def test_knit_constructor(self):
1122
"""Construct empty k"""
1123
self.make_test_knit()
1125
def test_make_explicit_index(self):
1126
"""We can supply an index to use."""
1127
knit = KnitVersionedFile('test', get_transport('.'),
1128
index='strangelove')
1129
self.assertEqual(knit._index, 'strangelove')
1131
def test_knit_add(self):
1132
"""Store one text in knit and retrieve"""
1133
k = self.make_test_knit()
1134
k.add_lines('text-1', [], split_lines(TEXT_1))
1135
self.assertTrue(k.has_version('text-1'))
1136
self.assertEqualDiff(''.join(k.get_lines('text-1')), TEXT_1)
1138
def test_newline_empty_lines(self):
1139
# ensure that ["\n"] round trips ok.
1140
knit = self.make_test_knit()
1141
knit.add_lines('a', [], ["\n"])
1142
knit.add_lines_with_ghosts('b', [], ["\n"])
1143
self.assertEqual(["\n"], knit.get_lines('a'))
1144
self.assertEqual(["\n"], knit.get_lines('b'))
1145
self.assertEqual(['fulltext'], knit._index.get_options('a'))
1146
self.assertEqual(['fulltext'], knit._index.get_options('b'))
1147
knit.add_lines('c', ['a'], ["\n"])
1148
knit.add_lines_with_ghosts('d', ['b'], ["\n"])
1149
self.assertEqual(["\n"], knit.get_lines('c'))
1150
self.assertEqual(["\n"], knit.get_lines('d'))
1151
self.assertEqual(['line-delta'], knit._index.get_options('c'))
1152
self.assertEqual(['line-delta'], knit._index.get_options('d'))
1154
def test_empty_lines(self):
1155
# bizarrely, [] is not listed as having no-eol.
1156
knit = self.make_test_knit()
1157
knit.add_lines('a', [], [])
1158
knit.add_lines_with_ghosts('b', [], [])
1159
self.assertEqual([], knit.get_lines('a'))
1160
self.assertEqual([], knit.get_lines('b'))
1161
self.assertEqual(['fulltext'], knit._index.get_options('a'))
1162
self.assertEqual(['fulltext'], knit._index.get_options('b'))
1163
knit.add_lines('c', ['a'], [])
1164
knit.add_lines_with_ghosts('d', ['b'], [])
1165
self.assertEqual([], knit.get_lines('c'))
1166
self.assertEqual([], knit.get_lines('d'))
1167
self.assertEqual(['line-delta'], knit._index.get_options('c'))
1168
self.assertEqual(['line-delta'], knit._index.get_options('d'))
1170
def test_knit_reload(self):
1171
# test that the content in a reloaded knit is correct
1172
k = self.make_test_knit()
1173
k.add_lines('text-1', [], split_lines(TEXT_1))
1175
k2 = KnitVersionedFile('test', get_transport('.'), access_mode='r', factory=KnitPlainFactory(), create=True)
1176
self.assertTrue(k2.has_version('text-1'))
1177
self.assertEqualDiff(''.join(k2.get_lines('text-1')), TEXT_1)
1179
def test_knit_several(self):
1180
"""Store several texts in a knit"""
1181
k = self.make_test_knit()
1182
k.add_lines('text-1', [], split_lines(TEXT_1))
1183
k.add_lines('text-2', [], split_lines(TEXT_2))
1184
self.assertEqualDiff(''.join(k.get_lines('text-1')), TEXT_1)
1185
self.assertEqualDiff(''.join(k.get_lines('text-2')), TEXT_2)
1187
def test_repeated_add(self):
1188
"""Knit traps attempt to replace existing version"""
1189
k = self.make_test_knit()
1190
k.add_lines('text-1', [], split_lines(TEXT_1))
1191
self.assertRaises(RevisionAlreadyPresent,
1193
'text-1', [], split_lines(TEXT_1))
1195
def test_empty(self):
1196
k = self.make_test_knit(True)
1197
k.add_lines('text-1', [], [])
1198
self.assertEquals(k.get_lines('text-1'), [])
1200
def test_incomplete(self):
1201
"""Test if texts without a ending line-end can be inserted and
1203
k = KnitVersionedFile('test', get_transport('.'), delta=False, create=True)
1204
k.add_lines('text-1', [], ['a\n', 'b' ])
1205
k.add_lines('text-2', ['text-1'], ['a\rb\n', 'b\n'])
1206
# reopening ensures maximum room for confusion
1207
k = KnitVersionedFile('test', get_transport('.'), delta=False, create=True)
1208
self.assertEquals(k.get_lines('text-1'), ['a\n', 'b' ])
1209
self.assertEquals(k.get_lines('text-2'), ['a\rb\n', 'b\n'])
1211
def test_delta(self):
1212
"""Expression of knit delta as lines"""
1213
k = self.make_test_knit()
1214
td = list(line_delta(TEXT_1.splitlines(True),
1215
TEXT_1A.splitlines(True)))
1216
self.assertEqualDiff(''.join(td), delta_1_1a)
1217
out = apply_line_delta(TEXT_1.splitlines(True), td)
1218
self.assertEqualDiff(''.join(out), TEXT_1A)
1220
def test_add_with_parents(self):
1221
"""Store in knit with parents"""
1222
k = self.make_test_knit()
1223
self.add_stock_one_and_one_a(k)
1224
self.assertEquals(k.get_parents('text-1'), [])
1225
self.assertEquals(k.get_parents('text-1a'), ['text-1'])
1227
def test_ancestry(self):
1228
"""Store in knit with parents"""
1229
k = self.make_test_knit()
1230
self.add_stock_one_and_one_a(k)
1231
self.assertEquals(set(k.get_ancestry(['text-1a'])), set(['text-1a', 'text-1']))
1233
def test_add_delta(self):
1234
"""Store in knit with parents"""
1235
k = KnitVersionedFile('test', get_transport('.'), factory=KnitPlainFactory(),
1236
delta=True, create=True)
1237
self.add_stock_one_and_one_a(k)
1239
self.assertEqualDiff(''.join(k.get_lines('text-1a')), TEXT_1A)
1241
def test_add_delta_knit_graph_index(self):
1242
"""Does adding work with a KnitGraphIndex."""
1243
index = InMemoryGraphIndex(2)
1244
knit_index = KnitGraphIndex(index, add_callback=index.add_nodes,
1246
k = KnitVersionedFile('test', get_transport('.'),
1247
delta=True, create=True, index=knit_index)
1248
self.add_stock_one_and_one_a(k)
1250
self.assertEqualDiff(''.join(k.get_lines('text-1a')), TEXT_1A)
1251
# check the index had the right data added.
1252
self.assertEqual(set([
1253
(index, ('text-1', ), ' 0 127', ((), ())),
1254
(index, ('text-1a', ), ' 127 140', ((('text-1', ),), (('text-1', ),))),
1255
]), set(index.iter_all_entries()))
1256
# we should not have a .kndx file
1257
self.assertFalse(get_transport('.').has('test.kndx'))
1259
def test_annotate(self):
1261
k = KnitVersionedFile('knit', get_transport('.'), factory=KnitAnnotateFactory(),
1262
delta=True, create=True)
1263
self.insert_and_test_small_annotate(k)
1265
def insert_and_test_small_annotate(self, k):
1266
"""test annotation with k works correctly."""
1267
k.add_lines('text-1', [], ['a\n', 'b\n'])
1268
k.add_lines('text-2', ['text-1'], ['a\n', 'c\n'])
1270
origins = k.annotate('text-2')
1271
self.assertEquals(origins[0], ('text-1', 'a\n'))
1272
self.assertEquals(origins[1], ('text-2', 'c\n'))
1274
def test_annotate_fulltext(self):
1276
k = KnitVersionedFile('knit', get_transport('.'), factory=KnitAnnotateFactory(),
1277
delta=False, create=True)
1278
self.insert_and_test_small_annotate(k)
1280
def test_annotate_merge_1(self):
1281
k = self.make_test_knit(True)
1282
k.add_lines('text-a1', [], ['a\n', 'b\n'])
1283
k.add_lines('text-a2', [], ['d\n', 'c\n'])
1284
k.add_lines('text-am', ['text-a1', 'text-a2'], ['d\n', 'b\n'])
1285
origins = k.annotate('text-am')
1286
self.assertEquals(origins[0], ('text-a2', 'd\n'))
1287
self.assertEquals(origins[1], ('text-a1', 'b\n'))
1289
def test_annotate_merge_2(self):
1290
k = self.make_test_knit(True)
1291
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1292
k.add_lines('text-a2', [], ['x\n', 'y\n', 'z\n'])
1293
k.add_lines('text-am', ['text-a1', 'text-a2'], ['a\n', 'y\n', 'c\n'])
1294
origins = k.annotate('text-am')
1295
self.assertEquals(origins[0], ('text-a1', 'a\n'))
1296
self.assertEquals(origins[1], ('text-a2', 'y\n'))
1297
self.assertEquals(origins[2], ('text-a1', 'c\n'))
1299
def test_annotate_merge_9(self):
1300
k = self.make_test_knit(True)
1301
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1302
k.add_lines('text-a2', [], ['x\n', 'y\n', 'z\n'])
1303
k.add_lines('text-am', ['text-a1', 'text-a2'], ['k\n', 'y\n', 'c\n'])
1304
origins = k.annotate('text-am')
1305
self.assertEquals(origins[0], ('text-am', 'k\n'))
1306
self.assertEquals(origins[1], ('text-a2', 'y\n'))
1307
self.assertEquals(origins[2], ('text-a1', 'c\n'))
1309
def test_annotate_merge_3(self):
1310
k = self.make_test_knit(True)
1311
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1312
k.add_lines('text-a2', [] ,['x\n', 'y\n', 'z\n'])
1313
k.add_lines('text-am', ['text-a1', 'text-a2'], ['k\n', 'y\n', 'z\n'])
1314
origins = k.annotate('text-am')
1315
self.assertEquals(origins[0], ('text-am', 'k\n'))
1316
self.assertEquals(origins[1], ('text-a2', 'y\n'))
1317
self.assertEquals(origins[2], ('text-a2', 'z\n'))
1319
def test_annotate_merge_4(self):
1320
k = self.make_test_knit(True)
1321
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1322
k.add_lines('text-a2', [], ['x\n', 'y\n', 'z\n'])
1323
k.add_lines('text-a3', ['text-a1'], ['a\n', 'b\n', 'p\n'])
1324
k.add_lines('text-am', ['text-a2', 'text-a3'], ['a\n', 'b\n', 'z\n'])
1325
origins = k.annotate('text-am')
1326
self.assertEquals(origins[0], ('text-a1', 'a\n'))
1327
self.assertEquals(origins[1], ('text-a1', 'b\n'))
1328
self.assertEquals(origins[2], ('text-a2', 'z\n'))
1330
def test_annotate_merge_5(self):
1331
k = self.make_test_knit(True)
1332
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1333
k.add_lines('text-a2', [], ['d\n', 'e\n', 'f\n'])
1334
k.add_lines('text-a3', [], ['x\n', 'y\n', 'z\n'])
1335
k.add_lines('text-am',
1336
['text-a1', 'text-a2', 'text-a3'],
1337
['a\n', 'e\n', 'z\n'])
1338
origins = k.annotate('text-am')
1339
self.assertEquals(origins[0], ('text-a1', 'a\n'))
1340
self.assertEquals(origins[1], ('text-a2', 'e\n'))
1341
self.assertEquals(origins[2], ('text-a3', 'z\n'))
1343
def test_annotate_file_cherry_pick(self):
1344
k = self.make_test_knit(True)
1345
k.add_lines('text-1', [], ['a\n', 'b\n', 'c\n'])
1346
k.add_lines('text-2', ['text-1'], ['d\n', 'e\n', 'f\n'])
1347
k.add_lines('text-3', ['text-2', 'text-1'], ['a\n', 'b\n', 'c\n'])
1348
origins = k.annotate('text-3')
1349
self.assertEquals(origins[0], ('text-1', 'a\n'))
1350
self.assertEquals(origins[1], ('text-1', 'b\n'))
1351
self.assertEquals(origins[2], ('text-1', 'c\n'))
1353
def _test_join_with_factories(self, k1_factory, k2_factory):
1354
k1 = KnitVersionedFile('test1', get_transport('.'), factory=k1_factory, create=True)
1355
k1.add_lines('text-a', [], ['a1\n', 'a2\n', 'a3\n'])
1356
k1.add_lines('text-b', ['text-a'], ['a1\n', 'b2\n', 'a3\n'])
1357
k1.add_lines('text-c', [], ['c1\n', 'c2\n', 'c3\n'])
1358
k1.add_lines('text-d', ['text-c'], ['c1\n', 'd2\n', 'd3\n'])
1359
k1.add_lines('text-m', ['text-b', 'text-d'], ['a1\n', 'b2\n', 'd3\n'])
1360
k2 = KnitVersionedFile('test2', get_transport('.'), factory=k2_factory, create=True)
1361
count = k2.join(k1, version_ids=['text-m'])
1362
self.assertEquals(count, 5)
1363
self.assertTrue(k2.has_version('text-a'))
1364
self.assertTrue(k2.has_version('text-c'))
1365
origins = k2.annotate('text-m')
1366
self.assertEquals(origins[0], ('text-a', 'a1\n'))
1367
self.assertEquals(origins[1], ('text-b', 'b2\n'))
1368
self.assertEquals(origins[2], ('text-d', 'd3\n'))
1370
def test_knit_join_plain_to_plain(self):
1371
"""Test joining a plain knit with a plain knit."""
1372
self._test_join_with_factories(KnitPlainFactory(), KnitPlainFactory())
1374
def test_knit_join_anno_to_anno(self):
1375
"""Test joining an annotated knit with an annotated knit."""
1376
self._test_join_with_factories(None, None)
1378
def test_knit_join_anno_to_plain(self):
1379
"""Test joining an annotated knit with a plain knit."""
1380
self._test_join_with_factories(None, KnitPlainFactory())
1382
def test_knit_join_plain_to_anno(self):
1383
"""Test joining a plain knit with an annotated knit."""
1384
self._test_join_with_factories(KnitPlainFactory(), None)
1386
def test_reannotate(self):
1387
k1 = KnitVersionedFile('knit1', get_transport('.'),
1388
factory=KnitAnnotateFactory(), create=True)
1390
k1.add_lines('text-a', [], ['a\n', 'b\n'])
1392
k1.add_lines('text-b', ['text-a'], ['a\n', 'c\n'])
1394
k2 = KnitVersionedFile('test2', get_transport('.'),
1395
factory=KnitAnnotateFactory(), create=True)
1396
k2.join(k1, version_ids=['text-b'])
1399
k1.add_lines('text-X', ['text-b'], ['a\n', 'b\n'])
1401
k2.add_lines('text-c', ['text-b'], ['z\n', 'c\n'])
1403
k2.add_lines('text-Y', ['text-b'], ['b\n', 'c\n'])
1405
# test-c will have index 3
1406
k1.join(k2, version_ids=['text-c'])
1408
lines = k1.get_lines('text-c')
1409
self.assertEquals(lines, ['z\n', 'c\n'])
1411
origins = k1.annotate('text-c')
1412
self.assertEquals(origins[0], ('text-c', 'z\n'))
1413
self.assertEquals(origins[1], ('text-b', 'c\n'))
1415
def test_get_line_delta_texts(self):
1416
"""Make sure we can call get_texts on text with reused line deltas"""
1417
k1 = KnitVersionedFile('test1', get_transport('.'),
1418
factory=KnitPlainFactory(), create=True)
1423
parents = ['%d' % (t-1)]
1424
k1.add_lines('%d' % t, parents, ['hello\n'] * t)
1425
k1.get_texts(('%d' % t) for t in range(3))
1427
def test_iter_lines_reads_in_order(self):
1428
instrumented_t = get_transport('trace+memory:///')
1429
k1 = KnitVersionedFile('id', instrumented_t, create=True, delta=True)
1430
self.assertEqual([('get', 'id.kndx',)], instrumented_t._activity)
1431
# add texts with no required ordering
1432
k1.add_lines('base', [], ['text\n'])
1433
k1.add_lines('base2', [], ['text2\n'])
1435
# clear the logged activity, but preserve the list instance in case of
1436
# clones pointing at it.
1437
del instrumented_t._activity[:]
1438
# request a last-first iteration
1439
results = list(k1.iter_lines_added_or_present_in_versions(
1442
[('readv', 'id.knit', [(0, 87), (87, 89)], False, None)],
1443
instrumented_t._activity)
1444
self.assertEqual([('text\n', 'base'), ('text2\n', 'base2')], results)
1446
def test_create_empty_annotated(self):
1447
k1 = self.make_test_knit(True)
1449
k1.add_lines('text-a', [], ['a\n', 'b\n'])
1450
k2 = k1.create_empty('t', MemoryTransport())
1451
self.assertTrue(isinstance(k2.factory, KnitAnnotateFactory))
1452
self.assertEqual(k1.delta, k2.delta)
1453
# the generic test checks for empty content and file class
1455
def test_knit_format(self):
1456
# this tests that a new knit index file has the expected content
1457
# and that is writes the data we expect as records are added.
1458
knit = self.make_test_knit(True)
1459
# Now knit files are not created until we first add data to them
1460
self.assertFileEqual("# bzr knit index 8\n", 'test.kndx')
1461
knit.add_lines_with_ghosts('revid', ['a_ghost'], ['a\n'])
1462
self.assertFileEqual(
1463
"# bzr knit index 8\n"
1465
"revid fulltext 0 84 .a_ghost :",
1467
knit.add_lines_with_ghosts('revid2', ['revid'], ['a\n'])
1468
self.assertFileEqual(
1469
"# bzr knit index 8\n"
1470
"\nrevid fulltext 0 84 .a_ghost :"
1471
"\nrevid2 line-delta 84 82 0 :",
1473
# we should be able to load this file again
1474
knit = KnitVersionedFile('test', get_transport('.'), access_mode='r')
1475
self.assertEqual(['revid', 'revid2'], knit.versions())
1476
# write a short write to the file and ensure that its ignored
1477
indexfile = file('test.kndx', 'ab')
1478
indexfile.write('\nrevid3 line-delta 166 82 1 2 3 4 5 .phwoar:demo ')
1480
# we should be able to load this file again
1481
knit = KnitVersionedFile('test', get_transport('.'), access_mode='w')
1482
self.assertEqual(['revid', 'revid2'], knit.versions())
1483
# and add a revision with the same id the failed write had
1484
knit.add_lines('revid3', ['revid2'], ['a\n'])
1485
# and when reading it revid3 should now appear.
1486
knit = KnitVersionedFile('test', get_transport('.'), access_mode='r')
1487
self.assertEqual(['revid', 'revid2', 'revid3'], knit.versions())
1488
self.assertEqual(['revid2'], knit.get_parents('revid3'))
1490
def test_delay_create(self):
1491
"""Test that passing delay_create=True creates files late"""
1492
knit = self.make_test_knit(annotate=True, delay_create=True)
1493
self.failIfExists('test.knit')
1494
self.failIfExists('test.kndx')
1495
knit.add_lines_with_ghosts('revid', ['a_ghost'], ['a\n'])
1496
self.failUnlessExists('test.knit')
1497
self.assertFileEqual(
1498
"# bzr knit index 8\n"
1500
"revid fulltext 0 84 .a_ghost :",
1503
def test_create_parent_dir(self):
1504
"""create_parent_dir can create knits in nonexistant dirs"""
1505
# Has no effect if we don't set 'delay_create'
1506
trans = get_transport('.')
1507
self.assertRaises(NoSuchFile, KnitVersionedFile, 'dir/test',
1508
trans, access_mode='w', factory=None,
1509
create=True, create_parent_dir=True)
1510
# Nothing should have changed yet
1511
knit = KnitVersionedFile('dir/test', trans, access_mode='w',
1512
factory=None, create=True,
1513
create_parent_dir=True,
1515
self.failIfExists('dir/test.knit')
1516
self.failIfExists('dir/test.kndx')
1517
self.failIfExists('dir')
1518
knit.add_lines('revid', [], ['a\n'])
1519
self.failUnlessExists('dir')
1520
self.failUnlessExists('dir/test.knit')
1521
self.assertFileEqual(
1522
"# bzr knit index 8\n"
1524
"revid fulltext 0 84 :",
1527
def test_create_mode_700(self):
1528
trans = get_transport('.')
1529
if not trans._can_roundtrip_unix_modebits():
1530
# Can't roundtrip, so no need to run this test
1532
knit = KnitVersionedFile('dir/test', trans, access_mode='w',
1533
factory=None, create=True,
1534
create_parent_dir=True,
1538
knit.add_lines('revid', [], ['a\n'])
1539
self.assertTransportMode(trans, 'dir', 0700)
1540
self.assertTransportMode(trans, 'dir/test.knit', 0600)
1541
self.assertTransportMode(trans, 'dir/test.kndx', 0600)
1543
def test_create_mode_770(self):
1544
trans = get_transport('.')
1545
if not trans._can_roundtrip_unix_modebits():
1546
# Can't roundtrip, so no need to run this test
1548
knit = KnitVersionedFile('dir/test', trans, access_mode='w',
1549
factory=None, create=True,
1550
create_parent_dir=True,
1554
knit.add_lines('revid', [], ['a\n'])
1555
self.assertTransportMode(trans, 'dir', 0770)
1556
self.assertTransportMode(trans, 'dir/test.knit', 0660)
1557
self.assertTransportMode(trans, 'dir/test.kndx', 0660)
1559
def test_create_mode_777(self):
1560
trans = get_transport('.')
1561
if not trans._can_roundtrip_unix_modebits():
1562
# Can't roundtrip, so no need to run this test
1564
knit = KnitVersionedFile('dir/test', trans, access_mode='w',
1565
factory=None, create=True,
1566
create_parent_dir=True,
1570
knit.add_lines('revid', [], ['a\n'])
1571
self.assertTransportMode(trans, 'dir', 0777)
1572
self.assertTransportMode(trans, 'dir/test.knit', 0666)
1573
self.assertTransportMode(trans, 'dir/test.kndx', 0666)
1575
def test_plan_merge(self):
1576
my_knit = self.make_test_knit(annotate=True)
1577
my_knit.add_lines('text1', [], split_lines(TEXT_1))
1578
my_knit.add_lines('text1a', ['text1'], split_lines(TEXT_1A))
1579
my_knit.add_lines('text1b', ['text1'], split_lines(TEXT_1B))
1580
plan = list(my_knit.plan_merge('text1a', 'text1b'))
1581
for plan_line, expected_line in zip(plan, AB_MERGE):
1582
self.assertEqual(plan_line, expected_line)
1584
def test_get_stream_empty(self):
1585
"""Get a data stream for an empty knit file."""
1586
k1 = self.make_test_knit()
1587
format, data_list, reader_callable = k1.get_data_stream([])
1588
self.assertEqual('knit-plain', format)
1589
self.assertEqual([], data_list)
1590
content = reader_callable(None)
1591
self.assertEqual('', content)
1592
self.assertIsInstance(content, str)
1594
def test_get_stream_one_version(self):
1595
"""Get a data stream for a single record out of a knit containing just
1598
k1 = self.make_test_knit()
1600
('text-a', [], TEXT_1),
1602
expected_data_list = [
1603
# version, options, length, parents
1604
('text-a', ['fulltext'], 122, []),
1606
for version_id, parents, lines in test_data:
1607
k1.add_lines(version_id, parents, split_lines(lines))
1609
format, data_list, reader_callable = k1.get_data_stream(['text-a'])
1610
self.assertEqual('knit-plain', format)
1611
self.assertEqual(expected_data_list, data_list)
1612
# There's only one record in the knit, so the content should be the
1613
# entire knit data file's contents.
1614
self.assertEqual(k1.transport.get_bytes(k1._data._access._filename),
1615
reader_callable(None))
1617
def test_get_stream_get_one_version_of_many(self):
1618
"""Get a data stream for just one version out of a knit containing many
1621
k1 = self.make_test_knit()
1622
# Insert the same data as test_knit_join, as they seem to cover a range
1623
# of cases (no parents, one parent, multiple parents).
1625
('text-a', [], TEXT_1),
1626
('text-b', ['text-a'], TEXT_1),
1627
('text-c', [], TEXT_1),
1628
('text-d', ['text-c'], TEXT_1),
1629
('text-m', ['text-b', 'text-d'], TEXT_1),
1631
expected_data_list = [
1632
# version, options, length, parents
1633
('text-m', ['line-delta'], 84, ['text-b', 'text-d']),
1635
for version_id, parents, lines in test_data:
1636
k1.add_lines(version_id, parents, split_lines(lines))
1638
format, data_list, reader_callable = k1.get_data_stream(['text-m'])
1639
self.assertEqual('knit-plain', format)
1640
self.assertEqual(expected_data_list, data_list)
1641
self.assertRecordContentEqual(k1, 'text-m', reader_callable(None))
1643
def test_get_data_stream_unordered_index(self):
1644
"""Get a data stream when the knit index reports versions out of order.
1646
https://bugs.launchpad.net/bzr/+bug/164637
1648
k1 = self.make_test_knit()
1650
('text-a', [], TEXT_1),
1651
('text-b', ['text-a'], TEXT_1),
1652
('text-c', [], TEXT_1),
1653
('text-d', ['text-c'], TEXT_1),
1654
('text-m', ['text-b', 'text-d'], TEXT_1),
1656
for version_id, parents, lines in test_data:
1657
k1.add_lines(version_id, parents, split_lines(lines))
1658
# monkey-patch versions method to return out of order, as if coming
1659
# from multiple independently indexed packs
1660
original_versions = k1.versions
1661
k1.versions = lambda: reversed(original_versions())
1662
expected_data_list = [
1663
('text-a', ['fulltext'], 122, []),
1664
('text-b', ['line-delta'], 84, ['text-a'])]
1665
# now check the fulltext is first and the delta second
1666
format, data_list, _ = k1.get_data_stream(['text-a', 'text-b'])
1667
self.assertEqual('knit-plain', format)
1668
self.assertEqual(expected_data_list, data_list)
1669
# and that's true if we ask for them in the opposite order too
1670
format, data_list, _ = k1.get_data_stream(['text-b', 'text-a'])
1671
self.assertEqual(expected_data_list, data_list)
1672
# also try requesting more versions
1673
format, data_list, _ = k1.get_data_stream([
1674
'text-m', 'text-b', 'text-a'])
1676
('text-a', ['fulltext'], 122, []),
1677
('text-b', ['line-delta'], 84, ['text-a']),
1678
('text-m', ['line-delta'], 84, ['text-b', 'text-d']),
1681
def test_get_stream_ghost_parent(self):
1682
"""Get a data stream for a version with a ghost parent."""
1683
k1 = self.make_test_knit()
1685
k1.add_lines('text-a', [], split_lines(TEXT_1))
1686
k1.add_lines_with_ghosts('text-b', ['text-a', 'text-ghost'],
1687
split_lines(TEXT_1))
1689
expected_data_list = [
1690
# version, options, length, parents
1691
('text-b', ['line-delta'], 84, ['text-a', 'text-ghost']),
1694
format, data_list, reader_callable = k1.get_data_stream(['text-b'])
1695
self.assertEqual('knit-plain', format)
1696
self.assertEqual(expected_data_list, data_list)
1697
self.assertRecordContentEqual(k1, 'text-b', reader_callable(None))
1699
def test_get_stream_get_multiple_records(self):
1700
"""Get a stream for multiple records of a knit."""
1701
k1 = self.make_test_knit()
1702
# Insert the same data as test_knit_join, as they seem to cover a range
1703
# of cases (no parents, one parent, multiple parents).
1705
('text-a', [], TEXT_1),
1706
('text-b', ['text-a'], TEXT_1),
1707
('text-c', [], TEXT_1),
1708
('text-d', ['text-c'], TEXT_1),
1709
('text-m', ['text-b', 'text-d'], TEXT_1),
1711
for version_id, parents, lines in test_data:
1712
k1.add_lines(version_id, parents, split_lines(lines))
1714
# This test is actually a bit strict as the order in which they're
1715
# returned is not defined. This matches the current (deterministic)
1717
expected_data_list = [
1718
# version, options, length, parents
1719
('text-d', ['line-delta'], 84, ['text-c']),
1720
('text-b', ['line-delta'], 84, ['text-a']),
1722
# Note that even though we request the revision IDs in a particular
1723
# order, the data stream may return them in any order it likes. In this
1724
# case, they'll be in the order they were inserted into the knit.
1725
format, data_list, reader_callable = k1.get_data_stream(
1726
['text-d', 'text-b'])
1727
self.assertEqual('knit-plain', format)
1728
self.assertEqual(expected_data_list, data_list)
1729
# must match order they're returned
1730
self.assertRecordContentEqual(k1, 'text-d', reader_callable(84))
1731
self.assertRecordContentEqual(k1, 'text-b', reader_callable(84))
1732
self.assertEqual('', reader_callable(None),
1733
"There should be no more bytes left to read.")
1735
def test_get_stream_all(self):
1736
"""Get a data stream for all the records in a knit.
1738
This exercises fulltext records, line-delta records, records with
1739
various numbers of parents, and reading multiple records out of the
1740
callable. These cases ought to all be exercised individually by the
1741
other test_get_stream_* tests; this test is basically just paranoia.
1743
k1 = self.make_test_knit()
1744
# Insert the same data as test_knit_join, as they seem to cover a range
1745
# of cases (no parents, one parent, multiple parents).
1747
('text-a', [], TEXT_1),
1748
('text-b', ['text-a'], TEXT_1),
1749
('text-c', [], TEXT_1),
1750
('text-d', ['text-c'], TEXT_1),
1751
('text-m', ['text-b', 'text-d'], TEXT_1),
1753
for version_id, parents, lines in test_data:
1754
k1.add_lines(version_id, parents, split_lines(lines))
1756
# This test is actually a bit strict as the order in which they're
1757
# returned is not defined. This matches the current (deterministic)
1759
expected_data_list = [
1760
# version, options, length, parents
1761
('text-a', ['fulltext'], 122, []),
1762
('text-b', ['line-delta'], 84, ['text-a']),
1763
('text-m', ['line-delta'], 84, ['text-b', 'text-d']),
1764
('text-c', ['fulltext'], 121, []),
1765
('text-d', ['line-delta'], 84, ['text-c']),
1767
format, data_list, reader_callable = k1.get_data_stream(
1768
['text-a', 'text-b', 'text-c', 'text-d', 'text-m'])
1769
self.assertEqual('knit-plain', format)
1770
self.assertEqual(expected_data_list, data_list)
1771
for version_id, options, length, parents in expected_data_list:
1772
bytes = reader_callable(length)
1773
self.assertRecordContentEqual(k1, version_id, bytes)
1775
def assertKnitFilesEqual(self, knit1, knit2):
1776
"""Assert that the contents of the index and data files of two knits are
1780
knit1.transport.get_bytes(knit1._data._access._filename),
1781
knit2.transport.get_bytes(knit2._data._access._filename))
1783
knit1.transport.get_bytes(knit1._index._filename),
1784
knit2.transport.get_bytes(knit2._index._filename))
1786
def assertKnitValuesEqual(self, left, right):
1787
"""Assert that the texts, annotations and graph of left and right are
1790
self.assertEqual(set(left.versions()), set(right.versions()))
1791
for version in left.versions():
1792
self.assertEqual(left.get_parents_with_ghosts(version),
1793
right.get_parents_with_ghosts(version))
1794
self.assertEqual(left.get_lines(version),
1795
right.get_lines(version))
1796
self.assertEqual(left.annotate(version),
1797
right.annotate(version))
1799
def test_insert_data_stream_empty(self):
1800
"""Inserting a data stream with no records should not put any data into
1803
k1 = self.make_test_knit()
1804
k1.insert_data_stream(
1805
(k1.get_format_signature(), [], lambda ignored: ''))
1806
self.assertEqual('', k1.transport.get_bytes(k1._data._access._filename),
1807
"The .knit should be completely empty.")
1808
self.assertEqual(k1._index.HEADER,
1809
k1.transport.get_bytes(k1._index._filename),
1810
"The .kndx should have nothing apart from the header.")
1812
def test_insert_data_stream_one_record(self):
1813
"""Inserting a data stream with one record from a knit with one record
1814
results in byte-identical files.
1816
source = self.make_test_knit(name='source')
1817
source.add_lines('text-a', [], split_lines(TEXT_1))
1818
data_stream = source.get_data_stream(['text-a'])
1819
target = self.make_test_knit(name='target')
1820
target.insert_data_stream(data_stream)
1821
self.assertKnitFilesEqual(source, target)
1823
def test_insert_data_stream_annotated_unannotated(self):
1824
"""Inserting an annotated datastream to an unannotated knit works."""
1825
# case one - full texts.
1826
source = self.make_test_knit(name='source', annotate=True)
1827
target = self.make_test_knit(name='target', annotate=False)
1828
source.add_lines('text-a', [], split_lines(TEXT_1))
1829
target.insert_data_stream(source.get_data_stream(['text-a']))
1830
self.assertKnitValuesEqual(source, target)
1831
# case two - deltas.
1832
source.add_lines('text-b', ['text-a'], split_lines(TEXT_2))
1833
target.insert_data_stream(source.get_data_stream(['text-b']))
1834
self.assertKnitValuesEqual(source, target)
1836
def test_insert_data_stream_unannotated_annotated(self):
1837
"""Inserting an unannotated datastream to an annotated knit works."""
1838
# case one - full texts.
1839
source = self.make_test_knit(name='source', annotate=False)
1840
target = self.make_test_knit(name='target', annotate=True)
1841
source.add_lines('text-a', [], split_lines(TEXT_1))
1842
target.insert_data_stream(source.get_data_stream(['text-a']))
1843
self.assertKnitValuesEqual(source, target)
1844
# case two - deltas.
1845
source.add_lines('text-b', ['text-a'], split_lines(TEXT_2))
1846
target.insert_data_stream(source.get_data_stream(['text-b']))
1847
self.assertKnitValuesEqual(source, target)
1849
def test_insert_data_stream_records_already_present(self):
1850
"""Insert a data stream where some records are alreday present in the
1851
target, and some not. Only the new records are inserted.
1853
source = self.make_test_knit(name='source')
1854
target = self.make_test_knit(name='target')
1855
# Insert 'text-a' into both source and target
1856
source.add_lines('text-a', [], split_lines(TEXT_1))
1857
target.insert_data_stream(source.get_data_stream(['text-a']))
1858
# Insert 'text-b' into just the source.
1859
source.add_lines('text-b', ['text-a'], split_lines(TEXT_1))
1860
# Get a data stream of both text-a and text-b, and insert it.
1861
data_stream = source.get_data_stream(['text-a', 'text-b'])
1862
target.insert_data_stream(data_stream)
1863
# The source and target will now be identical. This means the text-a
1864
# record was not added a second time.
1865
self.assertKnitFilesEqual(source, target)
1867
def test_insert_data_stream_multiple_records(self):
1868
"""Inserting a data stream of all records from a knit with multiple
1869
records results in byte-identical files.
1871
source = self.make_test_knit(name='source')
1872
source.add_lines('text-a', [], split_lines(TEXT_1))
1873
source.add_lines('text-b', ['text-a'], split_lines(TEXT_1))
1874
source.add_lines('text-c', [], split_lines(TEXT_1))
1875
data_stream = source.get_data_stream(['text-a', 'text-b', 'text-c'])
1877
target = self.make_test_knit(name='target')
1878
target.insert_data_stream(data_stream)
1880
self.assertKnitFilesEqual(source, target)
1882
def test_insert_data_stream_ghost_parent(self):
1883
"""Insert a data stream with a record that has a ghost parent."""
1884
# Make a knit with a record, text-a, that has a ghost parent.
1885
source = self.make_test_knit(name='source')
1886
source.add_lines_with_ghosts('text-a', ['text-ghost'],
1887
split_lines(TEXT_1))
1888
data_stream = source.get_data_stream(['text-a'])
1890
target = self.make_test_knit(name='target')
1891
target.insert_data_stream(data_stream)
1893
self.assertKnitFilesEqual(source, target)
1895
# The target knit object is in a consistent state, i.e. the record we
1896
# just added is immediately visible.
1897
self.assertTrue(target.has_version('text-a'))
1898
self.assertTrue(target.has_ghost('text-ghost'))
1899
self.assertEqual(split_lines(TEXT_1), target.get_lines('text-a'))
1901
def test_insert_data_stream_inconsistent_version_lines(self):
1902
"""Inserting a data stream which has different content for a version_id
1903
than already exists in the knit will raise KnitCorrupt.
1905
source = self.make_test_knit(name='source')
1906
target = self.make_test_knit(name='target')
1907
# Insert a different 'text-a' into both source and target
1908
source.add_lines('text-a', [], split_lines(TEXT_1))
1909
target.add_lines('text-a', [], split_lines(TEXT_2))
1910
# Insert a data stream with conflicting content into the target
1911
data_stream = source.get_data_stream(['text-a'])
1913
errors.KnitCorrupt, target.insert_data_stream, data_stream)
1915
def test_insert_data_stream_inconsistent_version_parents(self):
1916
"""Inserting a data stream which has different parents for a version_id
1917
than already exists in the knit will raise KnitCorrupt.
1919
source = self.make_test_knit(name='source')
1920
target = self.make_test_knit(name='target')
1921
# Insert a different 'text-a' into both source and target. They differ
1922
# only by the parents list, the content is the same.
1923
source.add_lines_with_ghosts('text-a', [], split_lines(TEXT_1))
1924
target.add_lines_with_ghosts('text-a', ['a-ghost'], split_lines(TEXT_1))
1925
# Insert a data stream with conflicting content into the target
1926
data_stream = source.get_data_stream(['text-a'])
1928
errors.KnitCorrupt, target.insert_data_stream, data_stream)
1930
def test_insert_data_stream_unknown_format(self):
1931
"""A data stream in a different format to the target knit cannot be
1934
It will raise KnitDataStreamUnknown because the fallback code will fail
1935
to make a knit. In future we may need KnitDataStreamIncompatible again,
1936
for more exotic cases.
1938
data_stream = ('fake-format-signature', [], lambda _: '')
1939
target = self.make_test_knit(name='target')
1941
errors.KnitDataStreamUnknown,
1942
target.insert_data_stream, data_stream)
1944
# * test that a stream of "already present version, then new version"
1945
# inserts correctly.
1948
def assertMadeStreamKnit(self, source_knit, versions, target_knit):
1949
"""Assert that a knit made from a stream is as expected."""
1950
a_stream = source_knit.get_data_stream(versions)
1951
expected_data = a_stream[2](None)
1952
a_stream = source_knit.get_data_stream(versions)
1953
a_knit = target_knit._knit_from_datastream(a_stream)
1954
self.assertEqual(source_knit.factory.__class__,
1955
a_knit.factory.__class__)
1956
self.assertIsInstance(a_knit._data._access, _StreamAccess)
1957
self.assertIsInstance(a_knit._index, _StreamIndex)
1958
self.assertEqual(a_knit._index.data_list, a_stream[1])
1959
self.assertEqual(a_knit._data._access.data, expected_data)
1960
self.assertEqual(a_knit.filename, target_knit.filename)
1961
self.assertEqual(a_knit.transport, target_knit.transport)
1962
self.assertEqual(a_knit._index, a_knit._data._access.stream_index)
1963
self.assertEqual(target_knit, a_knit._data._access.backing_knit)
1964
self.assertIsInstance(a_knit._data._access.orig_factory,
1965
source_knit.factory.__class__)
1967
def test__knit_from_data_stream_empty(self):
1968
"""Create a knit object from a datastream."""
1969
annotated = self.make_test_knit(name='source', annotate=True)
1970
plain = self.make_test_knit(name='target', annotate=False)
1971
# case 1: annotated source
1972
self.assertMadeStreamKnit(annotated, [], annotated)
1973
self.assertMadeStreamKnit(annotated, [], plain)
1974
# case 2: plain source
1975
self.assertMadeStreamKnit(plain, [], annotated)
1976
self.assertMadeStreamKnit(plain, [], plain)
1978
def test__knit_from_data_stream_unknown_format(self):
1979
annotated = self.make_test_knit(name='source', annotate=True)
1980
self.assertRaises(errors.KnitDataStreamUnknown,
1981
annotated._knit_from_datastream, ("unknown", None, None))
1993
Banana cup cake recipe
1999
- self-raising flour
2003
Banana cup cake recipe
2005
- bananas (do not use plantains!!!)
2012
Banana cup cake recipe
2015
- self-raising flour
2028
AB_MERGE_TEXT="""unchanged|Banana cup cake recipe
2033
new-b|- bananas (do not use plantains!!!)
2034
unchanged|- broken tea cups
2035
new-a|- self-raising flour
2038
AB_MERGE=[tuple(l.split('|')) for l in AB_MERGE_TEXT.splitlines(True)]
2041
def line_delta(from_lines, to_lines):
2042
"""Generate line-based delta from one text to another"""
2043
s = difflib.SequenceMatcher(None, from_lines, to_lines)
2044
for op in s.get_opcodes():
2045
if op[0] == 'equal':
2047
yield '%d,%d,%d\n' % (op[1], op[2], op[4]-op[3])
2048
for i in range(op[3], op[4]):
2052
def apply_line_delta(basis_lines, delta_lines):
2053
"""Apply a line-based perfect diff
2055
basis_lines -- text to apply the patch to
2056
delta_lines -- diff instructions and content
2058
out = basis_lines[:]
2061
while i < len(delta_lines):
2063
a, b, c = map(long, l.split(','))
2065
out[offset+a:offset+b] = delta_lines[i:i+c]
2067
offset = offset + (b - a) + c
2071
class TestWeaveToKnit(KnitTests):
2073
def test_weave_to_knit_matches(self):
2074
# check that the WeaveToKnit is_compatible function
2075
# registers True for a Weave to a Knit.
2077
k = self.make_test_knit()
2078
self.failUnless(WeaveToKnit.is_compatible(w, k))
2079
self.failIf(WeaveToKnit.is_compatible(k, w))
2080
self.failIf(WeaveToKnit.is_compatible(w, w))
2081
self.failIf(WeaveToKnit.is_compatible(k, k))
2084
class TestKnitCaching(KnitTests):
2086
def create_knit(self):
2087
k = self.make_test_knit(True)
2088
k.add_lines('text-1', [], split_lines(TEXT_1))
2089
k.add_lines('text-2', [], split_lines(TEXT_2))
2092
def test_no_caching(self):
2093
k = self.create_knit()
2094
# Nothing should be cached without setting 'enable_cache'
2095
self.assertEqual({}, k._data._cache)
2097
def test_cache_data_read_raw(self):
2098
k = self.create_knit()
2100
# Now cache and read
2103
def read_one_raw(version):
2104
pos_map = k._get_components_positions([version])
2105
method, index_memo, next = pos_map[version]
2106
lst = list(k._data.read_records_iter_raw([(version, index_memo)]))
2107
self.assertEqual(1, len(lst))
2110
val = read_one_raw('text-1')
2111
self.assertEqual({'text-1':val[1]}, k._data._cache)
2114
# After clear, new reads are not cached
2115
self.assertEqual({}, k._data._cache)
2117
val2 = read_one_raw('text-1')
2118
self.assertEqual(val, val2)
2119
self.assertEqual({}, k._data._cache)
2121
def test_cache_data_read(self):
2122
k = self.create_knit()
2124
def read_one(version):
2125
pos_map = k._get_components_positions([version])
2126
method, index_memo, next = pos_map[version]
2127
lst = list(k._data.read_records_iter([(version, index_memo)]))
2128
self.assertEqual(1, len(lst))
2131
# Now cache and read
2134
val = read_one('text-2')
2135
self.assertEqual(['text-2'], k._data._cache.keys())
2136
self.assertEqual('text-2', val[0])
2137
content, digest = k._data._parse_record('text-2',
2138
k._data._cache['text-2'])
2139
self.assertEqual(content, val[1])
2140
self.assertEqual(digest, val[2])
2143
self.assertEqual({}, k._data._cache)
2145
val2 = read_one('text-2')
2146
self.assertEqual(val, val2)
2147
self.assertEqual({}, k._data._cache)
2149
def test_cache_read(self):
2150
k = self.create_knit()
2153
text = k.get_text('text-1')
2154
self.assertEqual(TEXT_1, text)
2155
self.assertEqual(['text-1'], k._data._cache.keys())
2158
self.assertEqual({}, k._data._cache)
2160
text = k.get_text('text-1')
2161
self.assertEqual(TEXT_1, text)
2162
self.assertEqual({}, k._data._cache)
1529
2165
class TestKnitIndex(KnitTests):
1657
2284
add_callback = self.catch_add
1659
2286
add_callback = None
1660
return _KnitGraphIndex(combined_index, lambda:True, deltas=deltas,
2287
return KnitGraphIndex(combined_index, deltas=deltas,
1661
2288
add_callback=add_callback)
1663
def test_keys(self):
1664
index = self.two_graph_index()
1665
self.assertEqual(set([('tail',), ('tip',), ('parent',), ('separate',)]),
2290
def test_get_graph(self):
2291
index = self.two_graph_index()
2292
self.assertEqual(set([
2293
('tip', ('parent', )),
2295
('parent', ('tail', 'ghost')),
2297
]), set(index.get_graph()))
2299
def test_get_ancestry(self):
2300
# get_ancestry is defined as eliding ghosts, not erroring.
2301
index = self.two_graph_index()
2302
self.assertEqual([], index.get_ancestry([]))
2303
self.assertEqual(['separate'], index.get_ancestry(['separate']))
2304
self.assertEqual(['tail'], index.get_ancestry(['tail']))
2305
self.assertEqual(['tail', 'parent'], index.get_ancestry(['parent']))
2306
self.assertEqual(['tail', 'parent', 'tip'], index.get_ancestry(['tip']))
2307
self.assertTrue(index.get_ancestry(['tip', 'separate']) in
2308
(['tail', 'parent', 'tip', 'separate'],
2309
['separate', 'tail', 'parent', 'tip'],
2311
# and without topo_sort
2312
self.assertEqual(set(['separate']),
2313
set(index.get_ancestry(['separate'], topo_sorted=False)))
2314
self.assertEqual(set(['tail']),
2315
set(index.get_ancestry(['tail'], topo_sorted=False)))
2316
self.assertEqual(set(['tail', 'parent']),
2317
set(index.get_ancestry(['parent'], topo_sorted=False)))
2318
self.assertEqual(set(['tail', 'parent', 'tip']),
2319
set(index.get_ancestry(['tip'], topo_sorted=False)))
2320
self.assertEqual(set(['separate', 'tail', 'parent', 'tip']),
2321
set(index.get_ancestry(['tip', 'separate'])))
2322
# asking for a ghost makes it go boom.
2323
self.assertRaises(errors.RevisionNotPresent, index.get_ancestry, ['ghost'])
2325
def test_get_ancestry_with_ghosts(self):
2326
index = self.two_graph_index()
2327
self.assertEqual([], index.get_ancestry_with_ghosts([]))
2328
self.assertEqual(['separate'], index.get_ancestry_with_ghosts(['separate']))
2329
self.assertEqual(['tail'], index.get_ancestry_with_ghosts(['tail']))
2330
self.assertTrue(index.get_ancestry_with_ghosts(['parent']) in
2331
(['tail', 'ghost', 'parent'],
2332
['ghost', 'tail', 'parent'],
2334
self.assertTrue(index.get_ancestry_with_ghosts(['tip']) in
2335
(['tail', 'ghost', 'parent', 'tip'],
2336
['ghost', 'tail', 'parent', 'tip'],
2338
self.assertTrue(index.get_ancestry_with_ghosts(['tip', 'separate']) in
2339
(['tail', 'ghost', 'parent', 'tip', 'separate'],
2340
['ghost', 'tail', 'parent', 'tip', 'separate'],
2341
['separate', 'tail', 'ghost', 'parent', 'tip'],
2342
['separate', 'ghost', 'tail', 'parent', 'tip'],
2344
# asking for a ghost makes it go boom.
2345
self.assertRaises(errors.RevisionNotPresent, index.get_ancestry_with_ghosts, ['ghost'])
2347
def test_num_versions(self):
2348
index = self.two_graph_index()
2349
self.assertEqual(4, index.num_versions())
2351
def test_get_versions(self):
2352
index = self.two_graph_index()
2353
self.assertEqual(set(['tail', 'tip', 'parent', 'separate']),
2354
set(index.get_versions()))
2356
def test_has_version(self):
2357
index = self.two_graph_index()
2358
self.assertTrue(index.has_version('tail'))
2359
self.assertFalse(index.has_version('ghost'))
1668
2361
def test_get_position(self):
1669
2362
index = self.two_graph_index()
1670
self.assertEqual((index._graph_index._indices[0], 0, 100), index.get_position(('tip',)))
1671
self.assertEqual((index._graph_index._indices[1], 100, 78), index.get_position(('parent',)))
2363
self.assertEqual((index._graph_index._indices[0], 0, 100), index.get_position('tip'))
2364
self.assertEqual((index._graph_index._indices[1], 100, 78), index.get_position('parent'))
1673
2366
def test_get_method_deltas(self):
1674
2367
index = self.two_graph_index(deltas=True)
1675
self.assertEqual('fulltext', index.get_method(('tip',)))
1676
self.assertEqual('line-delta', index.get_method(('parent',)))
2368
self.assertEqual('fulltext', index.get_method('tip'))
2369
self.assertEqual('line-delta', index.get_method('parent'))
1678
2371
def test_get_method_no_deltas(self):
1679
2372
# check that the parent-history lookup is ignored with deltas=False.
1680
2373
index = self.two_graph_index(deltas=False)
1681
self.assertEqual('fulltext', index.get_method(('tip',)))
1682
self.assertEqual('fulltext', index.get_method(('parent',)))
2374
self.assertEqual('fulltext', index.get_method('tip'))
2375
self.assertEqual('fulltext', index.get_method('parent'))
1684
2377
def test_get_options_deltas(self):
1685
2378
index = self.two_graph_index(deltas=True)
1686
self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
1687
self.assertEqual(['line-delta'], index.get_options(('parent',)))
2379
self.assertEqual(['fulltext', 'no-eol'], index.get_options('tip'))
2380
self.assertEqual(['line-delta'], index.get_options('parent'))
1689
2382
def test_get_options_no_deltas(self):
1690
2383
# check that the parent-history lookup is ignored with deltas=False.
1691
2384
index = self.two_graph_index(deltas=False)
1692
self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
1693
self.assertEqual(['fulltext'], index.get_options(('parent',)))
1695
def test_get_parent_map(self):
1696
index = self.two_graph_index()
1697
self.assertEqual({('parent',):(('tail',), ('ghost',))},
1698
index.get_parent_map([('parent',), ('ghost',)]))
2385
self.assertEqual(['fulltext', 'no-eol'], index.get_options('tip'))
2386
self.assertEqual(['fulltext'], index.get_options('parent'))
2388
def test_get_parents(self):
2389
# get_parents ignores ghosts
2390
index = self.two_graph_index()
2391
self.assertEqual(('tail', ), index.get_parents('parent'))
2392
# and errors on ghosts.
2393
self.assertRaises(errors.RevisionNotPresent,
2394
index.get_parents, 'ghost')
2396
def test_get_parents_with_ghosts(self):
2397
index = self.two_graph_index()
2398
self.assertEqual(('tail', 'ghost'), index.get_parents_with_ghosts('parent'))
2399
# and errors on ghosts.
2400
self.assertRaises(errors.RevisionNotPresent,
2401
index.get_parents_with_ghosts, 'ghost')
2403
def test_check_versions_present(self):
2404
# ghosts should not be considered present
2405
index = self.two_graph_index()
2406
self.assertRaises(RevisionNotPresent, index.check_versions_present,
2408
self.assertRaises(RevisionNotPresent, index.check_versions_present,
2410
index.check_versions_present(['tail', 'separate'])
1700
2412
def catch_add(self, entries):
1701
2413
self.caught_entries.append(entries)
1703
2415
def test_add_no_callback_errors(self):
1704
2416
index = self.two_graph_index()
1705
self.assertRaises(errors.ReadOnlyError, index.add_records,
1706
[(('new',), 'fulltext,no-eol', (None, 50, 60), ['separate'])])
2417
self.assertRaises(errors.ReadOnlyError, index.add_version,
2418
'new', 'fulltext,no-eol', (None, 50, 60), ['separate'])
1708
2420
def test_add_version_smoke(self):
1709
2421
index = self.two_graph_index(catch_adds=True)
1710
index.add_records([(('new',), 'fulltext,no-eol', (None, 50, 60),
2422
index.add_version('new', 'fulltext,no-eol', (None, 50, 60), ['separate'])
1712
2423
self.assertEqual([[(('new', ), 'N50 60', ((('separate',),),))]],
1713
2424
self.caught_entries)
1715
2426
def test_add_version_delta_not_delta_index(self):
1716
2427
index = self.two_graph_index(catch_adds=True)
1717
self.assertRaises(errors.KnitCorrupt, index.add_records,
1718
[(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
2428
self.assertRaises(errors.KnitCorrupt, index.add_version,
2429
'new', 'no-eol,line-delta', (None, 0, 100), ['parent'])
1719
2430
self.assertEqual([], self.caught_entries)
1721
2432
def test_add_version_same_dup(self):
1722
2433
index = self.two_graph_index(catch_adds=True)
1723
2434
# options can be spelt two different ways
1724
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
1725
index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [('parent',)])])
1726
# position/length are ignored (because each pack could have fulltext or
1727
# delta, and be at a different position.
1728
index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100),
1730
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000),
1732
# but neither should have added data:
1733
self.assertEqual([[], [], [], []], self.caught_entries)
2435
index.add_version('tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])
2436
index.add_version('tip', 'no-eol,fulltext', (None, 0, 100), ['parent'])
2437
# but neither should have added data.
2438
self.assertEqual([[], []], self.caught_entries)
1735
2440
def test_add_version_different_dup(self):
1736
2441
index = self.two_graph_index(deltas=True, catch_adds=True)
1737
2442
# change options
1738
self.assertRaises(errors.KnitCorrupt, index.add_records,
1739
[(('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
1740
self.assertRaises(errors.KnitCorrupt, index.add_records,
1741
[(('tip',), 'fulltext', (None, 0, 100), [('parent',)])])
2443
self.assertRaises(errors.KnitCorrupt, index.add_version,
2444
'tip', 'no-eol,line-delta', (None, 0, 100), ['parent'])
2445
self.assertRaises(errors.KnitCorrupt, index.add_version,
2446
'tip', 'line-delta,no-eol', (None, 0, 100), ['parent'])
2447
self.assertRaises(errors.KnitCorrupt, index.add_version,
2448
'tip', 'fulltext', (None, 0, 100), ['parent'])
2450
self.assertRaises(errors.KnitCorrupt, index.add_version,
2451
'tip', 'fulltext,no-eol', (None, 50, 100), ['parent'])
2452
self.assertRaises(errors.KnitCorrupt, index.add_version,
2453
'tip', 'fulltext,no-eol', (None, 0, 1000), ['parent'])
1743
self.assertRaises(errors.KnitCorrupt, index.add_records,
1744
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
2455
self.assertRaises(errors.KnitCorrupt, index.add_version,
2456
'tip', 'fulltext,no-eol', (None, 0, 100), [])
1745
2457
self.assertEqual([], self.caught_entries)
1747
2459
def test_add_versions_nodeltas(self):
1748
2460
index = self.two_graph_index(catch_adds=True)
1750
(('new',), 'fulltext,no-eol', (None, 50, 60), [('separate',)]),
1751
(('new2',), 'fulltext', (None, 0, 6), [('new',)]),
2461
index.add_versions([
2462
('new', 'fulltext,no-eol', (None, 50, 60), ['separate']),
2463
('new2', 'fulltext', (None, 0, 6), ['new']),
1753
2465
self.assertEqual([(('new', ), 'N50 60', ((('separate',),),)),
1754
2466
(('new2', ), ' 0 6', ((('new',),),))],
1769
2481
def test_add_versions_delta_not_delta_index(self):
1770
2482
index = self.two_graph_index(catch_adds=True)
1771
self.assertRaises(errors.KnitCorrupt, index.add_records,
1772
[(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
2483
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2484
[('new', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
1773
2485
self.assertEqual([], self.caught_entries)
1775
2487
def test_add_versions_random_id_accepted(self):
1776
2488
index = self.two_graph_index(catch_adds=True)
1777
index.add_records([], random_id=True)
2489
index.add_versions([], random_id=True)
1779
2491
def test_add_versions_same_dup(self):
1780
2492
index = self.two_graph_index(catch_adds=True)
1781
2493
# options can be spelt two different ways
1782
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100),
1784
index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100),
1786
# position/length are ignored (because each pack could have fulltext or
1787
# delta, and be at a different position.
1788
index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100),
1790
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000),
2494
index.add_versions([('tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])])
2495
index.add_versions([('tip', 'no-eol,fulltext', (None, 0, 100), ['parent'])])
1792
2496
# but neither should have added data.
1793
self.assertEqual([[], [], [], []], self.caught_entries)
2497
self.assertEqual([[], []], self.caught_entries)
1795
2499
def test_add_versions_different_dup(self):
1796
2500
index = self.two_graph_index(deltas=True, catch_adds=True)
1797
2501
# change options
1798
self.assertRaises(errors.KnitCorrupt, index.add_records,
1799
[(('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
1800
self.assertRaises(errors.KnitCorrupt, index.add_records,
1801
[(('tip',), 'fulltext', (None, 0, 100), [('parent',)])])
2502
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2503
[('tip', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
2504
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2505
[('tip', 'line-delta,no-eol', (None, 0, 100), ['parent'])])
2506
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2507
[('tip', 'fulltext', (None, 0, 100), ['parent'])])
2509
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2510
[('tip', 'fulltext,no-eol', (None, 50, 100), ['parent'])])
2511
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2512
[('tip', 'fulltext,no-eol', (None, 0, 1000), ['parent'])])
1803
self.assertRaises(errors.KnitCorrupt, index.add_records,
1804
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
2514
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2515
[('tip', 'fulltext,no-eol', (None, 0, 100), [])])
1805
2516
# change options in the second record
1806
self.assertRaises(errors.KnitCorrupt, index.add_records,
1807
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)]),
1808
(('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
2517
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2518
[('tip', 'fulltext,no-eol', (None, 0, 100), ['parent']),
2519
('tip', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
1809
2520
self.assertEqual([], self.caught_entries)
1811
def make_g_index_missing_compression_parent(self):
1812
graph_index = self.make_g_index('missing_comp', 2,
1813
[(('tip', ), ' 100 78',
1814
([('missing-parent', ), ('ghost', )], [('missing-parent', )]))])
1817
def make_g_index_missing_parent(self):
1818
graph_index = self.make_g_index('missing_parent', 2,
1819
[(('parent', ), ' 100 78', ([], [])),
1820
(('tip', ), ' 100 78',
1821
([('parent', ), ('missing-parent', )], [('parent', )])),
1825
def make_g_index_no_external_refs(self):
1826
graph_index = self.make_g_index('no_external_refs', 2,
1827
[(('rev', ), ' 100 78',
1828
([('parent', ), ('ghost', )], []))])
1831
def test_add_good_unvalidated_index(self):
1832
unvalidated = self.make_g_index_no_external_refs()
1833
combined = CombinedGraphIndex([unvalidated])
1834
index = _KnitGraphIndex(combined, lambda: True, deltas=True)
1835
index.scan_unvalidated_index(unvalidated)
1836
self.assertEqual(frozenset(), index.get_missing_compression_parents())
1838
def test_add_missing_compression_parent_unvalidated_index(self):
1839
unvalidated = self.make_g_index_missing_compression_parent()
1840
combined = CombinedGraphIndex([unvalidated])
1841
index = _KnitGraphIndex(combined, lambda: True, deltas=True)
1842
index.scan_unvalidated_index(unvalidated)
1843
# This also checks that its only the compression parent that is
1844
# examined, otherwise 'ghost' would also be reported as a missing
1847
frozenset([('missing-parent',)]),
1848
index.get_missing_compression_parents())
1850
def test_add_missing_noncompression_parent_unvalidated_index(self):
1851
unvalidated = self.make_g_index_missing_parent()
1852
combined = CombinedGraphIndex([unvalidated])
1853
index = _KnitGraphIndex(combined, lambda: True, deltas=True,
1854
track_external_parent_refs=True)
1855
index.scan_unvalidated_index(unvalidated)
1857
frozenset([('missing-parent',)]), index.get_missing_parents())
1859
def test_track_external_parent_refs(self):
1860
g_index = self.make_g_index('empty', 2, [])
1861
combined = CombinedGraphIndex([g_index])
1862
index = _KnitGraphIndex(combined, lambda: True, deltas=True,
1863
add_callback=self.catch_add, track_external_parent_refs=True)
1864
self.caught_entries = []
1866
(('new-key',), 'fulltext,no-eol', (None, 50, 60),
1867
[('parent-1',), ('parent-2',)])])
1869
frozenset([('parent-1',), ('parent-2',)]),
1870
index.get_missing_parents())
1872
def test_add_unvalidated_index_with_present_external_references(self):
1873
index = self.two_graph_index(deltas=True)
1874
# Ugly hack to get at one of the underlying GraphIndex objects that
1875
# two_graph_index built.
1876
unvalidated = index._graph_index._indices[1]
1877
# 'parent' is an external ref of _indices[1] (unvalidated), but is
1878
# present in _indices[0].
1879
index.scan_unvalidated_index(unvalidated)
1880
self.assertEqual(frozenset(), index.get_missing_compression_parents())
1882
def make_new_missing_parent_g_index(self, name):
1883
missing_parent = name + '-missing-parent'
1884
graph_index = self.make_g_index(name, 2,
1885
[((name + 'tip', ), ' 100 78',
1886
([(missing_parent, ), ('ghost', )], [(missing_parent, )]))])
1889
def test_add_mulitiple_unvalidated_indices_with_missing_parents(self):
1890
g_index_1 = self.make_new_missing_parent_g_index('one')
1891
g_index_2 = self.make_new_missing_parent_g_index('two')
1892
combined = CombinedGraphIndex([g_index_1, g_index_2])
1893
index = _KnitGraphIndex(combined, lambda: True, deltas=True)
1894
index.scan_unvalidated_index(g_index_1)
1895
index.scan_unvalidated_index(g_index_2)
1897
frozenset([('one-missing-parent',), ('two-missing-parent',)]),
1898
index.get_missing_compression_parents())
1900
def test_add_mulitiple_unvalidated_indices_with_mutual_dependencies(self):
1901
graph_index_a = self.make_g_index('one', 2,
1902
[(('parent-one', ), ' 100 78', ([('non-compression-parent',)], [])),
1903
(('child-of-two', ), ' 100 78',
1904
([('parent-two',)], [('parent-two',)]))])
1905
graph_index_b = self.make_g_index('two', 2,
1906
[(('parent-two', ), ' 100 78', ([('non-compression-parent',)], [])),
1907
(('child-of-one', ), ' 100 78',
1908
([('parent-one',)], [('parent-one',)]))])
1909
combined = CombinedGraphIndex([graph_index_a, graph_index_b])
1910
index = _KnitGraphIndex(combined, lambda: True, deltas=True)
1911
index.scan_unvalidated_index(graph_index_a)
1912
index.scan_unvalidated_index(graph_index_b)
1914
frozenset([]), index.get_missing_compression_parents())
2522
def test_iter_parents(self):
2523
index1 = self.make_g_index('1', 1, [
2525
(('r0', ), 'N0 100', ([], )),
2527
(('r1', ), '', ([('r0', )], ))])
2528
index2 = self.make_g_index('2', 1, [
2530
(('r2', ), 'N0 100', ([('r1', ), ('r0', )], )),
2532
combined_index = CombinedGraphIndex([index1, index2])
2533
index = KnitGraphIndex(combined_index)
2535
# cases: each sample data individually:
2536
self.assertEqual(set([('r0', ())]),
2537
set(index.iter_parents(['r0'])))
2538
self.assertEqual(set([('r1', ('r0', ))]),
2539
set(index.iter_parents(['r1'])))
2540
self.assertEqual(set([('r2', ('r1', 'r0'))]),
2541
set(index.iter_parents(['r2'])))
2542
# no nodes returned for a missing node
2543
self.assertEqual(set(),
2544
set(index.iter_parents(['missing'])))
2545
# 1 node returned with missing nodes skipped
2546
self.assertEqual(set([('r1', ('r0', ))]),
2547
set(index.iter_parents(['ghost1', 'r1', 'ghost'])))
2549
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
2550
set(index.iter_parents(['r0', 'r1'])))
2551
# 2 nodes returned, missing skipped
2552
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
2553
set(index.iter_parents(['a', 'r0', 'b', 'r1', 'c'])))
1917
2556
class TestNoParentsGraphIndexKnit(KnitTests):
1918
"""Tests for knits using _KnitGraphIndex with no parents."""
2557
"""Tests for knits using KnitGraphIndex with no parents."""
1920
2559
def make_g_index(self, name, ref_lists=0, nodes=[]):
1921
2560
builder = GraphIndexBuilder(ref_lists)
2048
2742
def test_add_versions_delta_not_delta_index(self):
2049
2743
index = self.two_graph_index(catch_adds=True)
2050
self.assertRaises(errors.KnitCorrupt, index.add_records,
2051
[(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
2744
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2745
[('new', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
2052
2746
self.assertEqual([], self.caught_entries)
2054
2748
def test_add_versions_parents_not_parents_index(self):
2055
2749
index = self.two_graph_index(catch_adds=True)
2056
self.assertRaises(errors.KnitCorrupt, index.add_records,
2057
[(('new',), 'no-eol,fulltext', (None, 0, 100), [('parent',)])])
2750
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2751
[('new', 'no-eol,fulltext', (None, 0, 100), ['parent'])])
2058
2752
self.assertEqual([], self.caught_entries)
2060
2754
def test_add_versions_random_id_accepted(self):
2061
2755
index = self.two_graph_index(catch_adds=True)
2062
index.add_records([], random_id=True)
2756
index.add_versions([], random_id=True)
2064
2758
def test_add_versions_same_dup(self):
2065
2759
index = self.two_graph_index(catch_adds=True)
2066
2760
# options can be spelt two different ways
2067
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
2068
index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [])])
2069
# position/length are ignored (because each pack could have fulltext or
2070
# delta, and be at a different position.
2071
index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100), [])])
2072
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000), [])])
2761
index.add_versions([('tip', 'fulltext,no-eol', (None, 0, 100), [])])
2762
index.add_versions([('tip', 'no-eol,fulltext', (None, 0, 100), [])])
2073
2763
# but neither should have added data.
2074
self.assertEqual([[], [], [], []], self.caught_entries)
2764
self.assertEqual([[], []], self.caught_entries)
2076
2766
def test_add_versions_different_dup(self):
2077
2767
index = self.two_graph_index(catch_adds=True)
2078
2768
# change options
2079
self.assertRaises(errors.KnitCorrupt, index.add_records,
2080
[(('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
2081
self.assertRaises(errors.KnitCorrupt, index.add_records,
2082
[(('tip',), 'line-delta,no-eol', (None, 0, 100), [])])
2083
self.assertRaises(errors.KnitCorrupt, index.add_records,
2084
[(('tip',), 'fulltext', (None, 0, 100), [])])
2769
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2770
[('tip', 'no-eol,line-delta', (None, 0, 100), [])])
2771
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2772
[('tip', 'line-delta,no-eol', (None, 0, 100), [])])
2773
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2774
[('tip', 'fulltext', (None, 0, 100), [])])
2776
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2777
[('tip', 'fulltext,no-eol', (None, 50, 100), [])])
2778
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2779
[('tip', 'fulltext,no-eol', (None, 0, 1000), [])])
2086
self.assertRaises(errors.KnitCorrupt, index.add_records,
2087
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
2781
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2782
[('tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])])
2088
2783
# change options in the second record
2089
self.assertRaises(errors.KnitCorrupt, index.add_records,
2090
[(('tip',), 'fulltext,no-eol', (None, 0, 100), []),
2091
(('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
2784
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2785
[('tip', 'fulltext,no-eol', (None, 0, 100), []),
2786
('tip', 'no-eol,line-delta', (None, 0, 100), [])])
2092
2787
self.assertEqual([], self.caught_entries)
2095
class TestKnitVersionedFiles(KnitTests):
2097
def assertGroupKeysForIo(self, exp_groups, keys, non_local_keys,
2098
positions, _min_buffer_size=None):
2099
kvf = self.make_test_knit()
2100
if _min_buffer_size is None:
2101
_min_buffer_size = knit._STREAM_MIN_BUFFER_SIZE
2102
self.assertEqual(exp_groups, kvf._group_keys_for_io(keys,
2103
non_local_keys, positions,
2104
_min_buffer_size=_min_buffer_size))
2106
def assertSplitByPrefix(self, expected_map, expected_prefix_order,
2108
split, prefix_order = KnitVersionedFiles._split_by_prefix(keys)
2109
self.assertEqual(expected_map, split)
2110
self.assertEqual(expected_prefix_order, prefix_order)
2112
def test__group_keys_for_io(self):
2113
ft_detail = ('fulltext', False)
2114
ld_detail = ('line-delta', False)
2122
f_a: (ft_detail, (f_a, 0, 100), None),
2123
f_b: (ld_detail, (f_b, 100, 21), f_a),
2124
f_c: (ld_detail, (f_c, 180, 15), f_b),
2125
g_a: (ft_detail, (g_a, 121, 35), None),
2126
g_b: (ld_detail, (g_b, 156, 12), g_a),
2127
g_c: (ld_detail, (g_c, 195, 13), g_a),
2129
self.assertGroupKeysForIo([([f_a], set())],
2130
[f_a], [], positions)
2131
self.assertGroupKeysForIo([([f_a], set([f_a]))],
2132
[f_a], [f_a], positions)
2133
self.assertGroupKeysForIo([([f_a, f_b], set([]))],
2134
[f_a, f_b], [], positions)
2135
self.assertGroupKeysForIo([([f_a, f_b], set([f_b]))],
2136
[f_a, f_b], [f_b], positions)
2137
self.assertGroupKeysForIo([([f_a, f_b, g_a, g_b], set())],
2138
[f_a, g_a, f_b, g_b], [], positions)
2139
self.assertGroupKeysForIo([([f_a, f_b, g_a, g_b], set())],
2140
[f_a, g_a, f_b, g_b], [], positions,
2141
_min_buffer_size=150)
2142
self.assertGroupKeysForIo([([f_a, f_b], set()), ([g_a, g_b], set())],
2143
[f_a, g_a, f_b, g_b], [], positions,
2144
_min_buffer_size=100)
2145
self.assertGroupKeysForIo([([f_c], set()), ([g_b], set())],
2146
[f_c, g_b], [], positions,
2147
_min_buffer_size=125)
2148
self.assertGroupKeysForIo([([g_b, f_c], set())],
2149
[g_b, f_c], [], positions,
2150
_min_buffer_size=125)
2152
def test__split_by_prefix(self):
2153
self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
2154
'g': [('g', 'b'), ('g', 'a')],
2156
[('f', 'a'), ('g', 'b'),
2157
('g', 'a'), ('f', 'b')])
2159
self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
2160
'g': [('g', 'b'), ('g', 'a')],
2162
[('f', 'a'), ('f', 'b'),
2163
('g', 'b'), ('g', 'a')])
2165
self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
2166
'g': [('g', 'b'), ('g', 'a')],
2168
[('f', 'a'), ('f', 'b'),
2169
('g', 'b'), ('g', 'a')])
2171
self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
2172
'g': [('g', 'b'), ('g', 'a')],
2173
'': [('a',), ('b',)]
2175
[('f', 'a'), ('g', 'b'),
2177
('g', 'a'), ('f', 'b')])
2180
class TestStacking(KnitTests):
2182
def get_basis_and_test_knit(self):
2183
basis = self.make_test_knit(name='basis')
2184
basis = RecordingVersionedFilesDecorator(basis)
2185
test = self.make_test_knit(name='test')
2186
test.add_fallback_versioned_files(basis)
2189
def test_add_fallback_versioned_files(self):
2190
basis = self.make_test_knit(name='basis')
2191
test = self.make_test_knit(name='test')
2192
# It must not error; other tests test that the fallback is referred to
2193
# when accessing data.
2194
test.add_fallback_versioned_files(basis)
2196
def test_add_lines(self):
2197
# lines added to the test are not added to the basis
2198
basis, test = self.get_basis_and_test_knit()
2200
key_basis = ('bar',)
2201
key_cross_border = ('quux',)
2202
key_delta = ('zaphod',)
2203
test.add_lines(key, (), ['foo\n'])
2204
self.assertEqual({}, basis.get_parent_map([key]))
2205
# lines added to the test that reference across the stack do a
2207
basis.add_lines(key_basis, (), ['foo\n'])
2209
test.add_lines(key_cross_border, (key_basis,), ['foo\n'])
2210
self.assertEqual('fulltext', test._index.get_method(key_cross_border))
2211
# we don't even need to look at the basis to see that this should be
2212
# stored as a fulltext
2213
self.assertEqual([], basis.calls)
2214
# Subsequent adds do delta.
2216
test.add_lines(key_delta, (key_cross_border,), ['foo\n'])
2217
self.assertEqual('line-delta', test._index.get_method(key_delta))
2218
self.assertEqual([], basis.calls)
2220
def test_annotate(self):
2221
# annotations from the test knit are answered without asking the basis
2222
basis, test = self.get_basis_and_test_knit()
2224
key_basis = ('bar',)
2225
key_missing = ('missing',)
2226
test.add_lines(key, (), ['foo\n'])
2227
details = test.annotate(key)
2228
self.assertEqual([(key, 'foo\n')], details)
2229
self.assertEqual([], basis.calls)
2230
# But texts that are not in the test knit are looked for in the basis
2232
basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2234
details = test.annotate(key_basis)
2235
self.assertEqual([(key_basis, 'foo\n'), (key_basis, 'bar\n')], details)
2236
# Not optimised to date:
2237
# self.assertEqual([("annotate", key_basis)], basis.calls)
2238
self.assertEqual([('get_parent_map', set([key_basis])),
2239
('get_parent_map', set([key_basis])),
2240
('get_record_stream', [key_basis], 'topological', True)],
2243
def test_check(self):
2244
# At the moment checking a stacked knit does implicitly check the
2246
basis, test = self.get_basis_and_test_knit()
2249
def test_get_parent_map(self):
2250
# parents in the test knit are answered without asking the basis
2251
basis, test = self.get_basis_and_test_knit()
2253
key_basis = ('bar',)
2254
key_missing = ('missing',)
2255
test.add_lines(key, (), [])
2256
parent_map = test.get_parent_map([key])
2257
self.assertEqual({key: ()}, parent_map)
2258
self.assertEqual([], basis.calls)
2259
# But parents that are not in the test knit are looked for in the basis
2260
basis.add_lines(key_basis, (), [])
2262
parent_map = test.get_parent_map([key, key_basis, key_missing])
2263
self.assertEqual({key: (),
2264
key_basis: ()}, parent_map)
2265
self.assertEqual([("get_parent_map", set([key_basis, key_missing]))],
2268
def test_get_record_stream_unordered_fulltexts(self):
2269
# records from the test knit are answered without asking the basis:
2270
basis, test = self.get_basis_and_test_knit()
2272
key_basis = ('bar',)
2273
key_missing = ('missing',)
2274
test.add_lines(key, (), ['foo\n'])
2275
records = list(test.get_record_stream([key], 'unordered', True))
2276
self.assertEqual(1, len(records))
2277
self.assertEqual([], basis.calls)
2278
# Missing (from test knit) objects are retrieved from the basis:
2279
basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2281
records = list(test.get_record_stream([key_basis, key_missing],
2283
self.assertEqual(2, len(records))
2284
calls = list(basis.calls)
2285
for record in records:
2286
self.assertSubset([record.key], (key_basis, key_missing))
2287
if record.key == key_missing:
2288
self.assertIsInstance(record, AbsentContentFactory)
2290
reference = list(basis.get_record_stream([key_basis],
2291
'unordered', True))[0]
2292
self.assertEqual(reference.key, record.key)
2293
self.assertEqual(reference.sha1, record.sha1)
2294
self.assertEqual(reference.storage_kind, record.storage_kind)
2295
self.assertEqual(reference.get_bytes_as(reference.storage_kind),
2296
record.get_bytes_as(record.storage_kind))
2297
self.assertEqual(reference.get_bytes_as('fulltext'),
2298
record.get_bytes_as('fulltext'))
2299
# It's not strictly minimal, but it seems reasonable for now for it to
2300
# ask which fallbacks have which parents.
2302
("get_parent_map", set([key_basis, key_missing])),
2303
("get_record_stream", [key_basis], 'unordered', True)],
2306
def test_get_record_stream_ordered_fulltexts(self):
2307
# ordering is preserved down into the fallback store.
2308
basis, test = self.get_basis_and_test_knit()
2310
key_basis = ('bar',)
2311
key_basis_2 = ('quux',)
2312
key_missing = ('missing',)
2313
test.add_lines(key, (key_basis,), ['foo\n'])
2314
# Missing (from test knit) objects are retrieved from the basis:
2315
basis.add_lines(key_basis, (key_basis_2,), ['foo\n', 'bar\n'])
2316
basis.add_lines(key_basis_2, (), ['quux\n'])
2318
# ask for in non-topological order
2319
records = list(test.get_record_stream(
2320
[key, key_basis, key_missing, key_basis_2], 'topological', True))
2321
self.assertEqual(4, len(records))
2323
for record in records:
2324
self.assertSubset([record.key],
2325
(key_basis, key_missing, key_basis_2, key))
2326
if record.key == key_missing:
2327
self.assertIsInstance(record, AbsentContentFactory)
2329
results.append((record.key, record.sha1, record.storage_kind,
2330
record.get_bytes_as('fulltext')))
2331
calls = list(basis.calls)
2332
order = [record[0] for record in results]
2333
self.assertEqual([key_basis_2, key_basis, key], order)
2334
for result in results:
2335
if result[0] == key:
2339
record = source.get_record_stream([result[0]], 'unordered',
2341
self.assertEqual(record.key, result[0])
2342
self.assertEqual(record.sha1, result[1])
2343
# We used to check that the storage kind matched, but actually it
2344
# depends on whether it was sourced from the basis, or in a single
2345
# group, because asking for full texts returns proxy objects to a
2346
# _ContentMapGenerator object; so checking the kind is unneeded.
2347
self.assertEqual(record.get_bytes_as('fulltext'), result[3])
2348
# It's not strictly minimal, but it seems reasonable for now for it to
2349
# ask which fallbacks have which parents.
2351
("get_parent_map", set([key_basis, key_basis_2, key_missing])),
2352
# topological is requested from the fallback, because that is what
2353
# was requested at the top level.
2354
("get_record_stream", [key_basis_2, key_basis], 'topological', True)],
2357
def test_get_record_stream_unordered_deltas(self):
2358
# records from the test knit are answered without asking the basis:
2359
basis, test = self.get_basis_and_test_knit()
2361
key_basis = ('bar',)
2362
key_missing = ('missing',)
2363
test.add_lines(key, (), ['foo\n'])
2364
records = list(test.get_record_stream([key], 'unordered', False))
2365
self.assertEqual(1, len(records))
2366
self.assertEqual([], basis.calls)
2367
# Missing (from test knit) objects are retrieved from the basis:
2368
basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2370
records = list(test.get_record_stream([key_basis, key_missing],
2371
'unordered', False))
2372
self.assertEqual(2, len(records))
2373
calls = list(basis.calls)
2374
for record in records:
2375
self.assertSubset([record.key], (key_basis, key_missing))
2376
if record.key == key_missing:
2377
self.assertIsInstance(record, AbsentContentFactory)
2379
reference = list(basis.get_record_stream([key_basis],
2380
'unordered', False))[0]
2381
self.assertEqual(reference.key, record.key)
2382
self.assertEqual(reference.sha1, record.sha1)
2383
self.assertEqual(reference.storage_kind, record.storage_kind)
2384
self.assertEqual(reference.get_bytes_as(reference.storage_kind),
2385
record.get_bytes_as(record.storage_kind))
2386
# It's not strictly minimal, but it seems reasonable for now for it to
2387
# ask which fallbacks have which parents.
2389
("get_parent_map", set([key_basis, key_missing])),
2390
("get_record_stream", [key_basis], 'unordered', False)],
2393
def test_get_record_stream_ordered_deltas(self):
2394
# ordering is preserved down into the fallback store.
2395
basis, test = self.get_basis_and_test_knit()
2397
key_basis = ('bar',)
2398
key_basis_2 = ('quux',)
2399
key_missing = ('missing',)
2400
test.add_lines(key, (key_basis,), ['foo\n'])
2401
# Missing (from test knit) objects are retrieved from the basis:
2402
basis.add_lines(key_basis, (key_basis_2,), ['foo\n', 'bar\n'])
2403
basis.add_lines(key_basis_2, (), ['quux\n'])
2405
# ask for in non-topological order
2406
records = list(test.get_record_stream(
2407
[key, key_basis, key_missing, key_basis_2], 'topological', False))
2408
self.assertEqual(4, len(records))
2410
for record in records:
2411
self.assertSubset([record.key],
2412
(key_basis, key_missing, key_basis_2, key))
2413
if record.key == key_missing:
2414
self.assertIsInstance(record, AbsentContentFactory)
2416
results.append((record.key, record.sha1, record.storage_kind,
2417
record.get_bytes_as(record.storage_kind)))
2418
calls = list(basis.calls)
2419
order = [record[0] for record in results]
2420
self.assertEqual([key_basis_2, key_basis, key], order)
2421
for result in results:
2422
if result[0] == key:
2426
record = source.get_record_stream([result[0]], 'unordered',
2428
self.assertEqual(record.key, result[0])
2429
self.assertEqual(record.sha1, result[1])
2430
self.assertEqual(record.storage_kind, result[2])
2431
self.assertEqual(record.get_bytes_as(record.storage_kind), result[3])
2432
# It's not strictly minimal, but it seems reasonable for now for it to
2433
# ask which fallbacks have which parents.
2435
("get_parent_map", set([key_basis, key_basis_2, key_missing])),
2436
("get_record_stream", [key_basis_2, key_basis], 'topological', False)],
2439
def test_get_sha1s(self):
2440
# sha1's in the test knit are answered without asking the basis
2441
basis, test = self.get_basis_and_test_knit()
2443
key_basis = ('bar',)
2444
key_missing = ('missing',)
2445
test.add_lines(key, (), ['foo\n'])
2446
key_sha1sum = osutils.sha_string('foo\n')
2447
sha1s = test.get_sha1s([key])
2448
self.assertEqual({key: key_sha1sum}, sha1s)
2449
self.assertEqual([], basis.calls)
2450
# But texts that are not in the test knit are looked for in the basis
2451
# directly (rather than via text reconstruction) so that remote servers
2452
# etc don't have to answer with full content.
2453
basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2454
basis_sha1sum = osutils.sha_string('foo\nbar\n')
2456
sha1s = test.get_sha1s([key, key_missing, key_basis])
2457
self.assertEqual({key: key_sha1sum,
2458
key_basis: basis_sha1sum}, sha1s)
2459
self.assertEqual([("get_sha1s", set([key_basis, key_missing]))],
2462
def test_insert_record_stream(self):
2463
# records are inserted as normal; insert_record_stream builds on
2464
# add_lines, so a smoke test should be all that's needed:
2466
key_basis = ('bar',)
2467
key_delta = ('zaphod',)
2468
basis, test = self.get_basis_and_test_knit()
2469
source = self.make_test_knit(name='source')
2470
basis.add_lines(key_basis, (), ['foo\n'])
2472
source.add_lines(key_basis, (), ['foo\n'])
2473
source.add_lines(key_delta, (key_basis,), ['bar\n'])
2474
stream = source.get_record_stream([key_delta], 'unordered', False)
2475
test.insert_record_stream(stream)
2476
# XXX: this does somewhat too many calls in making sure of whether it
2477
# has to recreate the full text.
2478
self.assertEqual([("get_parent_map", set([key_basis])),
2479
('get_parent_map', set([key_basis])),
2480
('get_record_stream', [key_basis], 'unordered', True)],
2482
self.assertEqual({key_delta:(key_basis,)},
2483
test.get_parent_map([key_delta]))
2484
self.assertEqual('bar\n', test.get_record_stream([key_delta],
2485
'unordered', True).next().get_bytes_as('fulltext'))
2487
def test_iter_lines_added_or_present_in_keys(self):
2488
# Lines from the basis are returned, and lines for a given key are only
2492
# all sources are asked for keys:
2493
basis, test = self.get_basis_and_test_knit()
2494
basis.add_lines(key1, (), ["foo"])
2496
lines = list(test.iter_lines_added_or_present_in_keys([key1]))
2497
self.assertEqual([("foo\n", key1)], lines)
2498
self.assertEqual([("iter_lines_added_or_present_in_keys", set([key1]))],
2500
# keys in both are not duplicated:
2501
test.add_lines(key2, (), ["bar\n"])
2502
basis.add_lines(key2, (), ["bar\n"])
2504
lines = list(test.iter_lines_added_or_present_in_keys([key2]))
2505
self.assertEqual([("bar\n", key2)], lines)
2506
self.assertEqual([], basis.calls)
2508
def test_keys(self):
2511
# all sources are asked for keys:
2512
basis, test = self.get_basis_and_test_knit()
2514
self.assertEqual(set(), set(keys))
2515
self.assertEqual([("keys",)], basis.calls)
2516
# keys from a basis are returned:
2517
basis.add_lines(key1, (), [])
2520
self.assertEqual(set([key1]), set(keys))
2521
self.assertEqual([("keys",)], basis.calls)
2522
# keys in both are not duplicated:
2523
test.add_lines(key2, (), [])
2524
basis.add_lines(key2, (), [])
2527
self.assertEqual(2, len(keys))
2528
self.assertEqual(set([key1, key2]), set(keys))
2529
self.assertEqual([("keys",)], basis.calls)
2531
def test_add_mpdiffs(self):
2532
# records are inserted as normal; add_mpdiff builds on
2533
# add_lines, so a smoke test should be all that's needed:
2535
key_basis = ('bar',)
2536
key_delta = ('zaphod',)
2537
basis, test = self.get_basis_and_test_knit()
2538
source = self.make_test_knit(name='source')
2539
basis.add_lines(key_basis, (), ['foo\n'])
2541
source.add_lines(key_basis, (), ['foo\n'])
2542
source.add_lines(key_delta, (key_basis,), ['bar\n'])
2543
diffs = source.make_mpdiffs([key_delta])
2544
test.add_mpdiffs([(key_delta, (key_basis,),
2545
source.get_sha1s([key_delta])[key_delta], diffs[0])])
2546
self.assertEqual([("get_parent_map", set([key_basis])),
2547
('get_record_stream', [key_basis], 'unordered', True),],
2549
self.assertEqual({key_delta:(key_basis,)},
2550
test.get_parent_map([key_delta]))
2551
self.assertEqual('bar\n', test.get_record_stream([key_delta],
2552
'unordered', True).next().get_bytes_as('fulltext'))
2554
def test_make_mpdiffs(self):
2555
# Generating an mpdiff across a stacking boundary should detect parent
2559
key_right = ('zaphod',)
2560
basis, test = self.get_basis_and_test_knit()
2561
basis.add_lines(key_left, (), ['bar\n'])
2562
basis.add_lines(key_right, (), ['zaphod\n'])
2564
test.add_lines(key, (key_left, key_right),
2565
['bar\n', 'foo\n', 'zaphod\n'])
2566
diffs = test.make_mpdiffs([key])
2568
multiparent.MultiParent([multiparent.ParentText(0, 0, 0, 1),
2569
multiparent.NewText(['foo\n']),
2570
multiparent.ParentText(1, 0, 2, 1)])],
2572
self.assertEqual(3, len(basis.calls))
2574
("get_parent_map", set([key_left, key_right])),
2575
("get_parent_map", set([key_left, key_right])),
2578
last_call = basis.calls[-1]
2579
self.assertEqual('get_record_stream', last_call[0])
2580
self.assertEqual(set([key_left, key_right]), set(last_call[1]))
2581
self.assertEqual('topological', last_call[2])
2582
self.assertEqual(True, last_call[3])
2585
class TestNetworkBehaviour(KnitTests):
2586
"""Tests for getting data out of/into knits over the network."""
2588
def test_include_delta_closure_generates_a_knit_delta_closure(self):
2589
vf = self.make_test_knit(name='test')
2590
# put in three texts, giving ft, delta, delta
2591
vf.add_lines(('base',), (), ['base\n', 'content\n'])
2592
vf.add_lines(('d1',), (('base',),), ['d1\n'])
2593
vf.add_lines(('d2',), (('d1',),), ['d2\n'])
2594
# But heuristics could interfere, so check what happened:
2595
self.assertEqual(['knit-ft-gz', 'knit-delta-gz', 'knit-delta-gz'],
2596
[record.storage_kind for record in
2597
vf.get_record_stream([('base',), ('d1',), ('d2',)],
2598
'topological', False)])
2599
# generate a stream of just the deltas include_delta_closure=True,
2600
# serialise to the network, and check that we get a delta closure on the wire.
2601
stream = vf.get_record_stream([('d1',), ('d2',)], 'topological', True)
2602
netb = [record.get_bytes_as(record.storage_kind) for record in stream]
2603
# The first bytes should be a memo from _ContentMapGenerator, and the
2604
# second bytes should be empty (because its a API proxy not something
2605
# for wire serialisation.
2606
self.assertEqual('', netb[1])
2608
kind, line_end = network_bytes_to_kind_and_offset(bytes)
2609
self.assertEqual('knit-delta-closure', kind)
2612
class TestContentMapGenerator(KnitTests):
2613
"""Tests for ContentMapGenerator"""
2615
def test_get_record_stream_gives_records(self):
2616
vf = self.make_test_knit(name='test')
2617
# put in three texts, giving ft, delta, delta
2618
vf.add_lines(('base',), (), ['base\n', 'content\n'])
2619
vf.add_lines(('d1',), (('base',),), ['d1\n'])
2620
vf.add_lines(('d2',), (('d1',),), ['d2\n'])
2621
keys = [('d1',), ('d2',)]
2622
generator = _VFContentMapGenerator(vf, keys,
2623
global_map=vf.get_parent_map(keys))
2624
for record in generator.get_record_stream():
2625
if record.key == ('d1',):
2626
self.assertEqual('d1\n', record.get_bytes_as('fulltext'))
2628
self.assertEqual('d2\n', record.get_bytes_as('fulltext'))
2630
def test_get_record_stream_kinds_are_raw(self):
2631
vf = self.make_test_knit(name='test')
2632
# put in three texts, giving ft, delta, delta
2633
vf.add_lines(('base',), (), ['base\n', 'content\n'])
2634
vf.add_lines(('d1',), (('base',),), ['d1\n'])
2635
vf.add_lines(('d2',), (('d1',),), ['d2\n'])
2636
keys = [('base',), ('d1',), ('d2',)]
2637
generator = _VFContentMapGenerator(vf, keys,
2638
global_map=vf.get_parent_map(keys))
2639
kinds = {('base',): 'knit-delta-closure',
2640
('d1',): 'knit-delta-closure-ref',
2641
('d2',): 'knit-delta-closure-ref',
2643
for record in generator.get_record_stream():
2644
self.assertEqual(kinds[record.key], record.storage_kind)
2789
def test_iter_parents(self):
2790
index = self.two_graph_index()
2791
self.assertEqual(set([
2792
('tip', ()), ('tail', ()), ('parent', ()), ('separate', ())
2794
set(index.iter_parents(['tip', 'tail', 'ghost', 'parent', 'separate'])))
2795
self.assertEqual(set([('tip', ())]),
2796
set(index.iter_parents(['tip'])))
2797
self.assertEqual(set(),
2798
set(index.iter_parents([])))
2801
class TestPackKnits(KnitTests):
2802
"""Tests that use a _PackAccess and KnitGraphIndex."""
2804
def test_get_data_stream_packs_ignores_pack_overhead(self):
2805
# Packs have an encoding overhead that should not be included in the
2806
# 'size' field of a data stream, because it is not returned by the
2807
# raw_reading functions - it is why index_memo's are opaque, and
2808
# get_data_stream was abusing this.
2809
packname = 'test.pack'
2810
transport = self.get_transport()
2811
def write_data(bytes):
2812
transport.append_bytes(packname, bytes)
2813
writer = pack.ContainerWriter(write_data)
2815
index = InMemoryGraphIndex(2)
2816
knit_index = KnitGraphIndex(index, add_callback=index.add_nodes,
2818
indices = {index:(transport, packname)}
2819
access = _PackAccess(indices, writer=(writer, index))
2820
k = KnitVersionedFile('test', get_transport('.'),
2821
delta=True, create=True, index=knit_index, access_method=access)
2822
# insert something into the knit
2823
k.add_lines('text-1', [], ["foo\n"])
2824
# get a data stream for it
2825
stream = k.get_data_stream(['text-1'])
2826
# if the stream has been incorrectly assembled, we will get a short read
2827
# reading from the stream (as streams have no trailer)
2828
expected_length = stream[1][0][2]
2829
# we use -1 to do the read, so that if a trailer is added this test
2830
# will fail and we'll adjust it to handle that case correctly, rather
2831
# than allowing an over-read that is bogus.
2832
self.assertEqual(expected_length, len(stream[2](-1)))
2835
class Test_StreamIndex(KnitTests):
    """Tests for the _StreamIndex built from a knit data stream."""

    def get_index(self, knit, stream):
        """Get a _StreamIndex from knit and stream."""
        return knit._knit_from_datastream(stream)._index

    def assertIndexVersions(self, knit, versions):
        """Check that the _StreamIndex versions are those of the stream."""
        index = self.get_index(knit, knit.get_data_stream(versions))
        self.assertEqual(set(index.get_versions()), set(versions))
        # check we didn't get duplicates
        self.assertEqual(len(index.get_versions()), len(versions))

    def assertIndexAncestry(self, knit, ancestry_versions, versions, result):
        """Check the result of a get_ancestry call on knit."""
        index = self.get_index(knit, knit.get_data_stream(versions))
        # NOTE(review): the assertEqual wrapper line was dropped from the
        # paste; reconstructed -- confirm against the original file.
        self.assertEqual(set(result),
            set(index.get_ancestry(ancestry_versions, False)))

    def assertIterParents(self, knit, versions, parent_versions, result):
        """Check the result of an iter_parents call on knit."""
        index = self.get_index(knit, knit.get_data_stream(versions))
        self.assertEqual(result, index.iter_parents(parent_versions))

    def assertGetMethod(self, knit, versions, version, result):
        """Check get_method(version) on the index built from a stream."""
        index = self.get_index(knit, knit.get_data_stream(versions))
        self.assertEqual(result, index.get_method(version))

    def assertGetOptions(self, knit, version, options):
        """Check get_options(version) on the index built from a stream."""
        index = self.get_index(knit, knit.get_data_stream(version))
        self.assertEqual(options, index.get_options(version))

    def assertGetPosition(self, knit, versions, version, result):
        """Check get_position(version) on the index built from a stream."""
        index = self.get_index(knit, knit.get_data_stream(versions))
        if result[1] is None:
            # A None index placeholder in the expectation means "the stream
            # index itself".
            result = (result[0], index, result[2], result[3])
        self.assertEqual(result, index.get_position(version))

    def assertGetParentsWithGhosts(self, knit, versions, version, parents):
        """Check get_parents_with_ghosts(version) on a stream index."""
        index = self.get_index(knit, knit.get_data_stream(versions))
        self.assertEqual(parents, index.get_parents_with_ghosts(version))

    def make_knit_with_4_versions_2_dags(self):
        """Build a knit with two disconnected DAGs: a/b/c and d (ghosts)."""
        knit = self.make_test_knit()
        knit.add_lines('a', [], ["foo"])
        knit.add_lines('b', [], [])
        knit.add_lines('c', ['b', 'a'], [])
        knit.add_lines_with_ghosts('d', ['e', 'f'], [])
        # NOTE(review): the return line was dropped from the paste;
        # reconstructed -- confirm against the original file.
        return knit

    def test_versions(self):
        """The versions of a StreamIndex are those of the datastream."""
        knit = self.make_knit_with_4_versions_2_dags()
        # ask for most permutations, which catches bugs like falling back to the
        # target knit, or showing ghosts, etc.
        self.assertIndexVersions(knit, [])
        self.assertIndexVersions(knit, ['a'])
        self.assertIndexVersions(knit, ['b'])
        self.assertIndexVersions(knit, ['c'])
        self.assertIndexVersions(knit, ['d'])
        self.assertIndexVersions(knit, ['a', 'b'])
        self.assertIndexVersions(knit, ['b', 'c'])
        self.assertIndexVersions(knit, ['a', 'c'])
        self.assertIndexVersions(knit, ['a', 'b', 'c'])
        self.assertIndexVersions(knit, ['a', 'b', 'c', 'd'])

    def test_construct(self):
        """Constructing a StreamIndex generates index data."""
        data_list = [('text-a', ['fulltext'], 127, []),
            ('text-b', ['option'], 128, ['text-c'])]
        index = _StreamIndex(data_list)
        # NOTE(review): the closing lines of this assertion were dropped
        # from the paste; reconstructed -- confirm the attribute name.
        self.assertEqual({'text-a':(['fulltext'], (0, 127), []),
            'text-b':(['option'], (127, 127 + 128), ['text-c'])},
            index._by_version)

    def test_get_ancestry(self):
        knit = self.make_knit_with_4_versions_2_dags()
        self.assertIndexAncestry(knit, ['a'], ['a'], ['a'])
        self.assertIndexAncestry(knit, ['b'], ['b'], ['b'])
        self.assertIndexAncestry(knit, ['c'], ['c'], ['c'])
        self.assertIndexAncestry(knit, ['c'], ['a', 'b', 'c'],
            set(['a', 'b', 'c']))
        self.assertIndexAncestry(knit, ['c', 'd'], ['a', 'b', 'c', 'd'],
            set(['a', 'b', 'c', 'd']))

    def test_get_method(self):
        knit = self.make_knit_with_4_versions_2_dags()
        self.assertGetMethod(knit, ['a'], 'a', 'fulltext')
        self.assertGetMethod(knit, ['c'], 'c', 'line-delta')
        # get_method on a basis that is not in the datastream (but in the
        # backing knit) returns 'fulltext', because thats what we'll create as
        # we thunk across.
        self.assertGetMethod(knit, ['c'], 'b', 'fulltext')

    def test_iter_parents(self):
        knit = self.make_knit_with_4_versions_2_dags()
        self.assertIterParents(knit, ['a'], ['a'], [('a', [])])
        self.assertIterParents(knit, ['a', 'b'], ['a', 'b'],
            [('a', []), ('b', [])])
        self.assertIterParents(knit, ['a', 'b', 'c'], ['a', 'b', 'c'],
            [('a', []), ('b', []), ('c', ['b', 'a'])])
        self.assertIterParents(knit, ['a', 'b', 'c', 'd'],
            ['a', 'b', 'c', 'd'],
            [('a', []), ('b', []), ('c', ['b', 'a']), ('d', ['e', 'f'])])
        self.assertIterParents(knit, ['c'], ['a', 'b', 'c'],
            [('c', ['b', 'a'])])

    def test_get_options(self):
        knit = self.make_knit_with_4_versions_2_dags()
        self.assertGetOptions(knit, 'a', ['no-eol', 'fulltext'])
        self.assertGetOptions(knit, 'c', ['line-delta'])

    def test_get_parents_with_ghosts(self):
        knit = self.make_knit_with_4_versions_2_dags()
        self.assertGetParentsWithGhosts(knit, ['a'], 'a', [])
        self.assertGetParentsWithGhosts(knit, ['c'], 'c', ['b', 'a'])
        # Ghost parents ('e', 'f') are reported, not dropped.
        self.assertGetParentsWithGhosts(knit, ['d'], 'd', ['e', 'f'])

    def test_get_position(self):
        knit = self.make_knit_with_4_versions_2_dags()
        # get_position returns (thunk_flag, index(can be None), start, end) for
        # _StreamAccess to use.
        self.assertGetPosition(knit, ['a'], 'a', (False, None, 0, 78))
        self.assertGetPosition(knit, ['a', 'c'], 'c', (False, None, 78, 156))
        # get_position on a text that is not in the datastream (but in the
        # backing knit) returns (True, 'versionid', None, None) - and then the
        # access object can construct the relevant data as needed.
        self.assertGetPosition(knit, ['a', 'c'], 'b', (True, 'b', None, None))
class Test_StreamAccess(KnitTests):
    """Tests for the _StreamAccess raw-record layer over a data stream."""

    def get_index_access(self, knit, stream):
        """Get a _StreamAccess from knit and stream."""
        knit = knit._knit_from_datastream(stream)
        return knit._index, knit._data._access

    def assertGetRawRecords(self, knit, versions):
        """Raw records from the stream access match the source knit's."""
        index, access = self.get_index_access(knit,
            knit.get_data_stream(versions))
        # check that every version asked for can be obtained from the resulting
        # access object.
        # NOTE(review): several container-initialiser lines (memos/original/
        # streamed) were dropped from the paste; reconstructed -- confirm.
        # batched
        memos = []
        for version in versions:
            memos.append(knit._index.get_position(version))
        original = {}
        for version, data in zip(
            versions, knit._data._access.get_raw_records(memos)):
            original[version] = data
        memos = []
        for version in versions:
            memos.append(index.get_position(version))
        streamed = {}
        for version, data in zip(versions, access.get_raw_records(memos)):
            streamed[version] = data
        self.assertEqual(original, streamed)
        # individually
        for version in versions:
            data = list(access.get_raw_records(
                [index.get_position(version)]))[0]
            self.assertEqual(original[version], data)

    def make_knit_with_two_versions(self):
        """Build a knit containing two independent fulltexts 'a' and 'b'."""
        knit = self.make_test_knit()
        knit.add_lines('a', [], ["foo"])
        knit.add_lines('b', [], ["bar"])
        # NOTE(review): the return line was dropped from the paste;
        # reconstructed -- confirm against the original file.
        return knit

    def test_get_raw_records(self):
        knit = self.make_knit_with_two_versions()
        self.assertGetRawRecords(knit, ['a', 'b'])
        self.assertGetRawRecords(knit, ['a'])
        self.assertGetRawRecords(knit, ['b'])

    def test_get_raw_record_from_backing_knit(self):
        # the thunk layer should create an artificial A on-demand when needed.
        source_knit = self.make_test_knit(name='plain', annotate=False)
        target_knit = self.make_test_knit(name='annotated', annotate=True)
        source_knit.add_lines("A", [], ["Foo\n"])
        # Give the target A, so we can try to thunk across to it.
        target_knit.join(source_knit)
        index, access = self.get_index_access(target_knit,
            source_knit.get_data_stream([]))
        raw_data = list(access.get_raw_records([(True, "A", None, None)]))[0]
        df = GzipFile(mode='rb', fileobj=StringIO(raw_data))
        # NOTE(review): the assertEqual wrapper and trailing lines were
        # dropped from the paste; reconstructed -- confirm the expected text.
        self.assertEqual(
            'version A 1 5d36b88bb697a2d778f024048bafabd443d74503\n'
            'Foo\nend A\n',
            df.read())

    def test_asking_for_thunk_stream_is_not_plain_errors(self):
        knit = self.make_test_knit(name='annotated', annotate=True)
        knit.add_lines("A", [], ["Foo\n"])
        index, access = self.get_index_access(knit,
            knit.get_data_stream([]))
        # An annotated stream cannot be thunked as plain; it must error
        # rather than silently produce wrong data.
        self.assertRaises(errors.KnitCorrupt,
            list, access.get_raw_records([(True, "A", None, None)]))