                          'different\n', sha1_2),
                         compressor.extract(('newlabel',)))

    def test_pop_last(self):
        compressor = self.compressor()
        _, _, _, _ = compressor.compress(('key1',),
            'some text\nfor the first entry\n', None)
        expected_lines = list(compressor.chunks)
        _, _, _, _ = compressor.compress(('key2',),
            'some text\nfor the second entry\n', None)
        compressor.pop_last()
        self.assertEqual(expected_lines, compressor.chunks)

class TestPyrexGroupCompressor(TestGroupCompressor):

        # And the decompressor is finalized
        self.assertIs(None, block._z_content_decompressor)

    def test__dump(self):
        dup_content = 'some duplicate content\nwhich is sufficiently long\n'
        key_to_text = {('1',): dup_content + '1 unique\n',
                       ('2',): dup_content + '2 extra special\n'}
        locs, block = self.make_block(key_to_text)
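        # A reading of the expected _dump() output below, assuming the usual
        # groupcompress block layout: 'f' marks a fulltext entry and 'd' a
        # delta entry whose instruction list mixes 'c' (copy a range of
        # already-emitted bytes) and 'i' (insert new bytes), so the second
        # text is stored as a copy of the shared prefix plus an insert of
        # its unique tail.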
        self.assertEqual([('f', len(key_to_text[('1',)])),
                          ('d', 21, len(key_to_text[('2',)]),
                           [('c', 2, len(dup_content)),
                            ('i', len('2 extra special\n'), '')
                           ]),
                         ], block._dump())

class TestCaseWithGroupCompressVersionedFiles(tests.TestCaseWithTransport):

    def make_test_vf(self, create_graph, keylength=1, do_cleanup=True,
                     dir='.', inconsistency_fatal=True):
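        # inconsistency_fatal is forwarded to make_pack_factory below;
        # judging by the tests further down, it controls whether re-adding a
        # key with conflicting details raises KnitCorrupt or merely warns.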
        t = self.get_transport(dir)
        vf = groupcompress.make_pack_factory(graph=create_graph,
            delta=False, keylength=keylength,
            inconsistency_fatal=inconsistency_fatal)(t)
        if do_cleanup:
            self.addCleanup(groupcompress.cleanup_pack_group, vf)
        return vf

class TestGroupCompressVersionedFiles(TestCaseWithGroupCompressVersionedFiles):

    def make_g_index(self, name, ref_lists=0, nodes=[]):
        builder = btree_index.BTreeBuilder(ref_lists)
        for node, references, value in nodes:
            builder.add_node(node, references, value)
        stream = builder.finish()
        trans = self.get_transport()
        size = trans.put_file(name, stream)
        return btree_index.BTreeGraphIndex(trans, name, size)

    def make_g_index_missing_parent(self):
        graph_index = self.make_g_index('missing_parent', 1,
            [(('parent', ), '2 78 2 10', ([],)),
             (('tip', ), '2 78 2 10',
              ([('parent', ), ('missing-parent', )],)),
              ])
        return graph_index

    def test_get_record_stream_as_requested(self):
        # Consider promoting 'as-requested' to general availability, and
        # make this a VF interface test

        self.assertIs(block, record._manager._block)

    def test_add_missing_noncompression_parent_unvalidated_index(self):
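        # make_g_index_missing_parent() builds an index whose 'tip' node
        # lists 'missing-parent' as a parent that is never added; scanning
        # the unvalidated index should make get_missing_parents() report it.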
        unvalidated = self.make_g_index_missing_parent()
        combined = _mod_index.CombinedGraphIndex([unvalidated])
        index = groupcompress._GCGraphIndex(combined,
            is_locked=lambda: True, parents=True,
            track_external_parent_refs=True)
        index.scan_unvalidated_index(unvalidated)
        self.assertEqual(
            frozenset([('missing-parent',)]), index.get_missing_parents())

    def test_track_external_parent_refs(self):
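        # The parents of 'new-key' are not present in either index; with
        # track_external_parent_refs=True the index is expected to remember
        # them so that get_missing_parents() reports both keys.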
        g_index = self.make_g_index('empty', 1, [])
        mod_index = btree_index.BTreeBuilder(1, 1)
        combined = _mod_index.CombinedGraphIndex([g_index, mod_index])
        index = groupcompress._GCGraphIndex(combined,
            is_locked=lambda: True, parents=True,
            add_callback=mod_index.add_nodes,
            track_external_parent_refs=True)
        index.add_records([
            (('new-key',), '2 10 2 10', [(('parent-1',), ('parent-2',))])])
        self.assertEqual(
            frozenset([('parent-1',), ('parent-2',)]),
            index.get_missing_parents())

    def make_source_with_b(self, a_parent, path):
        source = self.make_test_vf(True, dir=path)
        source.add_lines(('a',), (), ['lines\n'])
        if a_parent:
            b_parents = (('a',),)
        else:
            b_parents = ()
        source.add_lines(('b',), b_parents, ['lines\n'])
        return source

    def do_inconsistent_inserts(self, inconsistency_fatal):
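        # Insert 'b' into the same target twice, once from a source built
        # without the 'a' parent and once with it, so the second insert
        # carries parent details that conflict with the record already there.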
        target = self.make_test_vf(True, dir='target',
                                   inconsistency_fatal=inconsistency_fatal)
        for x in range(2):
            source = self.make_source_with_b(x==1, 'source%s' % x)
            target.insert_record_stream(source.get_record_stream(
                [('b',)], 'unordered', False))

    def test_inconsistent_redundant_inserts_warn(self):
        """Should not insert a record that is already present."""
        warnings = []
        def warning(template, args):
            warnings.append(template % args)
        _trace_warning = trace.warning
        trace.warning = warning
        try:
            self.do_inconsistent_inserts(inconsistency_fatal=False)
        finally:
            trace.warning = _trace_warning
        self.assertEqual(["inconsistent details in skipped record: ('b',)"
                          " ('42 32 0 8', ((),)) ('74 32 0 8', ((('a',),),))"],
                         warnings)

    def test_inconsistent_redundant_inserts_raises(self):
        e = self.assertRaises(errors.KnitCorrupt, self.do_inconsistent_inserts,
                              inconsistency_fatal=True)
        self.assertContainsRe(str(e), "Knit.* corrupt: inconsistent details"
                              " in add_records:"
                              " \('b',\) \('42 32 0 8', \(\(\),\)\) \('74 32"
                              " 0 8', \(\(\('a',\),\),\)\)")

class TestLazyGroupCompress(tests.TestCaseWithTransport):