def create_basic_dirstate(self):
"""Create a dirstate with a few files and directories.
tree = self.make_branch_and_tree('tree')
paths = ['a', 'b/', 'b/c', 'b/d/', 'b/d/e', 'b-c', 'f']
file_ids = ['a-id', 'b-id', 'c-id', 'd-id', 'e-id', 'b-c-id', 'f-id']
self.build_tree(['tree/' + p for p in paths])
tree.set_root_id('TREE_ROOT')
tree.add([p.rstrip('/') for p in paths], file_ids)
tree.commit('initial', rev_id='rev-1')
revision_id = 'rev-1'
# a_packed_stat = dirstate.pack_stat(os.stat('tree/a'))
t = self.get_transport('tree')
a_text = t.get_bytes('a')
a_sha = osutils.sha_string(a_text)
# b_packed_stat = dirstate.pack_stat(os.stat('tree/b'))
# c_packed_stat = dirstate.pack_stat(os.stat('tree/b/c'))
c_text = t.get_bytes('b/c')
c_sha = osutils.sha_string(c_text)
# d_packed_stat = dirstate.pack_stat(os.stat('tree/b/d'))
# e_packed_stat = dirstate.pack_stat(os.stat('tree/b/d/e'))
e_text = t.get_bytes('b/d/e')
e_sha = osutils.sha_string(e_text)
b_c_text = t.get_bytes('b-c')
b_c_sha = osutils.sha_string(b_c_text)
b_c_len = len(b_c_text)
# f_packed_stat = dirstate.pack_stat(os.stat('tree/f'))
f_text = t.get_bytes('f')
f_sha = osutils.sha_string(f_text)
null_stat = dirstate.DirState.NULLSTAT
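# Entries in the expected map pair a key of (dirname, basename, file-id)
# with one details tuple per tree: (minikind, fingerprint, size,
# executable, packed_stat or revision_id).  Column 0 describes the
# working tree (fresh files carry null_stat), column 1 the basis tree
# identified by revision_id.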
'':(('', '', 'TREE_ROOT'), [
('d', '', 0, False, null_stat),
('d', '', 0, False, revision_id),
'a':(('', 'a', 'a-id'), [
('f', '', 0, False, null_stat),
('f', a_sha, a_len, False, revision_id),
'b':(('', 'b', 'b-id'), [
('d', '', 0, False, null_stat),
('d', '', 0, False, revision_id),
'b/c':(('b', 'c', 'c-id'), [
('f', '', 0, False, null_stat),
('f', c_sha, c_len, False, revision_id),
'b/d':(('b', 'd', 'd-id'), [
('d', '', 0, False, null_stat),
('d', '', 0, False, revision_id),
'b/d/e':(('b/d', 'e', 'e-id'), [
('f', '', 0, False, null_stat),
('f', e_sha, e_len, False, revision_id),
'b-c':(('', 'b-c', 'b-c-id'), [
('f', '', 0, False, null_stat),
('f', b_c_sha, b_c_len, False, revision_id),
'f':(('', 'f', 'f-id'), [
('f', '', 0, False, null_stat),
('f', f_sha, f_len, False, revision_id),
state = dirstate.DirState.from_tree(tree, 'dirstate')
# Use a different object, to make sure nothing is pre-cached in memory.
state = dirstate.DirState.on_file('dirstate')
self.addCleanup(state.unlock)
self.assertEqual(dirstate.DirState.NOT_IN_MEMORY,
state._dirblock_state)
# This code is only really tested if we actually have to make more
# than one read, so set the page size to something smaller.
# We want it to contain about 2.2 records, so that we have a couple
# records that we can read per attempt
state._bisect_page_size = 200
return tree, state, expected
def create_duplicated_dirstate(self):
"""Create a dirstate with a deleted and added entries.
281
This grabs a basic_dirstate, and then removes and re adds every entry
284
tree, state, expected = self.create_basic_dirstate()
# Now we will just remove and add every file so we get an extra entry
# per entry. Unversion in reverse order so we handle subdirs
tree.unversion(['f-id', 'b-c-id', 'e-id', 'd-id', 'c-id', 'b-id', 'a-id'])
tree.add(['a', 'b', 'b/c', 'b/d', 'b/d/e', 'b-c', 'f'],
['a-id2', 'b-id2', 'c-id2', 'd-id2', 'e-id2', 'b-c-id2', 'f-id2'])
# Update the expected dictionary.
for path in ['a', 'b', 'b/c', 'b/d', 'b/d/e', 'b-c', 'f']:
orig = expected[path]
# This record was deleted in the current tree
expected[path] = (orig[0], [dirstate.DirState.NULL_PARENT_DETAILS,
new_key = (orig[0][0], orig[0][1], orig[0][2]+'2')
# And didn't exist in the basis tree
expected[path2] = (new_key, [orig[1][0],
dirstate.DirState.NULL_PARENT_DETAILS])
# We will replace the 'dirstate' file underneath 'state', but that is
# okay as long as we unlock 'state' first.
new_state = dirstate.DirState.from_tree(tree, 'dirstate')
# But we need to leave state in a read-lock because we already have
# a cleanup scheduled
return tree, state, expected
def create_renamed_dirstate(self):
"""Create a dirstate with a few internal renames.
This takes the basic dirstate, and moves the paths around.
tree, state, expected = self.create_basic_dirstate()
tree.rename_one('a', 'b/g')
tree.rename_one('b/d', 'h')
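# After the renames each old path keeps an 'r' (relocated) record in the
# working-tree column pointing at the new location, while the new path
# carries the real details in that column and an 'r' record pointing back
# at the old path in the basis column.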
old_a = expected['a']
expected['a'] = (old_a[0], [('r', 'b/g', 0, False, ''), old_a[1][1]])
expected['b/g'] = (('b', 'g', 'a-id'), [old_a[1][0],
('r', 'a', 0, False, '')])
old_d = expected['b/d']
expected['b/d'] = (old_d[0], [('r', 'h', 0, False, ''), old_d[1][1]])
expected['h'] = (('', 'h', 'd-id'), [old_d[1][0],
('r', 'b/d', 0, False, '')])
old_e = expected['b/d/e']
expected['b/d/e'] = (old_e[0], [('r', 'h/e', 0, False, ''),
expected['h/e'] = (('h', 'e', 'e-id'), [old_e[1][0],
('r', 'b/d/e', 0, False, '')])
new_state = dirstate.DirState.from_tree(tree, 'dirstate')
return tree, state, expected
class TestTreeToDirState(TestCaseWithDirState):
# This will unlock it
self.check_state_with_reopen(expected_result, state)
def test_set_state_from_inventory_preserves_hashcache(self):
# https://bugs.launchpad.net/bzr/+bug/146176
# set_state_from_inventory should preserve the stat and hash value for
# workingtree files that are not changed by the inventory.
tree = self.make_branch_and_tree('.')
# depends on the default format using dirstate...
# make a dirstate with some valid hashcache data
# file on disk, but that's not needed for this test
foo_contents = 'contents of foo'
self.build_tree_contents([('foo', foo_contents)])
tree.add('foo', 'foo-id')
foo_stat = os.stat('foo')
foo_packed = dirstate.pack_stat(foo_stat)
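# pack_stat condenses the interesting fields of the stat result into the
# short fingerprint string the dirstate stores next to the sha and size.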
759
foo_sha = osutils.sha_string(foo_contents)
760
foo_size = len(foo_contents)
762
# should not be cached yet, because the file's too fresh
764
(('', 'foo', 'foo-id',),
765
[('f', '', 0, False, dirstate.DirState.NULLSTAT)]),
766
tree._dirstate._get_entry(0, 'foo-id'))
767
# poke in some hashcache information - it wouldn't normally be
768
# stored because it's too fresh
769
tree._dirstate.update_minimal(
770
('', 'foo', 'foo-id'),
771
'f', False, foo_sha, foo_packed, foo_size, 'foo')
772
# now should be cached
774
(('', 'foo', 'foo-id',),
775
[('f', foo_sha, foo_size, False, foo_packed)]),
776
tree._dirstate._get_entry(0, 'foo-id'))
778
# extract the inventory, and add something to it
779
inv = tree._get_inventory()
780
# should see the file we poked in...
781
self.assertTrue(inv.has_id('foo-id'))
782
self.assertTrue(inv.has_filename('foo'))
783
inv.add_path('bar', 'file', 'bar-id')
784
tree._dirstate._validate()
785
# this used to cause it to lose its hashcache
786
tree._dirstate.set_state_from_inventory(inv)
787
tree._dirstate._validate()
793
# now check that the state still has the original hashcache value
794
state = tree._dirstate
796
foo_tuple = state._get_entry(0, path_utf8='foo')
798
(('', 'foo', 'foo-id',),
799
[('f', foo_sha, len(foo_contents), False,
800
dirstate.pack_stat(foo_stat))]),
806
def test_set_state_from_inventory_mixed_paths(self):
807
tree1 = self.make_branch_and_tree('tree1')
808
self.build_tree(['tree1/a/', 'tree1/a/b/', 'tree1/a-b/',
809
'tree1/a/b/foo', 'tree1/a-b/bar'])
812
tree1.add(['a', 'a/b', 'a-b', 'a/b/foo', 'a-b/bar'],
813
['a-id', 'b-id', 'a-b-id', 'foo-id', 'bar-id'])
814
tree1.commit('rev1', rev_id='rev1')
815
root_id = tree1.get_root_id()
816
inv = tree1.inventory
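# The expected results are listed in dirstate order: entries are grouped
# into per-directory blocks, and blocks sort by their split path chunks,
# so the 'a/b' block (('a', 'b')) comes before the 'a-b' block (('a-b',))
# even though 'a-b' < 'a/b' as a plain string.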
expected_result1 = [('', '', root_id, 'd'),
('', 'a', 'a-id', 'd'),
('', 'a-b', 'a-b-id', 'd'),
('a', 'b', 'b-id', 'd'),
('a/b', 'foo', 'foo-id', 'f'),
('a-b', 'bar', 'bar-id', 'f'),
expected_result2 = [('', '', root_id, 'd'),
('', 'a', 'a-id', 'd'),
('', 'a-b', 'a-b-id', 'd'),
('a-b', 'bar', 'bar-id', 'f'),
state = dirstate.DirState.initialize('dirstate')
state.set_state_from_inventory(inv)
for entry in state._iter_entries():
values.append(entry[0] + entry[1][0][:1])
self.assertEqual(expected_result1, values)
state.set_state_from_inventory(inv)
for entry in state._iter_entries():
values.append(entry[0] + entry[1][0][:1])
self.assertEqual(expected_result2, values)
def test_set_path_id_no_parents(self):
"""The id of a path can be changed trivally with no parents."""
849
519
state = dirstate.DirState.initialize('dirstate')
1435
class TestIterChildEntries(TestCaseWithDirState):
1437
def create_dirstate_with_two_trees(self):
1438
"""This dirstate contains multiple files and directories.
1448
b/h\xc3\xa5 h-\xc3\xa5-file #This is u'\xe5' encoded into utf-8
1450
Notice that a/e is an empty directory.
1452
There is one parent tree, which has the same shape with the following variations:
1453
b/g in the parent is gone.
1454
b/h in the parent has a different id
1455
b/i is new in the parent
1456
c is renamed to b/j in the parent
1458
:return: The dirstate, still write-locked.
1460
packed_stat = 'AAAAREUHaIpFB2iKAAADAQAtkqUAAIGk'
1461
null_sha = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
1462
NULL_PARENT_DETAILS = dirstate.DirState.NULL_PARENT_DETAILS
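# Each entry below carries two detail columns: the working tree first,
# then the single parent tree ('parent-revid').  NULL_PARENT_DETAILS marks
# a row absent from that tree; an 'r' row says the file lives at the named
# path in that tree (c is renamed to b/j in the parent, so 'c' records
# ('r', 'b/j', ...) in the parent column and 'b/j' records ('r', 'c', ...)
# in the working-tree column).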
root_entry = ('', '', 'a-root-value'), [
('d', '', 0, False, packed_stat),
('d', '', 0, False, 'parent-revid'),
a_entry = ('', 'a', 'a-dir'), [
('d', '', 0, False, packed_stat),
('d', '', 0, False, 'parent-revid'),
b_entry = ('', 'b', 'b-dir'), [
('d', '', 0, False, packed_stat),
('d', '', 0, False, 'parent-revid'),
c_entry = ('', 'c', 'c-file'), [
('f', null_sha, 10, False, packed_stat),
('r', 'b/j', 0, False, ''),
d_entry = ('', 'd', 'd-file'), [
('f', null_sha, 20, False, packed_stat),
('f', 'd', 20, False, 'parent-revid'),
e_entry = ('a', 'e', 'e-dir'), [
('d', '', 0, False, packed_stat),
('d', '', 0, False, 'parent-revid'),
f_entry = ('a', 'f', 'f-file'), [
('f', null_sha, 30, False, packed_stat),
('f', 'f', 20, False, 'parent-revid'),
g_entry = ('b', 'g', 'g-file'), [
('f', null_sha, 30, False, packed_stat),
NULL_PARENT_DETAILS,
h_entry1 = ('b', 'h\xc3\xa5', 'h-\xc3\xa5-file1'), [
('f', null_sha, 40, False, packed_stat),
NULL_PARENT_DETAILS,
h_entry2 = ('b', 'h\xc3\xa5', 'h-\xc3\xa5-file2'), [
NULL_PARENT_DETAILS,
('f', 'h', 20, False, 'parent-revid'),
i_entry = ('b', 'i', 'i-file'), [
NULL_PARENT_DETAILS,
('f', 'h', 20, False, 'parent-revid'),
j_entry = ('b', 'j', 'c-file'), [
('r', 'c', 0, False, ''),
('f', 'j', 20, False, 'parent-revid'),
dirblocks.append(('', [root_entry]))
dirblocks.append(('', [a_entry, b_entry, c_entry, d_entry]))
dirblocks.append(('a', [e_entry, f_entry]))
dirblocks.append(('b', [g_entry, h_entry1, h_entry2, i_entry, j_entry]))
state = dirstate.DirState.initialize('dirstate')
state._set_data(['parent'], dirblocks)
return state, dirblocks
def test_iter_children_b(self):
state, dirblocks = self.create_dirstate_with_two_trees()
self.addCleanup(state.unlock)
expected_result = []
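# dirblocks[i] is a (directory-path, [entries]) pair: block 1 holds the
# entries directly in the root, block 2 the entries under 'a', and block 3
# the entries under 'b', so dirblocks[3][1] indexes into the 'b' block.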
1529
expected_result.append(dirblocks[3][1][2]) # h2
1530
expected_result.append(dirblocks[3][1][3]) # i
1531
expected_result.append(dirblocks[3][1][4]) # j
1532
self.assertEqual(expected_result,
1533
list(state._iter_child_entries(1, 'b')))
1535
def test_iter_child_root(self):
1536
state, dirblocks = self.create_dirstate_with_two_trees()
1537
self.addCleanup(state.unlock)
1538
expected_result = []
1539
expected_result.append(dirblocks[1][1][0]) # a
1540
expected_result.append(dirblocks[1][1][1]) # b
1541
expected_result.append(dirblocks[1][1][3]) # d
1542
expected_result.append(dirblocks[2][1][0]) # e
1543
expected_result.append(dirblocks[2][1][1]) # f
1544
expected_result.append(dirblocks[3][1][2]) # h2
1545
expected_result.append(dirblocks[3][1][3]) # i
1546
expected_result.append(dirblocks[3][1][4]) # j
1547
self.assertEqual(expected_result,
1548
list(state._iter_child_entries(1, '')))
1551
1106
class TestDirstateSortOrder(TestCaseWithTransport):
1552
1107
"""Test that DirState adds entries in the right order."""
1777
1329
stat_value=stat_value)
1778
1330
self.assertEqual('target', link_or_sha1)
1779
1331
self.assertEqual([('read_link', 'a', ''),
1780
('read_link', 'a', ''),
1332
self.assertEqual([('l', '', 6, False, dirstate.DirState.NULLSTAT)],
state.adjust_time(+20) # Skip into the future, all files look old
link_or_sha1 = state.update_entry(entry, abspath='a',
stat_value=stat_value)
self.assertEqual('target', link_or_sha1)
# We need to re-read the link because only now can we cache it
self.assertEqual([('read_link', 'a', ''),
('read_link', 'a', ''),
('read_link', 'a', ''),
self.assertEqual([('l', 'target', 6, False, packed_stat)],
# Another call won't re-read the link
self.assertEqual([('read_link', 'a', ''),
('read_link', 'a', ''),
('read_link', 'a', ''),
link_or_sha1 = state.update_entry(entry, abspath='a',
stat_value=stat_value)
self.assertEqual('target', link_or_sha1)
self.assertEqual([('l', 'target', 6, False, packed_stat)],
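# The sequence above shows the caching rule: update_entry only records the
# link target (like a file's sha1) once the stat times are safely older
# than the state's idea of 'now'; until then every call re-reads the link.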
def do_update_entry(self, state, entry, abspath):
stat_value = os.lstat(abspath)
return state.update_entry(entry, abspath, stat_value)
def test_update_entry_dir(self):
state, entry = self.get_state_with_a()
self.build_tree(['a/'])
self.assertIs(None, self.do_update_entry(state, entry, 'a'))
def test_update_entry_dir_unchanged(self):
state, entry = self.get_state_with_a()
self.build_tree(['a/'])
state.adjust_time(+20)
self.assertIs(None, self.do_update_entry(state, entry, 'a'))
self.assertEqual(dirstate.DirState.IN_MEMORY_MODIFIED,
state._dirblock_state)
self.assertEqual(dirstate.DirState.IN_MEMORY_UNMODIFIED,
state._dirblock_state)
self.assertIs(None, self.do_update_entry(state, entry, 'a'))
self.assertEqual(dirstate.DirState.IN_MEMORY_UNMODIFIED,
state._dirblock_state)
def test_update_entry_file_unchanged(self):
state, entry = self.get_state_with_a()
self.build_tree(['a'])
sha1sum = 'b50e5406bb5e153ebbeb20268fcf37c87e1ecfb6'
state.adjust_time(+20)
self.assertEqual(sha1sum, self.do_update_entry(state, entry, 'a'))
self.assertEqual(dirstate.DirState.IN_MEMORY_MODIFIED,
state._dirblock_state)
self.assertEqual(dirstate.DirState.IN_MEMORY_UNMODIFIED,
state._dirblock_state)
self.assertEqual(sha1sum, self.do_update_entry(state, entry, 'a'))
self.assertEqual(dirstate.DirState.IN_MEMORY_UNMODIFIED,
state._dirblock_state)
def create_and_test_file(self, state, entry):
"""Create a file at 'a' and verify the state finds it.
self.assertPackStat('AAAbWEXm4FxF5uBmAAADCQBjLNIAAIGk', st)
class TestBisect(TestCaseWithDirState):
"""Test the ability to bisect into the disk format."""
def assertBisect(self, expected_map, map_keys, state, paths):
"""Assert that bisecting for paths returns the right result.
class TestDirstateValidation(TestCaseWithDirState):
def test_validate_correct_dirstate(self):
state = self.create_complex_dirstate()
# and make sure we can also validate with a read lock
def test_dirblock_not_sorted(self):
tree, state, expected = self.create_renamed_dirstate()
state._read_dirblocks_if_needed()
last_dirblock = state._dirblocks[-1]
# we're appending to the dirblock, but this name comes before some of
# the existing names; that's wrong
last_dirblock[1].append(
(('h', 'aaaa', 'a-id'),
[('a', '', 0, False, ''),
('a', '', 0, False, '')]))
e = self.assertRaises(AssertionError,
self.assertContainsRe(str(e), 'not sorted')
def test_dirblock_name_mismatch(self):
tree, state, expected = self.create_renamed_dirstate()
state._read_dirblocks_if_needed()
last_dirblock = state._dirblocks[-1]
# add an entry with the wrong directory name
last_dirblock[1].append(
[('a', '', 0, False, ''),
('a', '', 0, False, '')]))
e = self.assertRaises(AssertionError,
self.assertContainsRe(str(e),
"doesn't match directory name")
def test_dirblock_missing_rename(self):
tree, state, expected = self.create_renamed_dirstate()
state._read_dirblocks_if_needed()
last_dirblock = state._dirblocks[-1]
# make another entry for a-id, without a correct 'r' pointer to
# the real occurrence in the working tree
last_dirblock[1].append(
(('h', 'z', 'a-id'),
[('a', '', 0, False, ''),
('a', '', 0, False, '')]))
e = self.assertRaises(AssertionError,
self.assertContainsRe(str(e),
'file a-id is absent in row')
class TestDirstateTreeReference(TestCaseWithDirState):
def test_reference_revision_is_none(self):
tree = self.make_branch_and_tree('tree', format='dirstate-with-subtree')
subtree = self.make_branch_and_tree('tree/subtree',
format='dirstate-with-subtree')
subtree.set_root_id('subtree')
tree.add_reference(subtree)
state = dirstate.DirState.from_tree(tree, 'dirstate')
key = ('', 'subtree', 'subtree')
expected = ('', [(key,
[('t', '', 0, False, 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx')])])
self.assertEqual(expected, state._find_block(key))
class TestDiscardMergeParents(TestCaseWithDirState):
def test_discard_no_parents(self):
# This should be a no-op
state = self.create_empty_dirstate()
self.addCleanup(state.unlock)
state._discard_merge_parents()
def test_discard_one_parent(self):
packed_stat = 'AAAAREUHaIpFB2iKAAADAQAtkqUAAIGk'
root_entry_direntry = ('', '', 'a-root-value'), [
('d', '', 0, False, packed_stat),
('d', '', 0, False, packed_stat),
dirblocks.append(('', [root_entry_direntry]))
dirblocks.append(('', []))
state = self.create_empty_dirstate()
self.addCleanup(state.unlock)
state._set_data(['parent-id'], dirblocks[:])
state._discard_merge_parents()
self.assertEqual(dirblocks, state._dirblocks)
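# With two parents recorded ('parent-id' and 'merged-id' below), every
# entry carries three detail columns; _discard_merge_parents should drop
# the third (merged) column and leave the working tree and first parent
# untouched.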
def test_discard_simple(self):
packed_stat = 'AAAAREUHaIpFB2iKAAADAQAtkqUAAIGk'
root_entry_direntry = ('', '', 'a-root-value'), [
('d', '', 0, False, packed_stat),
('d', '', 0, False, packed_stat),
('d', '', 0, False, packed_stat),
expected_root_entry_direntry = ('', '', 'a-root-value'), [
('d', '', 0, False, packed_stat),
('d', '', 0, False, packed_stat),
dirblocks.append(('', [root_entry_direntry]))
dirblocks.append(('', []))
state = self.create_empty_dirstate()
self.addCleanup(state.unlock)
state._set_data(['parent-id', 'merged-id'], dirblocks[:])
# This should strip off the extra column
state._discard_merge_parents()
expected_dirblocks = [('', [expected_root_entry_direntry]), ('', [])]
self.assertEqual(expected_dirblocks, state._dirblocks)
def test_discard_absent(self):
"""If entries are only in a merge, discard should remove the entries"""
null_stat = dirstate.DirState.NULLSTAT
present_dir = ('d', '', 0, False, null_stat)
present_file = ('f', '', 0, False, null_stat)
absent = dirstate.DirState.NULL_PARENT_DETAILS
root_key = ('', '', 'a-root-value')
file_in_root_key = ('', 'file-in-root', 'a-file-id')
file_in_merged_key = ('', 'file-in-merged', 'b-file-id')
dirblocks = [('', [(root_key, [present_dir, present_dir, present_dir])]),
('', [(file_in_merged_key,
[absent, absent, present_file]),
[present_file, present_file, present_file]),
state = self.create_empty_dirstate()
self.addCleanup(state.unlock)
state._set_data(['parent-id', 'merged-id'], dirblocks[:])
exp_dirblocks = [('', [(root_key, [present_dir, present_dir])]),
('', [(file_in_root_key,
[present_file, present_file]),
state._discard_merge_parents()
self.assertEqual(exp_dirblocks, state._dirblocks)
def test_discard_renamed(self):
null_stat = dirstate.DirState.NULLSTAT
present_dir = ('d', '', 0, False, null_stat)
present_file = ('f', '', 0, False, null_stat)
absent = dirstate.DirState.NULL_PARENT_DETAILS
root_key = ('', '', 'a-root-value')
file_in_root_key = ('', 'file-in-root', 'a-file-id')
# Renamed relative to parent
file_rename_s_key = ('', 'file-s', 'b-file-id')
file_rename_t_key = ('', 'file-t', 'b-file-id')
# And one that is renamed between the parents, but absent in this tree
key_in_1 = ('', 'file-in-1', 'c-file-id')
key_in_2 = ('', 'file-in-2', 'c-file-id')
('', [(root_key, [present_dir, present_dir, present_dir])]),
[absent, present_file, ('r', 'file-in-2', 'c-file-id')]),
[absent, ('r', 'file-in-1', 'c-file-id'), present_file]),
[present_file, present_file, present_file]),
[('r', 'file-t', 'b-file-id'), absent, present_file]),
[present_file, absent, ('r', 'file-s', 'b-file-id')]),
('', [(root_key, [present_dir, present_dir])]),
('', [(key_in_1, [absent, present_file]),
(file_in_root_key, [present_file, present_file]),
(file_rename_t_key, [present_file, absent]),
state = self.create_empty_dirstate()
self.addCleanup(state.unlock)
state._set_data(['parent-id', 'merged-id'], dirblocks[:])
state._discard_merge_parents()
self.assertEqual(exp_dirblocks, state._dirblocks)
def test_discard_all_subdir(self):
null_stat = dirstate.DirState.NULLSTAT
present_dir = ('d', '', 0, False, null_stat)
present_file = ('f', '', 0, False, null_stat)
absent = dirstate.DirState.NULL_PARENT_DETAILS
root_key = ('', '', 'a-root-value')
subdir_key = ('', 'sub', 'dir-id')
child1_key = ('sub', 'child1', 'child1-id')
child2_key = ('sub', 'child2', 'child2-id')
child3_key = ('sub', 'child3', 'child3-id')
('', [(root_key, [present_dir, present_dir, present_dir])]),
('', [(subdir_key, [present_dir, present_dir, present_dir])]),
('sub', [(child1_key, [absent, absent, present_file]),
(child2_key, [absent, absent, present_file]),
(child3_key, [absent, absent, present_file]),
('', [(root_key, [present_dir, present_dir])]),
('', [(subdir_key, [present_dir, present_dir])]),
state = self.create_empty_dirstate()
self.addCleanup(state.unlock)
state._set_data(['parent-id', 'merged-id'], dirblocks[:])
state._discard_merge_parents()
self.assertEqual(exp_dirblocks, state._dirblocks)
class Test_InvEntryToDetails(TestCaseWithDirState):
def assertDetails(self, expected, inv_entry):
details = dirstate.DirState._inv_entry_to_details(inv_entry)
self.assertEqual(expected, details)
# details should always allow join() and always be a plain str when
(minikind, fingerprint, size, executable, tree_data) = details
self.assertIsInstance(minikind, str)
self.assertIsInstance(fingerprint, str)
self.assertIsInstance(tree_data, str)
def test_unicode_symlink(self):
# In general, the code base doesn't support a target that contains
# non-ascii characters. So we just assert that an ascii target comes
# through unchanged.
inv_entry = inventory.InventoryLink('link-file-id', 'name',
inv_entry.revision = 'link-revision-id'
inv_entry.symlink_target = u'link-target'
details = self.assertDetails(('l', 'link-target', 0, False,
'link-revision-id'), inv_entry)
class TestBisectDirblock(TestCase):
"""Test that bisect_dirblock() returns the expected values.
bisect_dirblock is intended to work like bisect.bisect_left() except it
knows it is working on dirblocks and that dirblocks are sorted by ('path',
'to', 'foo') chunks rather than by raw 'path/to/foo'.
def assertBisect(self, dirblocks, split_dirblocks, path, *args, **kwargs):
"""Assert that bisect_split works like bisect_left on the split paths.
:param dirblocks: A list of (path, [info]) pairs.
:param split_dirblocks: A list of ((split, path), [info]) pairs.
:param path: The path we are indexing.
All other arguments will be passed along.
bisect_split_idx = dirstate.bisect_dirblock(dirblocks, path,
split_dirblock = (path.split('/'), [])
bisect_left_idx = bisect.bisect_left(split_dirblocks, split_dirblock,
self.assertEqual(bisect_left_idx, bisect_split_idx,
'bisect_split disagreed. %s != %s'
% (bisect_left_idx, bisect_split_idx, path)
def paths_to_dirblocks(self, paths):
"""Convert a list of paths into dirblock form.
Also, ensure that the paths are in proper sorted order.
dirblocks = [(path, []) for path in paths]
split_dirblocks = [(path.split('/'), []) for path in paths]
self.assertEqual(sorted(split_dirblocks), split_dirblocks)
return dirblocks, split_dirblocks
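# An illustrative sketch (hypothetical test, stdlib-only): the two orders
# disagree exactly when a name contains a character that sorts before '/',
# e.g. '-' (0x2d) < '/' (0x2f), so 'a-b' < 'a/b' as raw strings while the
# '/'-split form ['a-b'] sorts after ['a', 'b'].
def test_split_order_differs_from_raw_order(self):
    # Raw string comparison puts 'a-b' first...
    self.assertTrue('a-b' < 'a/b')
    # ...but comparing the split chunks (the dirblock order) reverses it.
    self.assertTrue('a-b'.split('/') > 'a/b'.split('/'))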
def test_simple(self):
"""In the simple case it works just like bisect_left"""
paths = ['', 'a', 'b', 'c', 'd']
dirblocks, split_dirblocks = self.paths_to_dirblocks(paths)
self.assertBisect(dirblocks, split_dirblocks, path)
self.assertBisect(dirblocks, split_dirblocks, '_')
self.assertBisect(dirblocks, split_dirblocks, 'aa')
self.assertBisect(dirblocks, split_dirblocks, 'bb')
self.assertBisect(dirblocks, split_dirblocks, 'cc')
self.assertBisect(dirblocks, split_dirblocks, 'dd')
self.assertBisect(dirblocks, split_dirblocks, 'a/a')
self.assertBisect(dirblocks, split_dirblocks, 'b/b')
self.assertBisect(dirblocks, split_dirblocks, 'c/c')
self.assertBisect(dirblocks, split_dirblocks, 'd/d')
def test_involved(self):
"""This is where bisect_left diverges slightly."""
'a/a', 'a/a/a', 'a/a/z', 'a/a-a', 'a/a-z',
'a/z', 'a/z/a', 'a/z/z', 'a/z-a', 'a/z-z',
'z', 'z/a/a', 'z/a/z', 'z/a-a', 'z/a-z',
'z/z', 'z/z/a', 'z/z/z', 'z/z-a', 'z/z-z',
dirblocks, split_dirblocks = self.paths_to_dirblocks(paths)
self.assertBisect(dirblocks, split_dirblocks, path)
def test_involved_cached(self):
"""This is where bisect_left diverges slightly."""
'a/a', 'a/a/a', 'a/a/z', 'a/a-a', 'a/a-z',
'a/z', 'a/z/a', 'a/z/z', 'a/z-a', 'a/z-z',
'z', 'z/a/a', 'z/a/z', 'z/a-a', 'z/a-z',
'z/z', 'z/z/a', 'z/z/z', 'z/z-a', 'z/z-z',
dirblocks, split_dirblocks = self.paths_to_dirblocks(paths)
self.assertBisect(dirblocks, split_dirblocks, path, cache=cache)