    def update_by_delta(self, delta):
        """Apply an inventory delta to the dirstate for tree 0

        This is the workhorse for apply_inventory_delta in dirstate based
        trees.

        :param delta: An inventory delta. See Inventory.apply_delta for
            details.
        """
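        # Each delta item is a 4-tuple (old_path, new_path, file_id, inv_entry).
        # Illustrative examples (values hypothetical): an add looks like
        # (None, 'dir/new-file', 'new-file-id', <InventoryFile>), a delete like
        # ('dir/old-file', None, 'old-file-id', None), and a rename carries
        # both paths plus the entry for the new location.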
        self._read_dirblocks_if_needed()
        encode = cache_utf8.encode
        insertions = {}
        removals = {}
        # Accumulate parent references (path_utf8, id), to check for parentless
        # items or items placed under files/links/tree-references. We get
        # references from every item in the delta that is not a deletion and
        # is not itself the root.
        parents = set()
        # Added ids must not be in the dirstate already. This set holds those
        # ids.
        new_ids = set()
        # This loop transforms the delta to single atomic operations that can
        # be executed and validated.
        for old_path, new_path, file_id, inv_entry in sorted(
            inventory._check_delta_unique_old_paths(
            inventory._check_delta_unique_new_paths(
            inventory._check_delta_ids_match_entry(
            inventory._check_delta_ids_are_valid(
            inventory._check_delta_new_path_entry_both_or_None(delta))))),
            reverse=True):
            if (file_id in insertions) or (file_id in removals):
                raise errors.InconsistentDelta(old_path or new_path, file_id,
                    "repeated file_id")
            if old_path is not None:
                old_path = old_path.encode('utf-8')
                removals[file_id] = old_path
            else:
                new_ids.add(file_id)
            if new_path is not None:
                if inv_entry is None:
                    raise errors.InconsistentDelta(new_path, file_id,
                        "new_path with no entry")
                new_path = new_path.encode('utf-8')
                dirname_utf8, basename = osutils.split(new_path)
                if basename:
                    parents.add((dirname_utf8, inv_entry.parent_id))
                key = (dirname_utf8, basename, file_id)
                minikind = DirState._kind_to_minikind[inv_entry.kind]
                if minikind == 't':
                    fingerprint = inv_entry.reference_revision or ''
                else:
                    fingerprint = ''
                insertions[file_id] = (key, minikind, inv_entry.executable,
                                       fingerprint, new_path)
            # Transform moves into delete+add pairs
            if None not in (old_path, new_path):
                for child in self._iter_child_entries(0, old_path):
                    if child[0][2] in insertions or child[0][2] in removals:
                        continue
                    child_dirname = child[0][0]
                    child_basename = child[0][1]
                    minikind = child[1][0][0]
                    fingerprint = child[1][0][4]
                    executable = child[1][0][3]
                    old_child_path = osutils.pathjoin(child_dirname,
                                                      child_basename)
                    removals[child[0][2]] = old_child_path
                    child_suffix = child_dirname[len(old_path):]
                    new_child_dirname = (new_path + child_suffix)
                    key = (new_child_dirname, child_basename, child[0][2])
                    new_child_path = osutils.pathjoin(new_child_dirname,
                                                      child_basename)
                    insertions[child[0][2]] = (key, minikind, executable,
                                               fingerprint, new_child_path)
        self._check_delta_ids_absent(new_ids, delta, 0)
        try:
            self._apply_removals(removals.iteritems())
            self._apply_insertions(insertions.values())
            self._after_delta_check_parents(parents, 0)
        except errors.BzrError, e:
            self._changes_aborted = True
            if 'integrity error' not in str(e):
                raise
            # _get_entry raises BzrError when a request is inconsistent; we
            # want such errors to be shown as InconsistentDelta - and that
            # fits the behaviour we trigger.
            raise errors.InconsistentDeltaDelta(delta, "error from _get_entry.")
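    # Illustrative shapes of the structures built above (values hypothetical):
    #   removals:   {'file-id': 'old/path'}
    #   insertions: {'file-id': (('new', 'name', 'file-id'), 'f', False, '',
    #                            'new/name')}
    # i.e. each insertion is (key, minikind, executable, fingerprint, path_utf8).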

    def _apply_removals(self, removals):
        for file_id, path in sorted(removals, reverse=True,
            key=operator.itemgetter(1)):
            dirname, basename = osutils.split(path)
            block_i, entry_i, d_present, f_present = \
                self._get_block_entry_index(dirname, basename, 0)
            try:
                entry = self._dirblocks[block_i][1][entry_i]
            except IndexError:
                self._changes_aborted = True
                raise errors.InconsistentDelta(path, file_id,
                    "Wrong path for old path.")
            if not f_present or entry[1][0][0] in 'ar':
                self._changes_aborted = True
                raise errors.InconsistentDelta(path, file_id,
                    "Wrong path for old path.")
            if file_id != entry[0][2]:
                self._changes_aborted = True
                raise errors.InconsistentDelta(path, file_id,
                    "Attempt to remove path has wrong id - found %r."
                    % entry[0][2])
            self._make_absent(entry)
            # See if we have a malformed delta: deleting a directory must not
            # leave crud behind. This increases the number of bisects needed
            # substantially, but deletion or renames of large numbers of paths
            # is rare enough that it shouldn't be an issue in practice.
            block_i, entry_i, d_present, f_present = \
                self._get_block_entry_index(path, '', 0)
            if d_present:
                # The dir block is still present in the dirstate; this could
                # be due to it being in a parent tree, or a corrupt delta.
                for child_entry in self._dirblocks[block_i][1]:
                    if child_entry[1][0][0] not in ('r', 'a'):
                        self._changes_aborted = True
                        raise errors.InconsistentDelta(path, entry[0][2],
                            "The file id was deleted but its children were "
                            "not deleted.")

    def _apply_insertions(self, adds):
        try:
            for key, minikind, executable, fingerprint, path_utf8 in sorted(adds):
                self.update_minimal(key, minikind, executable, fingerprint,
                                    path_utf8=path_utf8)
        except errors.NotVersionedError:
            self._changes_aborted = True
            raise errors.InconsistentDelta(path_utf8.decode('utf8'), key[2],
                "Missing parent")
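    # Illustrative: `adds` items are (key, minikind, executable, fingerprint,
    # path_utf8) tuples; sorting them lexicographically also places a parent
    # directory's entry before the entries of its children, since the parent's
    # dirname is a proper prefix of each child's dirname.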

    def update_basis_by_delta(self, delta, new_revid):
        """Update the parents of this tree after a commit.
        # At the same time, to reduce interface friction we convert the input
        # inventory entries to dirstate.
        root_only = ('', '')
        # Accumulate parent references (path_utf8, id), to check for parentless
        # items or items placed under files/links/tree-references. We get
        # references from every item in the delta that is not a deletion and
        # is not itself the root.
        parents = set()
        # Added ids must not be in the dirstate already. This set holds those
        # ids.
        new_ids = set()
        for old_path, new_path, file_id, inv_entry in delta:
            if inv_entry is not None and file_id != inv_entry.file_id:
                raise errors.InconsistentDelta(new_path, file_id,
                    "mismatched entry file_id %r" % inv_entry)
            if new_path is not None:
                if inv_entry is None:
                    raise errors.InconsistentDelta(new_path, file_id,
                        "new_path with no entry")
                new_path_utf8 = encode(new_path)
                # note the parent for validation
                dirname_utf8, basename_utf8 = osutils.split(new_path_utf8)
                if basename_utf8:
                    parents.add((dirname_utf8, inv_entry.parent_id))
            if old_path is None:
                adds.append((None, encode(new_path), file_id,
                    inv_to_entry(inv_entry), True))
                new_ids.add(file_id)
            elif new_path is None:
                deletes.append((encode(old_path), None, file_id, None, True))
            elif (old_path, new_path) != root_only:
                # changes to just the root should not require remove/insertion
                # of everything.
                changes.append((encode(old_path), encode(new_path), file_id,
                    inv_to_entry(inv_entry)))
        self._check_delta_ids_absent(new_ids, delta, 1)
        try:
            # Finish expunging deletes/first half of renames.
            self._update_basis_apply_deletes(deletes)
            # Reinstate second half of renames and new paths.
            self._update_basis_apply_adds(adds)
            # Apply in-situ changes.
            self._update_basis_apply_changes(changes)
            self._after_delta_check_parents(parents, 1)
        except errors.BzrError, e:
            self._changes_aborted = True
            if 'integrity error' not in str(e):
                raise
            # _get_entry raises BzrError when a request is inconsistent; we
            # want such errors to be shown as InconsistentDelta - and that
            # fits the behaviour we trigger. Part of this is driven by dirstate
            # only supporting deltas that turn the basis into a closer fit to
            # the active tree.
            raise errors.InconsistentDeltaDelta(delta, "error from _get_entry.")

        self._dirblock_state = DirState.IN_MEMORY_MODIFIED
        self._header_state = DirState.IN_MEMORY_MODIFIED
        self._id_index = None
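    # Illustrative shapes of the lists applied above (paths are utf8):
    #   adds:    (None, 'new/path', file_id, <dirstate details>, True)
    #   deletes: ('old/path', None, file_id, None, True)
    #   changes: ('old/path', 'new/path', file_id, <dirstate details>)
    # where <dirstate details> is the inv_to_entry() conversion of the
    # inventory entry.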

    def _check_delta_ids_absent(self, new_ids, delta, tree_index):
        """Check that none of the file_ids in new_ids are present in a tree."""
        if not new_ids:
            return
        id_index = self._get_id_index()
        for file_id in new_ids:
            for key in id_index.get(file_id, ()):
                block_i, entry_i, d_present, f_present = \
                    self._get_block_entry_index(key[0], key[1], tree_index)
                if not f_present:
                    # In a different tree
                    continue
                entry = self._dirblocks[block_i][1][entry_i]
                if entry[0][2] != file_id:
                    # Different file_id, so not what we want.
                    continue
                # NB: No changes made before this helper is called, so no need
                # to set the _changes_aborted flag.
                raise errors.InconsistentDelta(
                    ("%s/%s" % key[0:2]).decode('utf8'), file_id,
                    "This file_id is new in the delta but already present in "
                    "one of the trees.")

    def _update_basis_apply_adds(self, adds):
        """Apply a sequence of adds to tree 1 during update_basis_by_delta.
                # it is being resurrected here, so blank it out temporarily.
                self._dirblocks[block_index][1][entry_index][1][1] = null

    def _after_delta_check_parents(self, parents, index):
        """Check that parents required by the delta are all intact.

        :param parents: An iterable of (path_utf8, file_id) tuples which are
            required to be present in tree 'index' at path_utf8 with id file_id
            and be a directory.
        :param index: The column in the dirstate to check for parents in.
        """
        for dirname_utf8, file_id in parents:
            # Get the entry - this ensures that file_id, dirname_utf8 exists and
            # has the right file id.
            entry = self._get_entry(index, file_id, dirname_utf8)
            if entry[1] is None:
                self._changes_aborted = True
                raise errors.InconsistentDelta(dirname_utf8.decode('utf8'),
                    file_id, "This parent is not present.")
            # Parents of things must be directories
            if entry[1][index][0] != 'd':
                self._changes_aborted = True
                raise errors.InconsistentDelta(dirname_utf8.decode('utf8'),
                    file_id, "This parent is not a directory.")
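    # Illustrative: for a delta that adds 'dir/sub/file', the loop that builds
    # `parents` records ('dir/sub', <parent directory id>), and the check above
    # then requires that path to be present with minikind 'd' in the tree
    # column given by `index`.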

    def _observed_sha1(self, entry, sha1, stat_value,
        _stat_to_minikind=_stat_to_minikind, _pack_stat=pack_stat):
        """Note the sha1 of a file.

    def _get_id_index(self):
        """Get an id index of self._dirblocks.

        This maps from file_id => [(directory, name, file_id)] entries where
        that file_id appears in one of the trees.
        """
        if self._id_index is None:
            id_index = {}
            for key, tree_details in self._iter_entries():
                self._add_to_id_index(id_index, key)
            self._id_index = id_index
        return self._id_index

    def _add_to_id_index(self, id_index, entry_key):
        """Add this entry to the _id_index mapping."""
        # This code used to use a set for every entry in the id_index. However,
        # it is *rare* to have more than one entry. So a set is a large
        # overkill. And even when we do, we won't ever have more than the
        # number of parent trees. Which is still a small number (rarely >2). As
        # such, we use a simple tuple, and do our own uniqueness checks. While
        # the 'in' check is O(N), N is nicely bounded so it shouldn't ever
        # cause quadratic failure.
        # TODO: This should use StaticTuple
        file_id = entry_key[2]
        entry_key = static_tuple.StaticTuple.from_sequence(entry_key)
        if file_id not in id_index:
            id_index[file_id] = static_tuple.StaticTuple(entry_key,)
        else:
            entry_keys = id_index[file_id]
            if entry_key not in entry_keys:
                id_index[file_id] = entry_keys + (entry_key,)
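    # Illustrative: starting from an empty dict, adding ('', 'a', 'a-id') and
    # then ('dir', 'a', 'a-id') leaves id_index['a-id'] equal to the tuple
    # (('', 'a', 'a-id'), ('dir', 'a', 'a-id')); adding either key again is a
    # no-op because of the uniqueness check above.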

    def _remove_from_id_index(self, id_index, entry_key):
        """Remove this entry from the _id_index mapping.

        It is a programming error to call this when the entry_key is not
        already present.
        """
        file_id = entry_key[2]
        entry_keys = list(id_index[file_id])
        entry_keys.remove(entry_key)
        id_index[file_id] = static_tuple.StaticTuple.from_sequence(entry_keys)

    def _get_output_lines(self, lines):
        """Format lines for final output.
                  and new_entry_key[1:] < current_old[0][1:])):
                # new comes before:
                # add an entry for this and advance new
                if tracing:
                    trace.mutter("Inserting from new '%s'.",
                        new_path_utf8.decode('utf8'))
                self.update_minimal(new_entry_key, current_new_minikind,
                    executable=current_new[1].executable,
                    path_utf8=new_path_utf8, fingerprint=fingerprint,
                    fullscan=True)
                current_new = advance(new_iterator)
            else:
                # we've advanced past the place where the old key would be,
                # without seeing it in the new list, so it must be gone.
                if tracing:
                    trace.mutter("Deleting from old '%s/%s'.",
                        current_old[0][0].decode('utf8'),
                        current_old[0][1].decode('utf8'))
                self._make_absent(current_old)
                current_old = advance(old_iterator)
        self._dirblock_state = DirState.IN_MEMORY_MODIFIED
        self._id_index = None
        self._packed_stat_index = None
        if tracing:
            trace.mutter("set_state_from_inventory complete.")

    def _make_absent(self, current_old):
        """Mark current_old - an entry - as absent for tree 0.
        new_details = (minikind, fingerprint, size, executable, packed_stat)
        id_index = self._get_id_index()
        if not present:
            # New record. Check there isn't an entry at this path already.
            if not fullscan:
                low_index, _ = self._find_entry_index(key[0:2] + ('',), block)
                while low_index < len(block):
                    entry = block[low_index]
                    if entry[0][0:2] == key[0:2]:
                        if entry[1][0][0] not in 'ar':
                            # This entry has the same path (but a different id) as
                            # the new entry we're adding, and is present in this
                            # tree.
                            raise errors.InconsistentDelta(
                                ("%s/%s" % key[0:2]).decode('utf8'), key[2],
                                "Attempt to add item at path already occupied by "
                                "id %r" % entry[0][2])
                        low_index += 1
                    else:
                        break
            # new entry, synthesise cross reference here,
            existing_keys = id_index.get(key[2], ())
            if not existing_keys:
                # not currently in the state, simplest case
                new_entry = key, [new_details] + self._empty_parent_info()
            else:
                # present at one or more existing other paths.
                # grab one of them and use it to generate parent
                # relocation/absent entries.
                new_entry = key, [new_details]
                # existing_keys can be changed as we iterate.
                for other_key in tuple(existing_keys):
                    # change the record at other to be a pointer to this new
                    # record. The loop looks similar to the change to
                    # relocations when updating an existing record but it's not:
                    # the test for existing kinds is different: this can be
                    # factored out to a helper though.
                    other_block_index, present = self._find_block_index_from_key(
                        other_key)
                    if not present:
                        raise AssertionError('could not find block for %s' % (
                            other_key,))
                    other_block = self._dirblocks[other_block_index][1]
                    other_entry_index, present = self._find_entry_index(
                        other_key, other_block)
                    if not present:
                        raise AssertionError(
                            'update_minimal: could not find other entry for %s'
                            % (other_key,))
                    if path_utf8 is None:
                        raise AssertionError('no path')
                    # Turn this other location into a reference to the new
                    # location. This also updates the aliased iterator
                    # (current_old in set_state_from_inventory) so that the old
                    # entry, if not already examined, is skipped over by that
                    # loop.
                    other_entry = other_block[other_entry_index]
                    other_entry[1][0] = ('r', path_utf8, 0, False, '')
                    if self._maybe_remove_row(other_block, other_entry_index,
                                              id_index):
                        # If the row holding this was removed, we need to
                        # recompute where this entry goes
                        entry_index, _ = self._find_entry_index(key, block)

                # This loop:
                # adds a tuple to the new details for each column
                #  - either by copying an existing relocation pointer inside that column
                #  - or by creating a new pointer to the right row inside that column
                num_present_parents = self._num_present_parents()
                if num_present_parents:
                    # TODO: This re-evaluates the existing_keys set, do we need
                    #       to do that ourselves?
                    other_key = list(existing_keys)[0]
                for lookup_index in xrange(1, num_present_parents + 1):
                    # grab any one entry, use it to find the right path.
                    # TODO: optimise this to reduce memory use in highly
                       False, DirState.NULLSTAT)
    state._dirblock_state = DirState.IN_MEMORY_MODIFIED
    return link_or_sha1


class ProcessEntryPython(object):

    __slots__ = ["old_dirname_to_file_id", "new_dirname_to_file_id",
        "last_source_parent", "last_target_parent", "include_unchanged",
        "partial", "use_filesystem_for_exec", "utf8_decode",
        "searched_specific_files", "search_specific_files",
        "searched_exact_paths", "search_specific_file_parents", "seen_ids",
        "state", "source_index", "target_index", "want_unversioned", "tree"]

    def __init__(self, include_unchanged, use_filesystem_for_exec,
        search_specific_files, state, source_index, target_index,
        want_unversioned, tree):
        self.old_dirname_to_file_id = {}
        self.new_dirname_to_file_id = {}
        # Are we doing a partial iter_changes?
        self.partial = search_specific_files != set([''])
        # Using a list so that we can access the values and change them in
        # nested scope. Each one is [path, file_id, entry]
        self.last_source_parent = [None, None]
        self.last_target_parent = [None, None]
        self.include_unchanged = include_unchanged
        self.use_filesystem_for_exec = use_filesystem_for_exec
        self.utf8_decode = cache_utf8._utf8_decode
        # For all search_indexes in each path at or under each element of
        # search_specific_files, if the detail is relocated: add the id, and
        # add the relocated path as one to search if it's not searched already.
        # If the detail is not relocated, add the id.
        self.searched_specific_files = set()
        # When we search exact paths without expanding downwards, we record
        # that here.
        self.searched_exact_paths = set()
        self.search_specific_files = search_specific_files
        # The parents up to the root of the paths we are searching.
        # After all normal paths are returned, these specific items are returned.
        self.search_specific_file_parents = set()
        # The ids we've sent out in the delta.
        self.seen_ids = set()
        self.state = state
        self.source_index = source_index
        self.target_index = target_index
        if target_index != 0:
            # A lot of code in here depends on target_index == 0
            raise errors.BzrError('unsupported target index')
        self.want_unversioned = want_unversioned
        self.tree = tree

    def _process_entry(self, entry, path_info, pathjoin=osutils.pathjoin):
        """Compare an entry and real disk to generate delta information.

        :param path_info: top_relpath, basename, kind, lstat, abspath for
            the path of entry. If None, then the path is considered absent in
            the target (Perhaps we should pass in a concrete entry for this ?)
            Basename is returned as a utf8 string because we expect this
            tuple will be ignored, and don't want to take the time to
            decode.
        :return: (iter_changes_result, changed). If the entry has not been
            handled then changed is None. Otherwise it is False if no content
            or metadata changes have occurred, and True if any content or
            metadata change has occurred. If self.include_unchanged is True then
            if changed is not None, iter_changes_result will always be a result
            tuple. Otherwise, iter_changes_result is None unless changed is
            True.
        """
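        # Illustrative shape of an iter_changes_result (values hypothetical):
        #   (file_id, (source_path, target_path), changed_content, versioned,
        #    parent, name, kind, executable)
        # where versioned, parent, name, kind and executable are themselves
        # (source, target) pairs; e.g. result[6] is the (old kind, new kind).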
        if self.source_index is None:
            source_details = DirState.NULL_PARENT_DETAILS
                if source_minikind != 'f':
                    content_change = True
                else:
                    # Check the sha. We can't just rely on the size as
                    # content filtering may mean different sizes actually
                    # map to the same content
                    if link_or_sha1 is None:
                        # Stat cache miss:
                        statvalue, link_or_sha1 = \
                            self.state._sha1_provider.stat_and_sha1(
                            path_info[4])
                        self.state._observed_sha1(entry, link_or_sha1,
                            statvalue)
                    content_change = (link_or_sha1 != source_details[1])
                # Target details is updated at update_entry time
                if self.use_filesystem_for_exec:
                    # We don't need S_ISREG here, because we are sure
                "source_minikind=%r, target_minikind=%r"
                % (source_minikind, target_minikind))
            ## import pdb;pdb.set_trace()
        return None, None

    def __iter__(self):
        return self
3544
def _gather_result_for_consistency(self, result):
3545
"""Check a result we will yield to make sure we are consistent later.
3547
This gathers result's parents into a set to output later.
3549
:param result: A result tuple.
3551
if not self.partial or not result[0]:
3553
self.seen_ids.add(result[0])
3554
new_path = result[1][1]
3556
# Not the root and not a delete: queue up the parents of the path.
3557
self.search_specific_file_parents.update(
3558
osutils.parent_directories(new_path.encode('utf8')))
3559
# Add the root directory which parent_directories does not
3561
self.search_specific_file_parents.add('')
3563
3247

    def iter_changes(self):
        """Iterate over the changes."""
        utf8_decode = cache_utf8._utf8_decode
        _cmp_by_dirs = cmp_by_dirs
        _process_entry = self._process_entry
        search_specific_files = self.search_specific_files
        searched_specific_files = self.searched_specific_files
        splitpath = osutils.splitpath
                try:
                    current_dir_info = dir_iterator.next()
                except StopIteration:
                    current_dir_info = None
        for result in self._iter_specific_file_parents():
            yield result

    def _iter_specific_file_parents(self):
        """Iter over the specific file parents."""
        while self.search_specific_file_parents:
            # Process the parent directories for the paths we were iterating.
            # Even in extremely large trees this should be modest, so currently
            # no attempt is made to optimise.
            path_utf8 = self.search_specific_file_parents.pop()
            if osutils.is_inside_any(self.searched_specific_files, path_utf8):
                # We've examined this path.
                continue
            if path_utf8 in self.searched_exact_paths:
                # We've examined this path.
                continue
            path_entries = self.state._entries_for_path(path_utf8)
            # We need either one or two entries. If the path in
            # self.target_index has moved (so the entry in source_index is in
            # 'ar') then we need to also look for the entry for this path in
            # self.source_index, to output the appropriate delete-or-rename.
            selected_entries = []
            found_item = False
            for candidate_entry in path_entries:
                # Find entries present in target at this path:
                if candidate_entry[1][self.target_index][0] not in 'ar':
                    found_item = True
                    selected_entries.append(candidate_entry)
                # Find entries present in source at this path:
                elif (self.source_index is not None and
                    candidate_entry[1][self.source_index][0] not in 'ar'):
                    found_item = True
                    if candidate_entry[1][self.target_index][0] == 'a':
                        # Deleted, emit it here.
                        selected_entries.append(candidate_entry)
                    else:
                        # renamed, emit it when we process the directory it
                        # got renamed to.
                        self.search_specific_file_parents.add(
                            candidate_entry[1][self.target_index][1])
            if not found_item:
                raise AssertionError(
                    "Missing entry for specific path parent %r, %r" % (
                    path_utf8, path_entries))
            path_info = self._path_info(path_utf8, path_utf8.decode('utf8'))
            for entry in selected_entries:
                if entry[0][2] in self.seen_ids:
                    continue
                result, changed = self._process_entry(entry, path_info)
                if changed is None:
                    raise AssertionError(
                        "Got entry<->path mismatch for specific path "
                        "%r entry %r path_info %r " % (
                        path_utf8, entry, path_info))
                # Only include changes - we're outside the users requested
                # expansion.
                if changed:
                    self._gather_result_for_consistency(result)
                    if (result[6][0] == 'directory' and
                        result[6][1] != 'directory'):
                        # This stopped being a directory, the old children have
                        # to be included.
                        if entry[1][self.source_index][0] == 'r':
                            # renamed, take the source path
                            entry_path_utf8 = entry[1][self.source_index][1]
                        else:
                            entry_path_utf8 = path_utf8
                        initial_key = (entry_path_utf8, '', '')
                        block_index, _ = self.state._find_block_index_from_key(
                            initial_key)
                        if block_index == 0:
                            # The children of the root are in block index 1.
                            block_index += 1
                        current_block = None
                        if block_index < len(self.state._dirblocks):
                            current_block = self.state._dirblocks[block_index]
                            if not osutils.is_inside(
                                entry_path_utf8, current_block[0]):
                                # No entries for this directory at all.
                                current_block = None
                        if current_block is not None:
                            for entry in current_block[1]:
                                if entry[1][self.source_index][0] in 'ar':
                                    # Not in the source tree, so doesn't have to be
                                    # included.
                                    continue
                                # Path of the entry itself.
                                self.search_specific_file_parents.add(
                                    osutils.pathjoin(*entry[0][:2]))
                if changed or self.include_unchanged:
                    yield result
            self.searched_exact_paths.add(path_utf8)

    def _path_info(self, utf8_path, unicode_path):
        """Generate path_info for unicode_path.

        :return: None if unicode_path does not exist, or a path_info tuple.
        """
        abspath = self.tree.abspath(unicode_path)
        try:
            stat = os.lstat(abspath)
        except OSError, e:
            if e.errno == errno.ENOENT:
                # the path does not exist.
                return None
            else:
                raise
        utf8_basename = utf8_path.rsplit('/', 1)[-1]
        dir_info = (utf8_path, utf8_basename,
            osutils.file_kind_from_stat_mode(stat.st_mode), stat,
            abspath)
        if dir_info[2] == 'directory':
            if self.tree._directory_is_tree_reference(
                unicode_path):
                self.root_dir_info = self.root_dir_info[:2] + \
                    ('tree-reference',) + self.root_dir_info[3:]
        return dir_info


# Try to load the compiled form if possible
try:
    from bzrlib._dirstate_helpers_pyx import (
        _read_dirblocks,
        bisect_dirblock,
        _bisect_path_left,
        _bisect_path_right,
        cmp_by_dirs,
        ProcessEntryC as _process_entry,
        update_entry as update_entry,
        )
except ImportError, e:
    osutils.failed_to_load_extension(e)
    from bzrlib._dirstate_helpers_py import (
        _read_dirblocks,
        bisect_dirblock,
        _bisect_path_left,
        _bisect_path_right,
        cmp_by_dirs,
        )
    # FIXME: It would be nice to be able to track moved lines so that the
    # corresponding python code can be moved to the _dirstate_helpers_py
    # module. I don't want to break the history for this important piece of
    # code so I left the code here -- vila 20090622
    update_entry = py_update_entry
    _process_entry = ProcessEntryPython
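# Whichever branch ran above, `update_entry` and `_process_entry` now refer to
# the implementations that will actually be used: the compiled helpers when the
# extension loaded, otherwise the pure-Python `py_update_entry` and
# `ProcessEntryPython` defined earlier in this module.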