        return self.bzrdir.is_control_filename(filename)

    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the
        grandchildren.

        For example, in the tree::

           a/
             b/
               c
             d/
               e
           f/
             g

        The yield order (ignoring root) would be::
          a, f, a/b, a/d, a/b/c, a/d/e, f/g

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        """
        return self.inventory.iter_entries_by_dir(
            specific_file_ids=specific_file_ids, yield_parents=yield_parents)

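    # Illustrative usage sketch (not part of the original source): walking a
    # tree with iter_entries_by_dir().  `wt` is assumed to be a concrete tree
    # object (e.g. a WorkingTree opened elsewhere), read-locked by the caller.
    #
    #   wt.lock_read()
    #   try:
    #       for path, entry in wt.iter_entries_by_dir():
    #           print path, entry.kind, entry.file_id
    #   finally:
    #       wt.unlock()
    #
    # For the example tree in the docstring above this prints a, f, a/b, a/d,
    # a/b/c, a/d/e, f/g in that order (all children before any grandchildren).
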
    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path, entry.file_id

    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
            % self.__class__.__name__)

    def stored_kind(self, file_id):
        """File kind stored for this file_id.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        """
        return self.kind(file_id)

    def path_content_summary(self, path):
        """Get a summary of the information about path.

        All the attributes returned are for the canonical form, not the
        convenient form (if content filters are in use.)

        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file and the size of the
            canonical form can be cheaply determined, None otherwise.
            exec is None unless kind is file and the platform supports the 'x'
            bit.
            sha1-or-link is the link target if kind is symlink, or the sha1
            if it can be obtained without reading the file.
        """
        raise NotImplementedError(self.path_content_summary)

    def get_symlink_target(self, file_id):
        raise NotImplementedError(self.get_symlink_target)

    def get_canonical_inventory_paths(self, paths):
        """Like get_canonical_inventory_path() but works on multiple items.

        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case
            insensitively.
        """
        return list(self._yield_canonical_inventory_paths(paths))

    def get_canonical_inventory_path(self, path):
        """Returns the first inventory item that case-insensitively matches path.

        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches case-insensitively, it is implementation
        defined which is returned.

        If no path matches case-insensitively, the input path is returned, but
        with as many path entries that do exist changed to their canonical
        form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        """
        return self._yield_canonical_inventory_paths([path]).next()

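    # Illustrative sketch (not part of the original source): assuming `tree`
    # versions a file called 'README.txt', resolving user-supplied case might
    # look like:
    #
    #   tree.get_canonical_inventory_path('readme.TXT')    # -> 'README.txt'
    #   tree.get_canonical_inventory_path('no/Such/file')  # -> input path,
    #       # with any leading elements that do exist canonicalised
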
    def _yield_canonical_inventory_paths(self, paths):
        for path in paths:
            # First, if the path as specified exists exactly, just use it.
            if self.path2id(path) is not None:
                yield path
                continue
            # Otherwise walk down from the root, matching one path element at
            # a time, case-insensitively.
            cur_id = self.get_root_id()
            cur_path = ''
            bit_iter = iter(path.split("/"))
            for elt in bit_iter:
                lelt = elt.lower()
                for child in self.iter_children(cur_id):
                    try:
                        child_base = os.path.basename(self.id2path(child))
                        if child_base.lower() == lelt:
                            cur_id = child
                            cur_path = osutils.pathjoin(cur_path, child_base)
                            break
                    except errors.NoSuchId:
                        # before a change is committed we can see this error...
                        continue
                else:
                    # got to the end of this directory and no entries matched.
                    # Return what matched so far, plus the rest as specified.
                    cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
                    break
            yield cur_path

    def get_root_id(self):
        """Return the file_id for the root of this tree."""
        raise NotImplementedError(self.get_root_id)

    def annotate_iter(self, file_id,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.

        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.
        :param file_id: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of
            this value.
        """
        raise NotImplementedError(self.annotate_iter)

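    # Illustrative sketch (not part of the original source): `wt` is assumed
    # to be a concrete, read-locked tree (e.g. a WorkingTree) and `file_id` a
    # versioned file in it.  Uncommitted lines come back as 'current:'.
    #
    #   for revision_id, line in wt.annotate_iter(file_id):
    #       print revision_id, line,
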
    def _get_plan_merge_data(self, file_id, other, base):
        from bzrlib import versionedfile
        vf = versionedfile._PlanMergeVersionedFile(file_id)
        last_revision_a = self._get_file_revision(file_id, vf, 'this:')
        last_revision_b = other._get_file_revision(file_id, vf, 'other:')
        if base is None:
            last_revision_base = None
        else:
            last_revision_base = base._get_file_revision(file_id, vf, 'base:')
        return vf, last_revision_a, last_revision_b, last_revision_base

    def plan_file_merge(self, file_id, other, base=None):
        """Generate a merge plan based on annotations.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,
                             last_revision_base)

    def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 last_revision_base)

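    # Illustrative sketch (not part of the original source): producing a merge
    # plan for one file between this tree and another.  `this_tree`,
    # `other_tree` and `file_id` are assumed to exist and both trees to be
    # read-locked by the caller.
    #
    #   for state, line in this_tree.plan_file_merge(file_id, other_tree):
    #       # state is a plan label such as 'unchanged', 'new-a' or 'new-b'
    #       print state, line,
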
    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)

    @staticmethod
    def _file_revision(revision_tree, file_id):
        """Determine the revision associated with a file in a given tree."""
        revision_tree.lock_read()
        try:
            return revision_tree.inventory[file_id].revision
        finally:
            revision_tree.unlock()

    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                         self.get_file(file_id).readlines())
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self._file_revision(self, file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def walkdirs(self, prefix=""):
        raise NotImplementedError(self.walkdirs)

    def supports_content_filtering(self):
        return False

    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        """
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            note("*** %s content-filter: %s => %r" % (path, prefs, stk))
        return stk

    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        """
        if self.supports_content_filtering():
            return lambda path, file_id: \
                    self._content_filter_stack(path, file_id)
        else:
            return None

    def iter_search_rules(self, path_names, pref_names=None,
        _default_searcher=None):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
          Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
          See _RulesSearcher.get_items for details on the tuple sequence.
        """
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)

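    # Illustrative sketch (not part of the original source): looking up a
    # single preference ('eol' is just an assumed example name) for two files
    # in a tree that supports rules:
    #
    #   for items in tree.iter_search_rules(['README', 'src/foo.c'], ['eol']):
    #       print items    # one tuple sequence per path, e.g. () if unset
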
    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher


######################################################################
    def _changes_from_entries(self, source_entry, target_entry,
        source_path=None, target_path=None):
        """Generate an iter_changes tuple between source_entry and target_entry.

        :param source_entry: An inventory entry from self.source, or None.
        :param target_entry: An inventory entry from self.target, or None.
        :param source_path: The path of source_entry, if known. If not known
            it will be looked up.
        :param target_path: The path of target_entry, if known. If not known
            it will be looked up.
        :return: A tuple, item 0 of which is an iter_changes result tuple, and
            item 1 is True if there are any changes in the result tuple.
        """
        if source_entry is None:
            if target_entry is None:
                return None
            file_id = target_entry.file_id
        else:
            file_id = source_entry.file_id
        if source_entry is not None:
            source_versioned = True
            source_name = source_entry.name
            source_parent = source_entry.parent_id
            if source_path is None:
                source_path = self.source.id2path(file_id)
            source_kind, source_executable, source_stat = \
                self.source._comparison_data(source_entry, source_path)
        else:
            source_versioned = False
            source_name = None
            source_parent = None
            source_kind = None
            source_executable = None
        if target_entry is not None:
            target_versioned = True
            target_name = target_entry.name
            target_parent = target_entry.parent_id
            if target_path is None:
                target_path = self.target.id2path(file_id)
            target_kind, target_executable, target_stat = \
                self.target._comparison_data(target_entry, target_path)
        else:
            target_versioned = False
            target_name = None
            target_parent = None
            target_kind = None
            target_executable = None
        versioned = (source_versioned, target_versioned)
        kind = (source_kind, target_kind)
        changed_content = False
        if source_kind != target_kind:
            changed_content = True
        elif source_kind == 'file':
            if (self.source.get_file_sha1(file_id, source_path, source_stat) !=
                self.target.get_file_sha1(file_id, target_path, target_stat)):
                changed_content = True
        elif source_kind == 'symlink':
            if (self.source.get_symlink_target(file_id) !=
                self.target.get_symlink_target(file_id)):
                changed_content = True
        # XXX: Yes, the indentation below is wrong. But fixing it broke
        # test_merge.TestMergerEntriesLCAOnDisk.
        # test_nested_tree_subtree_renamed_and_modified. We'll wait for
        # the fix from bzr.dev -- vila 2009026
        elif source_kind == 'tree-reference':
            if (self.source.get_reference_revision(file_id, source_path)
                != self.target.get_reference_revision(file_id, target_path)):
                    changed_content = True
        parent = (source_parent, target_parent)
        name = (source_name, target_name)
        executable = (source_executable, target_executable)
        if (changed_content is not False or versioned[0] != versioned[1]
            or parent[0] != parent[1] or name[0] != name[1] or
            executable[0] != executable[1]):
            changes = True
        else:
            changes = False
        return (file_id, (source_path, target_path), changed_content,
                versioned, parent, name, kind, executable), changes

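    # Illustrative note (not part of the original source): item 0 of the
    # returned tuple has the standard iter_changes shape, e.g. for a modified
    # file:
    #
    #   (file_id,
    #    (source_path, target_path),   # paths in source and target
    #    True,                         # changed_content
    #    (True, True),                 # versioned in (source, target)
    #    (parent_id, parent_id),       # parent ids
    #    (name, name),                 # names
    #    ('file', 'file'),             # kinds
    #    (False, False))               # executable bits
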
    def compare(self, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):

    def iter_changes(self, include_unchanged=False,
                      specific_files=None, pb=None, extra_trees=[],
                      require_versioned=True, want_unversioned=False):
        all_unversioned = deque()
        to_paths = {}
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        entry_count = 0
        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable values when execute is not supported.
        fake_entry = InventoryFile('unused', 'unused', 'unused')
        for target_path, target_entry in to_entries_by_dir:
            while (all_unversioned and
                all_unversioned[0][0] < target_path.split('/')):
                unversioned_path = all_unversioned.popleft()
                target_kind, target_executable, target_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, None),
                    (None, unversioned_path[0][-1]),
                    (None, target_kind),
                    (None, target_executable))
            source_path, source_entry = from_data.get(target_entry.file_id,
                (None, None))
            result, changes = self._changes_from_entries(source_entry,
                target_entry, source_path=source_path, target_path=target_path)
            to_paths[result[0]] = result[1][1]
            entry_count += 1
            if result[3][0]:
                entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            if changes or include_unchanged:
                if specific_file_ids is not None:
                    new_parent_id = result[4][1]
                    precise_file_ids.add(new_parent_id)
                    changed_file_ids.append(result[0])
                yield result
            # Ensure correct behaviour for reparented/added specific files.
            if specific_files is not None:
                # Record output dirs
                if result[6][1] == 'directory':
                    seen_dirs.add(result[0])
                # Record parents of reparented/added entries.
                versioned = result[3]
                parents = result[4]
                if not versioned[0] or parents[0] != parents[1]:
                    seen_parents.add(parents[1])
        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()
            from_kind, from_executable, stat_value = \
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            changed_file_ids.append(file_id)
            yield(file_id, (path, to_path), changed_content, versioned, parent,
                  name, kind, executable)
        changed_file_ids = set(changed_file_ids)
        if specific_file_ids is not None:
            for result in self._handle_precise_ids(precise_file_ids,
                changed_file_ids):
                yield result

    def _get_entry(self, tree, file_id):
        """Get an inventory entry from a tree, with missing entries as None.

        If the tree raises NotImplementedError on accessing .inventory, then
        this is worked around using iter_entries_by_dir on just the file id
        desired.

        :param tree: The tree to lookup the entry in.
        :param file_id: The file_id to lookup.
        """
        try:
            inventory = tree.inventory
        except NotImplementedError:
            # No inventory available.
            try:
                iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
                return iterator.next()[1]
            except StopIteration:
                return None
        else:
            try:
                return inventory[file_id]
            except errors.NoSuchId:
                return None

    def _handle_precise_ids(self, precise_file_ids, changed_file_ids,
        discarded_changes=None):
        """Fill out a partial iter_changes to be consistent.

        :param precise_file_ids: The file ids of parents that were seen during
            the iter_changes.
        :param changed_file_ids: The file ids of already emitted items.
        :param discarded_changes: An optional dict of precalculated
            iter_changes items which the partial iter_changes had not output
            but had calculated.
        :return: A generator of iter_changes items to output.
        """
        # process parents of things that had changed under the user's
        # requested paths to prevent incorrect paths or parent ids which
        # aren't in the tree.
        while precise_file_ids:
            precise_file_ids.discard(None)
            # Don't emit file_ids twice
            precise_file_ids.difference_update(changed_file_ids)
            if not precise_file_ids:
                break
            # If there was something at a given output path in source, we
            # have to include the entry from source in the delta, or we would
            # be putting this entry into a used path.
            paths = []
            for parent_id in precise_file_ids:
                try:
                    paths.append(self.target.id2path(parent_id))
                except errors.NoSuchId:
                    # This id has been dragged in from the source by delta
                    # expansion and isn't present in target at all: we don't
                    # need to check for path collisions on it.
                    pass
            for path in paths:
                old_id = self.source.path2id(path)
                precise_file_ids.add(old_id)
            precise_file_ids.discard(None)
            current_ids = precise_file_ids
            precise_file_ids = set()
            # We have to emit all of precise_file_ids that have been altered.
            # We may have to output the children of some of those ids if any
            # directories have stopped being directories.
            for file_id in current_ids:
                if discarded_changes:
                    result = discarded_changes.get(file_id)
                    old_entry = None
                else:
                    result = None
                if result is None:
                    old_entry = self._get_entry(self.source, file_id)
                    new_entry = self._get_entry(self.target, file_id)
                    result, changes = self._changes_from_entries(
                        old_entry, new_entry)
                else:
                    changes = True
                # Get this parent's parent to examine.
                new_parent_id = result[4][1]
                precise_file_ids.add(new_parent_id)
                if changes:
                    if (result[6][0] == 'directory' and
                        result[6][1] != 'directory'):
                        # This stopped being a directory, the old children have
                        # to be included.
                        if old_entry is None:
                            # Reusing a discarded change.
                            old_entry = self._get_entry(self.source, file_id)
                        for child in old_entry.children.values():
                            precise_file_ids.add(child.file_id)
                    changed_file_ids.add(result[0])
                    yield result


class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()

    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie

    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
                   MultiWalker._path_to_key(path2))

    @staticmethod
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)

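    # Illustrative sketch (not part of the original source): the key groups
    # entries by containing directory, so siblings sort together and all
    # children of the root sort before any grandchild:
    #
    #   MultiWalker._path_to_key(u'a/b/c')   # -> ([u'a', u'b'], u'c')
    #   sorted([u'a/b/c', u'a/d', u'f'], key=MultiWalker._path_to_key)
    #   # -> [u'f', u'a/d', u'a/b/c']  (plain string sort would put 'f' last)
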
    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)

    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result

    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra

    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries

    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                                            alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values

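    # Illustrative usage sketch (not part of the original source): walking a
    # basis tree against two other revision trees.  All trees are assumed to
    # be read-locked and to support iter_entries_by_dir().
    #
    #   walker = MultiWalker(basis_tree, [tree_a, tree_b])
    #   for path, file_id, master_ie, other_values in walker.iter_all():
    #       # master_ie is None for entries only present in the other trees;
    #       # other_values is a list of (path, entry) pairs, one per other
    #       # tree, with (None, None) where the entry is absent.
    #       print path, file_id

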
# This was deprecated before 0.12, but did not have an official warning
@symbol_versioning.deprecated_function(symbol_versioning.zero_twelve)
def RevisionTree(*args, **kwargs):
    """RevisionTree has moved to bzrlib.revisiontree.RevisionTree()

    Accessing it as bzrlib.tree.RevisionTree has been deprecated as of
    bzr 0.12; please use bzrlib.revisiontree.RevisionTree instead.
    """
    from bzrlib.revisiontree import RevisionTree as _RevisionTree
    return _RevisionTree(*args, **kwargs)