return self.bzrdir.is_control_filename(filename)
    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the
        grandchildren.

        For example, in the tree::

           a/
             b/
               c
             d/
               e
           f/
             g

        The yield order (ignoring root) would be::

          a, f, a/b, a/d, a/b/c, a/d/e, f/g

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        """
        return self.inventory.iter_entries_by_dir(
            specific_file_ids=specific_file_ids, yield_parents=yield_parents)
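    # Illustrative usage sketch (not part of the original source): assuming
    # ``tree`` is any Tree instance, its entries can be walked in 'by_dir'
    # order like so:
    #
    #   tree.lock_read()
    #   try:
    #       for path, entry in tree.iter_entries_by_dir():
    #           print path, entry.kind
    #   finally:
    #       tree.unlock()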
    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path, entry.file_id
    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
                                  % self.__class__.__name__)
    def stored_kind(self, file_id):
        """File kind stored for this file_id.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        """
        return self.kind(file_id)
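    # Illustrative sketch (not part of the original source): for a working
    # tree whose versioned file has been removed from disk, kind() may fail,
    # while stored_kind() can still answer from the stored inventory data:
    #
    #   file_id = tree.path2id('README')        # hypothetical path
    #   print tree.stored_kind(file_id)         # e.g. 'file'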
    def path_content_summary(self, path):
        """Get a summary of the information about path.

        All the attributes returned are for the canonical form, not the
        convenient form (if content filters are in use.)

        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file and the size of the
                canonical form can be cheaply determined, None otherwise.
            exec is None unless kind is file and the platform supports the 'x'
                bit.
            sha1-or-link is the link target if kind is symlink, or the sha1 if
                it can be obtained without reading the file.
        """
        raise NotImplementedError(self.path_content_summary)
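    # Illustrative sketch (not part of the original source): unpacking the
    # summary tuple returned by a concrete implementation might look like:
    #
    #   kind, size, executable, sha1_or_link = tree.path_content_summary('foo')
    #   if kind == 'file' and size is not None:
    #       print '%s is %d bytes' % ('foo', size)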
    def get_reference_revision(self, file_id, path=None):
        raise NotImplementedError("Tree subclass %s must implement "
                                  "get_reference_revision"
                                  % self.__class__.__name__)
    def get_symlink_target(self, file_id):
        raise NotImplementedError(self.get_symlink_target)
    def get_canonical_inventory_paths(self, paths):
        """Like get_canonical_inventory_path() but works on multiple items.

        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case
            insensitively.
        """
        return list(self._yield_canonical_inventory_paths(paths))
    def get_canonical_inventory_path(self, path):
        """Returns the first inventory item that case-insensitively matches path.

        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches case-insensitively, it is implementation
        defined which is returned.

        If no path matches case-insensitively, the input path is returned, but
        with as many path entries that do exist changed to their canonical
        form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        """
        return self._yield_canonical_inventory_paths([path]).next()
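    # Illustrative sketch (not part of the original source): on a
    # case-insensitive filesystem a user may type 'readme.TXT' for a file
    # versioned as 'README.txt'; the canonical lookup maps one to the other:
    #
    #   print tree.get_canonical_inventory_path('readme.TXT')
    #   # -> 'README.txt', assuming that is the versioned spelling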
    def _yield_canonical_inventory_paths(self, paths):
        for path in paths:
            # First, if the path as specified exists exactly, just use it.
            if self.path2id(path) is not None:
                yield path
                continue
            # Otherwise walk down from the root, matching each path element
            # case-insensitively against the children of the current directory.
            cur_id = self.get_root_id()
            cur_path = ''
            bit_iter = iter(path.split("/"))
            for elt in bit_iter:
                lelt = elt.lower()
                new_path = None
                for child in self.iter_children(cur_id):
                    try:
                        # XXX: it seems like if the child is known to be in the
                        # tree, we shouldn't need to go from its id back to
                        # its path -- mbp 2010-02-11
                        #
                        # XXX: it seems like we could be more efficient
                        # by just directly looking up the original name and
                        # only then searching all children; also by not
                        # chopping paths so much. -- mbp 2010-02-11
                        child_base = os.path.basename(self.id2path(child))
                        if child_base == elt:
                            # if we found an exact match, we can stop now; if
                            # we found an approximate match we need to keep
                            # searching because there might be an exact match
                            # later.
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                            break
                        elif child_base.lower() == lelt:
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                    except errors.NoSuchId:
                        # before a change is committed we can see this error...
                        continue
                if new_path:
                    cur_path = new_path
                else:
                    # got to the end of this directory and no entries matched.
                    # Return what matched so far, plus the rest as specified.
                    cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
                    break
            yield cur_path
    def get_root_id(self):
        """Return the file_id for the root of this tree."""
        raise NotImplementedError(self.get_root_id)
    def annotate_iter(self, file_id,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.

        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.
        :param file_id: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of
            this value.
        """
        raise NotImplementedError(self.annotate_iter)
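    # Illustrative sketch (not part of the original source): annotating a file
    # in a working tree, where uncommitted lines are reported under the
    # default CURRENT_REVISION marker:
    #
    #   file_id = tree.path2id('hello.c')        # hypothetical path
    #   for revision_id, line in tree.annotate_iter(file_id):
    #       print revision_id, line,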
    def _get_plan_merge_data(self, file_id, other, base):
        from bzrlib import versionedfile
        vf = versionedfile._PlanMergeVersionedFile(file_id)
        last_revision_a = self._get_file_revision(file_id, vf, 'this:')
        last_revision_b = other._get_file_revision(file_id, vf, 'other:')
        if base is None:
            last_revision_base = None
        else:
            last_revision_base = base._get_file_revision(file_id, vf, 'base:')
        return vf, last_revision_a, last_revision_b, last_revision_base
    def plan_file_merge(self, file_id, other, base=None):
        """Generate a merge plan based on annotations.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,
                             last_revision_base)
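    # Illustrative sketch (not part of the original source): the merge plan is
    # an iterable of (state, line) pairs, with states such as 'unchanged',
    # 'new-a', 'new-b', 'killed-a' or 'killed-b':
    #
    #   file_id = this_tree.path2id('module.py')    # hypothetical path
    #   for state, line in this_tree.plan_file_merge(file_id, other_tree):
    #       print state, line,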
    def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 last_revision_base)
    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)
    @staticmethod
    def _file_revision(revision_tree, file_id):
        """Determine the revision associated with a file in a given tree."""
        revision_tree.lock_read()
        try:
            return revision_tree.inventory[file_id].revision
        finally:
            revision_tree.unlock()
    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                         self.get_file(file_id).readlines())
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self._file_revision(self, file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision
    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def _check_retrieved(self, ie, f):
        if not __debug__:
            return
        fp = fingerprint_file(f)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise BzrError("mismatched size for file %r in %r" %
                               (ie.file_id, self._store),
                               ["inventory expects %d bytes" % ie.text_size,
                                "file is actually %d bytes" % fp['size'],
                                "store is probably damaged/corrupt"])
        raise NotImplementedError(self.walkdirs)

    def supports_content_filtering(self):
        return False
    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        """
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            note("*** %s content-filter: %s => %r" % (path, prefs, stk))
        return stk
    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        """
        if self.supports_content_filtering():
            return lambda path, file_id: \
                    self._content_filter_stack(path, file_id)
        else:
            return None
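    # Illustrative sketch (not part of the original source): a tree that
    # supports content filtering hands back a callable mapping (path, file_id)
    # to a filter stack, e.g. for end-of-line conversion rules:
    #
    #   provider = tree._content_filter_stack_provider()
    #   if provider is not None:
    #       stack = provider('README.txt', tree.path2id('README.txt'))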
    def iter_search_rules(self, path_names, pref_names=None,
                          _default_searcher=None):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
            Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
            See _RulesSearcher.get_items for details on the tuple sequence.
        """
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)
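    # Illustrative sketch (not part of the original source): looking up every
    # registered rule-based preference for a couple of paths:
    #
    #   for items in tree.iter_search_rules(['doc/index.txt', 'src/main.c']):
    #       print items    # one tuple sequence per path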
    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher
######################################################################

class InterTree(InterObject):
    """This class represents operations taken between two tree objects.

    Its instances have methods like 'compare' and contain references to the
    source and target trees these operations are to be carried out on.

    Clients of bzrlib should not need to use InterTree directly, rather they
    should use the convenience methods on Tree such as 'Tree.compare()' which
    will pass through to InterTree as appropriate.
    """
    # Formats that will be used to test this InterTree. If both are
    # None, this InterTree will not be tested (e.g. because a complex
    # setup is required)
    _matching_from_tree_format = None
    _matching_to_tree_format = None
    def _changes_from_entries(self, source_entry, target_entry,
        source_path=None, target_path=None):
        """Generate an iter_changes tuple between source_entry and target_entry.

        :param source_entry: An inventory entry from self.source, or None.
        :param target_entry: An inventory entry from self.target, or None.
        :param source_path: The path of source_entry, if known. If not known
            it will be looked up.
        :param target_path: The path of target_entry, if known. If not known
            it will be looked up.
        :return: A tuple, item 0 of which is an iter_changes result tuple, and
            item 1 is True if there are any changes in the result tuple.
        """
        if source_entry is None:
            if target_entry is None:
                return None
            file_id = target_entry.file_id
        else:
            file_id = source_entry.file_id
        if source_entry is not None:
            source_versioned = True
            source_name = source_entry.name
            source_parent = source_entry.parent_id
            if source_path is None:
                source_path = self.source.id2path(file_id)
            source_kind, source_executable, source_stat = \
                self.source._comparison_data(source_entry, source_path)
        else:
            source_versioned = False
            source_name = None
            source_parent = None
            source_kind = None
            source_executable = None
        if target_entry is not None:
            target_versioned = True
            target_name = target_entry.name
            target_parent = target_entry.parent_id
            if target_path is None:
                target_path = self.target.id2path(file_id)
            target_kind, target_executable, target_stat = \
                self.target._comparison_data(target_entry, target_path)
        else:
            target_versioned = False
            target_name = None
            target_parent = None
            target_kind = None
            target_executable = None
        versioned = (source_versioned, target_versioned)
        kind = (source_kind, target_kind)
        changed_content = False
        if source_kind != target_kind:
            changed_content = True
        elif source_kind == 'file':
            if (self.source.get_file_sha1(file_id, source_path, source_stat) !=
                self.target.get_file_sha1(file_id, target_path, target_stat)):
                changed_content = True
        elif source_kind == 'symlink':
            if (self.source.get_symlink_target(file_id) !=
                self.target.get_symlink_target(file_id)):
                changed_content = True
            # XXX: Yes, the indentation below is wrong. But fixing it broke
            # test_merge.TestMergerEntriesLCAOnDisk.
            # test_nested_tree_subtree_renamed_and_modified. We'll wait for
            # the fix from bzr.dev -- vila 2009026
            elif source_kind == 'tree-reference':
                if (self.source.get_reference_revision(file_id, source_path)
                    != self.target.get_reference_revision(file_id, target_path)):
                    changed_content = True
        parent = (source_parent, target_parent)
        name = (source_name, target_name)
        executable = (source_executable, target_executable)
        if (changed_content is not False or versioned[0] != versioned[1]
            or parent[0] != parent[1] or name[0] != name[1] or
            executable[0] != executable[1]):
            changes = True
        else:
            changes = False
        return (file_id, (source_path, target_path), changed_content,
                versioned, parent, name, kind, executable), changes
    def compare(self, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        :param require_versioned: Raise errors.PathsNotVersionedError if a
            path in the specific_files list is not versioned in one of
            source, target or extra_trees.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children
            of matched directories are included. The parents in the target tree
            of the specific files up to and including the root of the tree are
            always evaluated for changes too.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        """
        lookup_trees = [self.source]
        if extra_trees:
            lookup_trees.extend(extra_trees)
        # The ids of items we need to examine to insure delta consistency.
        precise_file_ids = set()
        changed_file_ids = []
        if specific_files == []:
            specific_file_ids = []
        else:
            specific_file_ids = self.target.paths2ids(specific_files,
                lookup_trees, require_versioned=require_versioned)
        if specific_files is not None:
            # reparented or added entries must have their parents included
            # so that valid deltas can be created. The seen_parents set
            # tracks the parents that we need to have.
            # The seen_dirs set tracks directory entries we've yielded.
            # After outputting version object in to_entries we set difference
            # the two seen sets and start checking parents.
            seen_parents = set()
            seen_dirs = set()
        if want_unversioned:
            all_unversioned = sorted([(p.split('/'), p) for p in
                                     self.target.extras()
                if specific_files is None or
                    osutils.is_inside_any(specific_files, p)])
            all_unversioned = deque(all_unversioned)
        else:
            all_unversioned = deque()
        to_paths = {}
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        entry_count = 0
        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable values when execute is not supported.
        fake_entry = InventoryFile('unused', 'unused', 'unused')
        for target_path, target_entry in to_entries_by_dir:
            while (all_unversioned and
                all_unversioned[0][0] < target_path.split('/')):
                unversioned_path = all_unversioned.popleft()
                target_kind, target_executable, target_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, None),
                    (None, unversioned_path[0][-1]),
                    (None, target_kind),
                    (None, target_executable))
            source_path, source_entry = from_data.get(target_entry.file_id,
                (None, None))
            result, changes = self._changes_from_entries(source_entry,
                target_entry, source_path=source_path, target_path=target_path)
            to_paths[result[0]] = result[1][1]
            entry_count += 1
            if result[3][0]:
                entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            if changes or include_unchanged:
                if specific_file_ids is not None:
                    new_parent_id = result[4][1]
                    precise_file_ids.add(new_parent_id)
                    changed_file_ids.append(result[0])
                yield result
            # Ensure correct behaviour for reparented/added specific files.
            if specific_files is not None:
                # Record output dirs
                if result[6][1] == 'directory':
                    seen_dirs.add(result[0])
                # Record parents of reparented/added entries.
                versioned = result[3]
                parents = result[4]
                if not versioned[0] or parents[0] != parents[1]:
                    seen_parents.add(parents[1])
        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            changed_file_ids.append(file_id)
            yield(file_id, (path, to_path), changed_content, versioned, parent,
                  name, kind, executable)
        changed_file_ids = set(changed_file_ids)
        if specific_file_ids is not None:
            for result in self._handle_precise_ids(precise_file_ids,
                changed_file_ids):
                yield result
    def _get_entry(self, tree, file_id):
        """Get an inventory entry from a tree, with missing entries as None.

        If the tree raises NotImplementedError on accessing .inventory, then
        this is worked around using iter_entries_by_dir on just the file id
        desired.

        :param tree: The tree to lookup the entry in.
        :param file_id: The file_id to lookup.
        """
        try:
            inventory = tree.inventory
        except NotImplementedError:
            # No inventory available.
            try:
                iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
                return iterator.next()[1]
            except StopIteration:
                return None
        else:
            try:
                return inventory[file_id]
            except errors.NoSuchId:
                return None
    def _handle_precise_ids(self, precise_file_ids, changed_file_ids,
        discarded_changes=None):
        """Fill out a partial iter_changes to be consistent.

        :param precise_file_ids: The file ids of parents that were seen during
            the iter_changes.
        :param changed_file_ids: The file ids of already emitted items.
        :param discarded_changes: An optional dict of precalculated
            iter_changes items which the partial iter_changes had not output
            but had calculated.
        :return: A generator of iter_changes items to output.
        """
        # process parents of things that had changed under the users
        # requested paths to prevent incorrect paths or parent ids which
        # aren't in the tree.
        while precise_file_ids:
            precise_file_ids.discard(None)
            # Don't emit file_ids twice
            precise_file_ids.difference_update(changed_file_ids)
            if not precise_file_ids:
                break
            # If there was something at a given output path in source, we
            # have to include the entry from source in the delta, or we would
            # be putting this entry into a used path.
            paths = []
            for parent_id in precise_file_ids:
                try:
                    paths.append(self.target.id2path(parent_id))
                except errors.NoSuchId:
                    # This id has been dragged in from the source by delta
                    # expansion and isn't present in target at all: we don't
                    # need to check for path collisions on it.
                    pass
            for path in paths:
                old_id = self.source.path2id(path)
                precise_file_ids.add(old_id)
            precise_file_ids.discard(None)
            current_ids = precise_file_ids
            precise_file_ids = set()
            # We have to emit all of precise_file_ids that have been altered.
            # We may have to output the children of some of those ids if any
            # directories have stopped being directories.
            for file_id in current_ids:
                if discarded_changes:
                    result = discarded_changes.get(file_id)
                    old_entry = None
                else:
                    result = None
                if result is None:
                    old_entry = self._get_entry(self.source, file_id)
                    new_entry = self._get_entry(self.target, file_id)
                    result, changes = self._changes_from_entries(
                        old_entry, new_entry)
                else:
                    changes = True
                # Get this parents parent to examine.
                new_parent_id = result[4][1]
                precise_file_ids.add(new_parent_id)
                if changes:
                    if (result[6][0] == 'directory' and
                        result[6][1] != 'directory'):
                        # This stopped being a directory, the old children have
                        # to be included.
                        if old_entry is None:
                            # Reusing a discarded change.
                            old_entry = self._get_entry(self.source, file_id)
                        for child in old_entry.children.values():
                            precise_file_ids.add(child.file_id)
                    changed_file_ids.add(result[0])
                    yield result
class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.
    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()
    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie
    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
                   MultiWalker._path_to_key(path2))
    @staticmethod
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)
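    # Illustrative sketch (not part of the original source): the dirblock key
    # sorts all children of a directory before any grandchildren, matching
    # iter_entries_by_dir order:
    #
    #   paths = [u'a/b/c', u'a/b', u'a', u'f']
    #   paths.sort(key=MultiWalker._path_to_key)
    #   # -> [u'a', u'f', u'a/b', u'a/b/c']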
    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)
    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result
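    # Illustrative sketch (not part of the original source): walking a basis
    # tree against a working tree and reporting where each file_id lives in
    # both trees:
    #
    #   walker = MultiWalker(basis_tree, [working_tree])
    #   for path, file_id, master_ie, other_values in walker.iter_all():
    #       other_path, other_ie = other_values[0]
    #       print file_id, path, other_path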
    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra
    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries
    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values