        return self.bzrdir.is_control_filename(filename)
    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the
        grandchildren.

        For example, in the tree::

           a/
             b/
               c
             d/
               e
           f/
             g

        The yield order (ignoring root) would be::

          a, f, a/b, a/d, a/b/c, a/d/e, f/g

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        """
        return self.inventory.iter_entries_by_dir(
            specific_file_ids=specific_file_ids, yield_parents=yield_parents)

    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path, entry.file_id

    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
            % self.__class__.__name__)

    def stored_kind(self, file_id):
        """File kind stored for this file_id.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        """
        return self.kind(file_id)

    def path_content_summary(self, path):
        """Get a summary of the information about path.

        All the attributes returned are for the canonical form, not the
        convenient form (if content filters are in use).

        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file and the size of the
                canonical form can be cheaply determined, None otherwise.
            exec is None unless kind is file and the platform supports the 'x'
                bit.
            sha1-or-link is the link target if kind is symlink, or the sha1 if
                it can be obtained without reading the file.
        """
        raise NotImplementedError(self.path_content_summary)
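
    # Illustrative sketch (not part of the original source): per the contract
    # documented above, a concrete tree implementation might return summary
    # tuples shaped roughly like the following, where ``wt`` is a hypothetical
    # working tree instance:
    #
    #   wt.path_content_summary('hello.txt')
    #       -> ('file', 12, False, None)          # sha1 not cheaply available
    #   wt.path_content_summary('link-to-hello')
    #       -> ('symlink', None, None, 'hello.txt')
    #   wt.path_content_summary('subdir')
    #       -> ('directory', None, None, None)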

    def get_reference_revision(self, file_id, path=None):
        raise NotImplementedError("Tree subclass %s must implement "
                                  "get_reference_revision"
                                  % self.__class__.__name__)

    def get_symlink_target(self, file_id):
        raise NotImplementedError(self.get_symlink_target)

    def get_canonical_inventory_paths(self, paths):
        """Like get_canonical_inventory_path() but works on multiple items.

        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case
            insensitively.
        """
        return list(self._yield_canonical_inventory_paths(paths))

    def get_canonical_inventory_path(self, path):
        """Returns the first inventory item that case-insensitively matches path.

        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches case-insensitively, it is implementation
        defined which is returned.

        If no path matches case-insensitively, the input path is returned, but
        with as many path entries that do exist changed to their canonical
        form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        """
        return self._yield_canonical_inventory_paths([path]).next()
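
    # Illustrative sketch (not part of the original source): with a
    # hypothetical tree ``wt`` that versions a file as 'README.txt' inside a
    # directory 'Docs', the lookups behave roughly like:
    #
    #   wt.get_canonical_inventory_path('docs/readme.txt')
    #       -> 'Docs/README.txt'
    #   wt.get_canonical_inventory_paths(['docs/readme.txt', 'docs/missing'])
    #       -> ['Docs/README.txt', 'Docs/missing']   # unknown tail kept as given
    #
    # Prefer the plural form when resolving many names, as documented above.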

    def _yield_canonical_inventory_paths(self, paths):
        for path in paths:
            # First, if the path as specified exists exactly, just use it.
            if self.path2id(path) is not None:
                yield path
                continue
            # Otherwise walk down from the root, matching one path element at
            # a time, case-insensitively.
            cur_id = self.get_root_id()
            cur_path = ''
            bit_iter = iter(path.split("/"))
            for elt in bit_iter:
                lelt = elt.lower()
                new_path = None
                for child in self.iter_children(cur_id):
                    try:
                        # XXX: it seems like if the child is known to be in the
                        # tree, we shouldn't need to go from its id back to
                        # its path -- mbp 2010-02-11
                        # XXX: it seems like we could be more efficient
                        # by just directly looking up the original name and
                        # only then searching all children; also by not
                        # chopping paths so much. -- mbp 2010-02-11
                        child_base = os.path.basename(self.id2path(child))
                        if child_base == elt:
                            # if we found an exact match, we can stop now; if
                            # we found an approximate match we need to keep
                            # searching because there might be an exact match
                            # later.
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                            break
                        elif child_base.lower() == lelt:
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                    except errors.NoSuchId:
                        # before a change is committed we can see this error...
                        continue
                if new_path:
                    cur_path = new_path
                else:
                    # got to the end of this directory and no entries matched.
                    # Return what matched so far, plus the rest as specified.
                    cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
                    break
            yield cur_path

    def get_root_id(self):
        """Return the file_id for the root of this tree."""
        raise NotImplementedError(self.get_root_id)

    def annotate_iter(self, file_id,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.

        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.

        :param file_id: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of
            this value.
        """
        raise NotImplementedError(self.annotate_iter)

    def _get_plan_merge_data(self, file_id, other, base):
        from bzrlib import versionedfile
        vf = versionedfile._PlanMergeVersionedFile(file_id)
        last_revision_a = self._get_file_revision(file_id, vf, 'this:')
        last_revision_b = other._get_file_revision(file_id, vf, 'other:')
        if base is None:
            last_revision_base = None
        else:
            last_revision_base = base._get_file_revision(file_id, vf, 'base:')
        return vf, last_revision_a, last_revision_b, last_revision_base

    def plan_file_merge(self, file_id, other, base=None):
        """Generate a merge plan based on annotations.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,
                             last_revision_base)

    def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 last_revision_base)
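
    # Illustrative sketch (not part of the original source): both planners
    # return an iterable of (state, line) pairs from the underlying
    # _PlanMergeVersionedFile. Assuming two hypothetical tree objects
    # ``this_tree`` and ``other_tree`` that both version file id ``fid``:
    #
    #   plan = this_tree.plan_file_merge(fid, other_tree)
    #   for state, line in plan:
    #       # ``state`` describes how ``line`` relates to the two sides
    #       # (unchanged, new on one side, superseded on the other, ...).
    #       ...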

    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)

    @staticmethod
    def _file_revision(revision_tree, file_id):
        """Determine the revision associated with a file in a given tree."""
        revision_tree.lock_read()
        try:
            return revision_tree.inventory[file_id].revision
        finally:
            revision_tree.unlock()

    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                         self.get_file_lines(file_id))
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self._file_revision(self, file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def _check_retrieved(self, ie, f):
        if not __debug__:
            return
        fp = fingerprint_file(f)
        f.seek(0)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise BzrError("mismatched size for file %r in %r" %
                               (ie.file_id, self._store),
                               ["inventory expects %d bytes" % ie.text_size,
                                "file is actually %d bytes" % fp['size'],
                                "store is probably damaged/corrupt"])

        raise NotImplementedError(self.walkdirs)

    def supports_content_filtering(self):
        return False

    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
            or None if unknown
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        """
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            note("*** %s content-filter: %s => %r" % (path, prefs, stk))
        return stk

    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        """
        if self.supports_content_filtering():
            return lambda path, file_id: \
                self._content_filter_stack(path, file_id)
        else:
            return None
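
    # Illustrative sketch (not part of the original source): lower layers that
    # need per-file filter stacks typically hold on to the provider and call
    # it per path. ``readme_id`` here is a hypothetical file id:
    #
    #   provider = tree._content_filter_stack_provider()
    #   if provider is not None:
    #       stack = provider('README.txt', readme_id)
    #       # ``stack`` is the list of ContentFilters for that path, [] if none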

    def iter_search_rules(self, path_names, pref_names=None,
                          _default_searcher=None):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
            Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
            See _RulesSearcher.get_items for details on the tuple sequence.
        """
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)
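
    # Illustrative sketch (not part of the original source): assuming a rules
    # searcher that maps '*.txt' to an 'eol' preference, a caller could do
    # something like the following with a hypothetical tree ``t``:
    #
    #   for items in t.iter_search_rules(['a.txt', 'b.png'], ['eol']):
    #       # ``items`` is the tuple sequence of (name, value) preferences
    #       # selected for the corresponding path.
    #       print items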

    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher


######################################################################

    Its instances have methods like 'compare' and contain references to the
    source and target trees these operations are to be carried out on.

    Clients of bzrlib should not need to use InterTree directly, rather they
    should use the convenience methods on Tree such as 'Tree.compare()' which
    will pass through to InterTree as appropriate.
    """

    # Formats that will be used to test this InterTree. If both are
    # None, this InterTree will not be tested (e.g. because a complex
    # setup is required)
    _matching_from_tree_format = None
    _matching_to_tree_format = None

    def _changes_from_entries(self, source_entry, target_entry,
                              source_path=None, target_path=None):
        """Generate an iter_changes tuple between source_entry and target_entry.

        :param source_entry: An inventory entry from self.source, or None.
        :param target_entry: An inventory entry from self.target, or None.
        :param source_path: The path of source_entry, if known. If not known
            it will be looked up.
        :param target_path: The path of target_entry, if known. If not known
            it will be looked up.
        :return: A tuple, item 0 of which is an iter_changes result tuple, and
            item 1 is True if there are any changes in the result tuple.
        """
        if source_entry is None:
            if target_entry is None:
                return None
            file_id = target_entry.file_id
        else:
            file_id = source_entry.file_id
        if source_entry is not None:
            source_versioned = True
            source_name = source_entry.name
            source_parent = source_entry.parent_id
            if source_path is None:
                source_path = self.source.id2path(file_id)
            source_kind, source_executable, source_stat = \
                self.source._comparison_data(source_entry, source_path)
        else:
            source_versioned = False
            source_name = None
            source_parent = None
            source_kind = None
            source_executable = None
        if target_entry is not None:
            target_versioned = True
            target_name = target_entry.name
            target_parent = target_entry.parent_id
            if target_path is None:
                target_path = self.target.id2path(file_id)
            target_kind, target_executable, target_stat = \
                self.target._comparison_data(target_entry, target_path)
        else:
            target_versioned = False
            target_name = None
            target_parent = None
            target_kind = None
            target_executable = None
        versioned = (source_versioned, target_versioned)
        kind = (source_kind, target_kind)
        changed_content = False
        if source_kind != target_kind:
            changed_content = True
        elif source_kind == 'file':
            if (self.source.get_file_sha1(file_id, source_path, source_stat) !=
                self.target.get_file_sha1(file_id, target_path, target_stat)):
                changed_content = True
        elif source_kind == 'symlink':
            if (self.source.get_symlink_target(file_id) !=
                self.target.get_symlink_target(file_id)):
                changed_content = True
        # XXX: Yes, the indentation below is wrong. But fixing it broke
        # test_merge.TestMergerEntriesLCAOnDisk.
        # test_nested_tree_subtree_renamed_and_modified. We'll wait for
        # the fix from bzr.dev -- vila 2009026
        elif source_kind == 'tree-reference':
            if (self.source.get_reference_revision(file_id, source_path)
                != self.target.get_reference_revision(file_id, target_path)):
                    changed_content = True
        parent = (source_parent, target_parent)
        name = (source_name, target_name)
        executable = (source_executable, target_executable)
        if (changed_content is not False or versioned[0] != versioned[1]
            or parent[0] != parent[1] or name[0] != name[1] or
            executable[0] != executable[1]):
            changes = True
        else:
            changes = False
        return (file_id, (source_path, target_path), changed_content,
                versioned, parent, name, kind, executable), changes
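
    # Illustrative sketch (not part of the original source): item 0 of the
    # returned pair is an iter_changes-style tuple, laid out as
    #
    #   (file_id,
    #    (source_path, target_path),
    #    changed_content,                           # bool
    #    (source_versioned, target_versioned),
    #    (source_parent_id, target_parent_id),
    #    (source_name, target_name),
    #    (source_kind, target_kind),
    #    (source_executable, target_executable))
    #
    # and item 1 is True when any of those fields differ between the trees.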

    def compare(self, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):

    def iter_changes(self, include_unchanged=False,
                      specific_files=None, pb=None, extra_trees=[],
                      require_versioned=True, want_unversioned=False):
        """Generate an iterator of changes between trees.

        :param require_versioned: Raise errors.PathsNotVersionedError if a
            path in the specific_files list is not versioned in one of
            source, target or extra_trees.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children
            of matched directories are included. The parents in the target tree
            of the specific files up to and including the root of the tree are
            always evaluated for changes too.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        """
        lookup_trees = [self.source]
        if extra_trees:
            lookup_trees.extend(extra_trees)
        # The ids of items we need to examine to insure delta consistency.
        precise_file_ids = set()
        changed_file_ids = []
        if specific_files == []:
            specific_file_ids = []
        else:
            specific_file_ids = self.target.paths2ids(specific_files,
                lookup_trees, require_versioned=require_versioned)
        if specific_files is not None:
            # reparented or added entries must have their parents included
            # so that valid deltas can be created. The seen_parents set
            # tracks the parents that we need to have.
            # The seen_dirs set tracks directory entries we've yielded.
            # After outputting version object in to_entries we set difference
            # the two seen sets and start checking parents.
            seen_parents = set()
            seen_dirs = set()
        if want_unversioned:
            all_unversioned = sorted([(p.split('/'), p) for p in
                                     self.target.extras()
                if specific_files is None or
                    osutils.is_inside_any(specific_files, p)])
            all_unversioned = deque(all_unversioned)
        else:
            all_unversioned = deque()
        to_paths = {}
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        entry_count = 0
        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable it values when execute is not supported.
        fake_entry = InventoryFile('unused', 'unused', 'unused')
        for target_path, target_entry in to_entries_by_dir:
            while (all_unversioned and
                all_unversioned[0][0] < target_path.split('/')):
                unversioned_path = all_unversioned.popleft()
                target_kind, target_executable, target_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, None),
                    (None, unversioned_path[0][-1]),
                    (None, target_kind),
                    (None, target_executable))
            source_path, source_entry = from_data.get(target_entry.file_id,
                (None, None))
            result, changes = self._changes_from_entries(source_entry,
                target_entry, source_path=source_path, target_path=target_path)
            to_paths[result[0]] = result[1][1]
            entry_count += 1
            if result[3][0]:
                entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            if changes or include_unchanged:
                if specific_file_ids is not None:
                    new_parent_id = result[4][1]
                    precise_file_ids.add(new_parent_id)
                    changed_file_ids.append(result[0])
                yield result
            # Ensure correct behaviour for reparented/added specific files.
            if specific_files is not None:
                # Record output dirs
                if result[6][1] == 'directory':
                    seen_dirs.add(result[0])
                # Record parents of reparented/added entries.
                versioned = result[3]
                parents = result[4]
                if not versioned[0] or parents[0] != parents[1]:
                    seen_parents.add(parents[1])
        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()

                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            changed_file_ids.append(file_id)
            yield(file_id, (path, to_path), changed_content, versioned, parent,
                  name, kind, executable)
        changed_file_ids = set(changed_file_ids)
        if specific_file_ids is not None:
            for result in self._handle_precise_ids(precise_file_ids,
                changed_file_ids):
                yield result

    def _get_entry(self, tree, file_id):
        """Get an inventory entry from a tree, with missing entries as None.

        If the tree raises NotImplementedError on accessing .inventory, then
        this is worked around using iter_entries_by_dir on just the file id
        desired.

        :param tree: The tree to lookup the entry in.
        :param file_id: The file_id to lookup.
        """
        try:
            inventory = tree.inventory
        except NotImplementedError:
            # No inventory available.
            try:
                iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
                return iterator.next()[1]
            except StopIteration:
                return None
        else:
            try:
                return inventory[file_id]
            except errors.NoSuchId:
                return None
def _handle_precise_ids(self, precise_file_ids, changed_file_ids,
1186
discarded_changes=None):
1187
"""Fill out a partial iter_changes to be consistent.
1189
:param precise_file_ids: The file ids of parents that were seen during
1191
:param changed_file_ids: The file ids of already emitted items.
1192
:param discarded_changes: An optional dict of precalculated
1193
iter_changes items which the partial iter_changes had not output
1195
:return: A generator of iter_changes items to output.
1197
# process parents of things that had changed under the users
1198
# requested paths to prevent incorrect paths or parent ids which
1199
# aren't in the tree.
1200
while precise_file_ids:
1201
precise_file_ids.discard(None)
1202
# Don't emit file_ids twice
1203
precise_file_ids.difference_update(changed_file_ids)
1204
if not precise_file_ids:
1206
# If the there was something at a given output path in source, we
1207
# have to include the entry from source in the delta, or we would
1208
# be putting this entry into a used path.
1210
for parent_id in precise_file_ids:
1212
paths.append(self.target.id2path(parent_id))
1213
except errors.NoSuchId:
1214
# This id has been dragged in from the source by delta
1215
# expansion and isn't present in target at all: we don't
1216
# need to check for path collisions on it.
1219
old_id = self.source.path2id(path)
1220
precise_file_ids.add(old_id)
1221
precise_file_ids.discard(None)
1222
current_ids = precise_file_ids
1223
precise_file_ids = set()
1224
# We have to emit all of precise_file_ids that have been altered.
1225
# We may have to output the children of some of those ids if any
1226
# directories have stopped being directories.
1227
for file_id in current_ids:
1229
if discarded_changes:
1230
result = discarded_changes.get(file_id)
1235
old_entry = self._get_entry(self.source, file_id)
1236
new_entry = self._get_entry(self.target, file_id)
1237
result, changes = self._changes_from_entries(
1238
old_entry, new_entry)
1241
# Get this parents parent to examine.
1242
new_parent_id = result[4][1]
1243
precise_file_ids.add(new_parent_id)
1245
if (result[6][0] == 'directory' and
1246
result[6][1] != 'directory'):
1247
# This stopped being a directory, the old children have
1249
if old_entry is None:
1250
# Reusing a discarded change.
1251
old_entry = self._get_entry(self.source, file_id)
1252
for child in old_entry.children.values():
1253
precise_file_ids.add(child.file_id)
1254
changed_file_ids.add(result[0])
1258
class MultiWalker(object):
1259
"""Walk multiple trees simultaneously, getting combined results."""
1261
# Note: This could be written to not assume you can do out-of-order
1262
# lookups. Instead any nodes that don't match in all trees could be
1263
# marked as 'deferred', and then returned in the final cleanup loop.
1264
# For now, I think it is "nicer" to return things as close to the
1265
# "master_tree" order as we can.
1267
def __init__(self, master_tree, other_trees):
1268
"""Create a new MultiWalker.
1270
All trees being walked must implement "iter_entries_by_dir()", such
1271
that they yield (path, object) tuples, where that object will have a
1272
'.file_id' member, that can be used to check equality.
1274
:param master_tree: All trees will be 'slaved' to the master_tree such
1275
that nodes in master_tree will be used as 'first-pass' sync points.
1276
Any nodes that aren't in master_tree will be merged in a second
1278
:param other_trees: A list of other trees to walk simultaneously.
1280
self._master_tree = master_tree
1281
self._other_trees = other_trees
1283
# Keep track of any nodes that were properly processed just out of
1284
# order, that way we don't return them at the end, we don't have to
1285
# track *all* processed file_ids, just the out-of-order ones
1286
self._out_of_order_processed = set()
1289
def _step_one(iterator):
1290
"""Step an iter_entries_by_dir iterator.
1292
:return: (has_more, path, ie)
1293
If has_more is False, path and ie will be None.
1296
path, ie = iterator.next()
1297
except StopIteration:
1298
return False, None, None
1300
return True, path, ie
1303
def _cmp_path_by_dirblock(path1, path2):
1304
"""Compare two paths based on what directory they are in.
1306
This generates a sort order, such that all children of a directory are
1307
sorted together, and grandchildren are in the same order as the
1308
children appear. But all grandchildren come after all children.
1310
:param path1: first path
1311
:param path2: the second path
1312
:return: negative number if ``path1`` comes first,
1313
0 if paths are equal
1314
and a positive number if ``path2`` sorts first
1316
# Shortcut this special case
1319
# This is stolen from _dirstate_helpers_py.py, only switching it to
1320
# Unicode objects. Consider using encode_utf8() and then using the
1321
# optimized versions, or maybe writing optimized unicode versions.
1322
if not isinstance(path1, unicode):
1323
raise TypeError("'path1' must be a unicode string, not %s: %r"
1324
% (type(path1), path1))
1325
if not isinstance(path2, unicode):
1326
raise TypeError("'path2' must be a unicode string, not %s: %r"
1327
% (type(path2), path2))
1328
return cmp(MultiWalker._path_to_key(path1),
1329
MultiWalker._path_to_key(path2))
1332
def _path_to_key(path):
1333
dirname, basename = osutils.split(path)
1334
return (dirname.split(u'/'), basename)
1336
def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
1337
"""Lookup an inventory entry by file_id.
1339
This is called when an entry is missing in the normal order.
1340
Generally this is because a file was either renamed, or it was
1341
deleted/added. If the entry was found in the inventory and not in
1342
extra_entries, it will be added to self._out_of_order_processed
1344
:param extra_entries: A dictionary of {file_id: (path, ie)}. This
1345
should be filled with entries that were found before they were
1346
used. If file_id is present, it will be removed from the
1348
:param other_tree: The Tree to search, in case we didn't find the entry
1350
:param file_id: The file_id to look for
1351
:return: (path, ie) if found or (None, None) if not present.
1353
if file_id in extra_entries:
1354
return extra_entries.pop(file_id)
1355
# TODO: Is id2path better as the first call, or is
1356
# inventory[file_id] better as a first check?
1358
cur_path = other_tree.id2path(file_id)
1359
except errors.NoSuchId:
1361
if cur_path is None:
1364
self._out_of_order_processed.add(file_id)
1365
cur_ie = other_tree.inventory[file_id]
1366
return (cur_path, cur_ie)
1369
"""Match up the values in the different trees."""
1370
for result in self._walk_master_tree():
1372
self._finish_others()
1373
for result in self._walk_others():
1376
def _walk_master_tree(self):
1377
"""First pass, walk all trees in lock-step.
1379
When we are done, all nodes in the master_tree will have been
1380
processed. _other_walkers, _other_entries, and _others_extra will be
1381
set on 'self' for future processing.
1383
# This iterator has the most "inlining" done, because it tends to touch
1384
# every file in the tree, while the others only hit nodes that don't
1386
master_iterator = self._master_tree.iter_entries_by_dir()
1388
other_walkers = [other.iter_entries_by_dir()
1389
for other in self._other_trees]
1390
other_entries = [self._step_one(walker) for walker in other_walkers]
1391
# Track extra nodes in the other trees
1392
others_extra = [{} for i in xrange(len(self._other_trees))]
1394
master_has_more = True
1395
step_one = self._step_one
1396
lookup_by_file_id = self._lookup_by_file_id
1397
out_of_order_processed = self._out_of_order_processed
1399
while master_has_more:
1400
(master_has_more, path, master_ie) = step_one(master_iterator)
1401
if not master_has_more:
1404
file_id = master_ie.file_id
1406
other_values_append = other_values.append
1407
next_other_entries = []
1408
next_other_entries_append = next_other_entries.append
1409
for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
1410
if not other_has_more:
1411
other_values_append(lookup_by_file_id(
1412
others_extra[idx], self._other_trees[idx], file_id))
1413
next_other_entries_append((False, None, None))
1414
elif file_id == other_ie.file_id:
1415
# This is the critical code path, as most of the entries
1416
# should match between most trees.
1417
other_values_append((other_path, other_ie))
1418
next_other_entries_append(step_one(other_walkers[idx]))
1420
# This walker did not match, step it until it either
1421
# matches, or we know we are past the current walker.
1422
other_walker = other_walkers[idx]
1423
other_extra = others_extra[idx]
1424
while (other_has_more and
1425
self._cmp_path_by_dirblock(other_path, path) < 0):
1426
other_file_id = other_ie.file_id
1427
if other_file_id not in out_of_order_processed:
1428
other_extra[other_file_id] = (other_path, other_ie)
1429
other_has_more, other_path, other_ie = \
1430
step_one(other_walker)
1431
if other_has_more and other_ie.file_id == file_id:
1432
# We ended up walking to this point, match and step
1434
other_values_append((other_path, other_ie))
1435
other_has_more, other_path, other_ie = \
1436
step_one(other_walker)
1438
# This record isn't in the normal order, see if it
1440
other_values_append(lookup_by_file_id(
1441
other_extra, self._other_trees[idx], file_id))
1442
next_other_entries_append((other_has_more, other_path,
1444
other_entries = next_other_entries
1446
# We've matched all the walkers, yield this datapoint
1447
yield path, file_id, master_ie, other_values
1448
self._other_walkers = other_walkers
1449
self._other_entries = other_entries
1450
self._others_extra = others_extra
1452
def _finish_others(self):
1453
"""Finish walking the other iterators, so we get all entries."""
1454
for idx, info in enumerate(self._other_entries):
1455
other_extra = self._others_extra[idx]
1456
(other_has_more, other_path, other_ie) = info
1457
while other_has_more:
1458
other_file_id = other_ie.file_id
1459
if other_file_id not in self._out_of_order_processed:
1460
other_extra[other_file_id] = (other_path, other_ie)
1461
other_has_more, other_path, other_ie = \
1462
self._step_one(self._other_walkers[idx])
1463
del self._other_entries
1465
def _walk_others(self):
1466
"""Finish up by walking all the 'deferred' nodes."""
1467
# TODO: One alternative would be to grab all possible unprocessed
1468
# file_ids, and then sort by path, and then yield them. That
1469
# might ensure better ordering, in case a caller strictly
1470
# requires parents before children.
1471
for idx, other_extra in enumerate(self._others_extra):
1472
others = sorted(other_extra.itervalues(),
1473
key=lambda x: self._path_to_key(x[0]))
1474
for other_path, other_ie in others:
1475
file_id = other_ie.file_id
1476
# We don't need to check out_of_order_processed here, because
1477
# the lookup_by_file_id will be removing anything processed
1478
# from the extras cache
1479
other_extra.pop(file_id)
1480
other_values = [(None, None) for i in xrange(idx)]
1481
other_values.append((other_path, other_ie))
1482
for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
1483
alt_idx = alt_idx + idx + 1
1484
alt_extra = self._others_extra[alt_idx]
1485
alt_tree = self._other_trees[alt_idx]
1486
other_values.append(self._lookup_by_file_id(
1487
alt_extra, alt_tree, file_id))
1488
yield other_path, file_id, None, other_values


# This was deprecated before 0.12, but did not have an official warning
@symbol_versioning.deprecated_function(symbol_versioning.zero_twelve)
def RevisionTree(*args, **kwargs):
    """RevisionTree has moved to bzrlib.revisiontree.RevisionTree()

    Accessing it as bzrlib.tree.RevisionTree has been deprecated as of
    bzr 0.12; please use bzrlib.revisiontree.RevisionTree instead.
    """
    from bzrlib.revisiontree import RevisionTree as _RevisionTree
    return _RevisionTree(*args, **kwargs)