        return iter(self.inventory)

    def all_file_ids(self):
        """Iterate through all file ids, including ids for missing files."""
        raise NotImplementedError(self.all_file_ids)

    def id2path(self, file_id):
        """Return the path for a file id.

        :raises NoSuchId:
        """
        raise NotImplementedError(self.id2path)

    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
"""Walk the tree in 'by_dir' order.
171
This will yield each entry in the tree as a (path, entry) tuple. The
172
order that they are yielded is: the contents of a directory are
173
preceeded by the parent of a directory, and all the contents of a
174
directory are grouped together.
176
return self.inventory.iter_entries_by_dir(
177
specific_file_ids=specific_file_ids)
158
This will yield each entry in the tree as a (path, entry) tuple.
159
The order that they are yielded is:
161
Directories are walked in a depth-first lexicographical order,
162
however, whenever a directory is reached, all of its direct child
163
nodes are yielded in lexicographical order before yielding the
166
For example, in the tree::
176
The yield order (ignoring root) would be::
177
a, f, a/b, a/d, a/b/c, a/d/e, f/g
179
:param yield_parents: If True, yield the parents from the root leading
180
down to specific_file_ids that have been requested. This has no
181
impact if specific_file_ids is None.
183
raise NotImplementedError(self.iter_entries_by_dir)
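
    # Illustrative sketch (not part of the API): consuming the (path, entry)
    # tuples yielded in 'by_dir' order by a concrete Tree implementation.
    # `tree` is an assumption for the example, e.g. a read-locked tree
    # obtained from a branch's basis_tree().
    #
    #   tree.lock_read()
    #   try:
    #       for path, entry in tree.iter_entries_by_dir():
    #           print '%-10s %s' % (entry.kind, path)
    #   finally:
    #       tree.unlock()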

    def list_files(self, include_root=False, from_dir=None, recursive=True):
        """List all files in this tree.

        :param include_root: Whether to include the entry for the tree root
        :param from_dir: Directory under which to list files
        :param recursive: Whether to list files recursively
        :return: iterator over tuples of (path, versioned, kind, file_id,
            inventory entry)
        """
        raise NotImplementedError(self.list_files)

    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path, entry.file_id

    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
            % self.__class__.__name__)

    def stored_kind(self, file_id):
        """File kind stored for this file_id.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        """
        return self.kind(file_id)

    def path_content_summary(self, path):
        """Get a summary of the information about path.

        All the attributes returned are for the canonical form, not the
        convenient form (if content filters are in use.)

        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file and the size of the
                canonical form can be cheaply determined, None otherwise.
            exec is None unless kind is file and the platform supports the 'x'
                bit.
            sha1-or-link is the link target if kind is symlink, or the sha1 if
                it can be obtained without reading the file.
        """
        raise NotImplementedError(self.path_content_summary)
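
    # Illustrative sketch (not part of the API): unpacking the summary tuple
    # documented above. `tree` and `path` are assumptions for the example;
    # only the tuple shape is taken from the docstring.
    #
    #   kind, size, executable, sha1_or_link = tree.path_content_summary(path)
    #   if kind == 'file' and size is not None:
    #       print 'file of %d bytes' % size
    #   elif kind == 'symlink':
    #       print 'symlink to %s' % sha1_or_link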

    def plan_file_merge(self, file_id, other, base=None):
        """Generate a merge plan based on annotations.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision.  If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,
                             last_revision_base)

    def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision.  If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 last_revision_base)
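
    # Illustrative sketch (not part of the API): a merge plan is an iterable
    # of (state, line) pairs, e.g. ('unchanged', line). The trees and file_id
    # here are assumptions for the example only.
    #
    #   plan = this_tree.plan_file_merge(file_id, other_tree)
    #   for state, line in plan:
    #       if state != 'unchanged':
    #           print state, line,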

    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)

    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                         self.get_file_lines(file_id))
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self.get_file_revision(file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision

    def _check_retrieved(self, ie, f):
        if not __debug__:
            return
        fp = osutils.fingerprint_file(f)
        f.seek(0)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise errors.BzrError(
                        "mismatched size for file %r in %r" %
                        (ie.file_id, self._store),
                        ["inventory expects %d bytes" % ie.text_size,
                         "file is actually %d bytes" % fp['size'],
                         "store is probably damaged/corrupt"])

        if ie.text_sha1 != fp['sha1']:
            raise errors.BzrError("wrong SHA-1 for file %r in %r" %
                    (ie.file_id, self._store),
                    ["inventory expects %s" % ie.text_sha1,
                     "file is actually %s" % fp['sha1'],
                     "store is probably damaged/corrupt"])

    def path2id(self, path):
        """Return the id for path in this tree."""
        raise NotImplementedError(self.path2id)

    def paths2ids(self, paths, trees=[], require_versioned=True):
        """Return all the ids that can be reached by walking from paths.

        Each path is looked up in this tree and any extras provided in
        trees, and this is repeated recursively: the children in an extra tree
        of a directory that has been renamed under a provided path in this tree
        are all returned, even if none exist under a provided path in this
        tree, and vice versa.

        raise NotImplementedError(self.walkdirs)

    def supports_content_filtering(self):
        return False

    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
            or None if unknown
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        """
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            trace.note("*** %s content-filter: %s => %r" % (path, prefs, stk))
        return stk

    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        """
        if self.supports_content_filtering():
            return lambda path, file_id: \
                    self._content_filter_stack(path, file_id)
        else:
            return None
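
    # Illustrative sketch (not part of the API): how a caller might use the
    # provider returned above. `wt` (a tree that supports content filtering)
    # and the path/file-id values are assumptions for the example only.
    #
    #   provider = wt._content_filter_stack_provider()
    #   if provider is not None:
    #       stack = provider('README', 'readme-file-id')
    #       # stack is a (possibly empty) list of ContentFilter objects,
    #       # readers applied first-to-last, writers last-to-first.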

    def iter_search_rules(self, path_names, pref_names=None,
                          _default_searcher=None):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
            Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
            See _RulesSearcher.get_items for details on the tuple sequence.
        """
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)

    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher
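
    # Illustrative sketch (not part of the API): iter_search_rules yields one
    # tuple sequence per requested path. `tree` and the sample paths are
    # assumptions; the result shape follows the docstring above.
    #
    #   paths = ['doc/index.txt', 'Makefile']
    #   for path, prefs in zip(paths, tree.iter_search_rules(paths)):
    #       print path, list(prefs)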


class InventoryTree(Tree):
    """A tree that relies on an inventory for its metadata.

    Trees contain an `Inventory` object, and also know how to retrieve
    file texts mentioned in the inventory, either from a working
    directory or from a store.

    It is possible for trees to contain files that are not described
    in their inventory or vice versa; for this use `filenames()`.

    Subclasses should set the _inventory attribute, which is considered
    private to external API users.
    """

    def get_canonical_inventory_paths(self, paths):
        """Like get_canonical_inventory_path() but works on multiple items.

        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case
            insensitively.
        """
        return list(self._yield_canonical_inventory_paths(paths))

    def get_canonical_inventory_path(self, path):
        """Returns the first inventory item that case-insensitively matches path.

        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches case-insensitively, it is implementation
        defined which is returned.

        If no path matches case-insensitively, the input path is returned, but
        with as many path entries that do exist changed to their canonical
        form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        """
        return self._yield_canonical_inventory_paths([path]).next()
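
    # Illustrative sketch (not part of the API): on a tree that versions
    # 'README', a differently-cased lookup is folded back to the canonical
    # spelling, while unknown elements are passed through. `tree` and the
    # sample paths are assumptions for the example only.
    #
    #   tree.get_canonical_inventory_path('readme')      # -> 'README'
    #   tree.get_canonical_inventory_path('no-such-dir') # -> 'no-such-dir'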

    def _yield_canonical_inventory_paths(self, paths):
        for path in paths:
            # First, if the path as specified exists exactly, just use it.
            if self.path2id(path) is not None:
                yield path
                continue
            cur_id = self.get_root_id()
            cur_path = ''
            bit_iter = iter(path.split("/"))
            for elt in bit_iter:
                lelt = elt.lower()
                new_path = None
                for child in self.iter_children(cur_id):
                    try:
                        # XXX: it seems like if the child is known to be in the
                        # tree, we shouldn't need to go from its id back to
                        # its path -- mbp 2010-02-11
                        #
                        # XXX: it seems like we could be more efficient
                        # by just directly looking up the original name and
                        # only then searching all children; also by not
                        # chopping paths so much. -- mbp 2010-02-11
                        child_base = os.path.basename(self.id2path(child))
                        if (child_base == elt):
                            # if we found an exact match, we can stop now; if
                            # we found an approximate match we need to keep
                            # searching because there might be an exact match
                            # later.
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                            break
                        elif child_base.lower() == lelt:
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                    except errors.NoSuchId:
                        # before a change is committed we can see this error...
                        continue
                if new_path:
                    cur_path = new_path
                else:
                    # got to the end of this directory and no entries matched.
                    # Return what matched so far, plus the rest as specified.
                    cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
                    break
            yield cur_path

    def _get_inventory(self):
        return self._inventory

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def path2id(self, path):
        """Return the id for path in this tree."""
        return self._inventory.path2id(path)

    def id2path(self, file_id):
        """Return the path for a file id.

        :raises NoSuchId:
        """
        return self.inventory.id2path(file_id)

    def has_id(self, file_id):
        return self.inventory.has_id(file_id)

    def has_or_had_id(self, file_id):
        return self.inventory.has_id(file_id)

    def all_file_ids(self):
        return set(self.inventory)

    @deprecated_method(deprecated_in((2, 4, 0)))
    def __iter__(self):
        return iter(self.inventory)

    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.

        :return: set of paths.
        """
        # NB: we specifically *don't* call self.has_filename, because for
        # WorkingTrees that can indicate files that exist on disk but that
        # are not versioned.
        pred = self.inventory.has_filename
        return set((p for p in paths if not pred(p)))

    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:
        See Tree.iter_entries_by_dir for details.

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        """
        return self.inventory.iter_entries_by_dir(
            specific_file_ids=specific_file_ids, yield_parents=yield_parents)

    def get_file_by_path(self, path):
        return self.get_file(self._inventory.path2id(path), path)
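
    # Illustrative sketch (not part of the API): filter_unversioned_files
    # keeps only the paths the inventory does not know about. `tree` and the
    # sample paths are assumptions for the example only.
    #
    #   unknown = tree.filter_unversioned_files(['README', 'scratch.tmp'])
    #   # -> set(['scratch.tmp']) if only README is versioned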


######################################################################


class InterTree(InterObject):
    """This class represents operations taking place between two Trees.

    Its instances have methods like 'compare' and contain references to the
    source and target trees these operations are to be carried out on.

    Clients of bzrlib should not need to use InterTree directly, rather they
    should use the convenience methods on Tree such as 'Tree.changes_from',
    which will pass through to InterTree as appropriate.
    """

    # Formats that will be used to test this InterTree. If both are
    # None, this InterTree will not be tested (e.g. because a complex
    # setup is required)
    _matching_from_tree_format = None
    _matching_to_tree_format = None

    @classmethod
    def is_compatible(kls, source, target):
        # The default implementation is naive and uses the public API, so
        # it works for all trees.
        return True

    def _changes_from_entries(self, source_entry, target_entry,
        source_path=None, target_path=None):
        """Generate an iter_changes tuple between source_entry and target_entry.

        :param source_entry: An inventory entry from self.source, or None.
        :param target_entry: An inventory entry from self.target, or None.
        :param source_path: The path of source_entry, if known. If not known
            it will be looked up.
        :param target_path: The path of target_entry, if known. If not known
            it will be looked up.
        :return: A tuple, item 0 of which is an iter_changes result tuple, and
            item 1 is True if there are any changes in the result tuple.
        """
        if source_entry is None:
            if target_entry is None:
                return None
            file_id = target_entry.file_id
        else:
            file_id = source_entry.file_id
        if source_entry is not None:
            source_versioned = True
            source_name = source_entry.name
            source_parent = source_entry.parent_id
            if source_path is None:
                source_path = self.source.id2path(file_id)
            source_kind, source_executable, source_stat = \
                self.source._comparison_data(source_entry, source_path)
        else:
            source_versioned = False
            source_name = None
            source_parent = None
            source_kind = None
            source_executable = None
        if target_entry is not None:
            target_versioned = True
            target_name = target_entry.name
            target_parent = target_entry.parent_id
            if target_path is None:
                target_path = self.target.id2path(file_id)
            target_kind, target_executable, target_stat = \
                self.target._comparison_data(target_entry, target_path)
        else:
            target_versioned = False
            target_name = None
            target_parent = None
            target_kind = None
            target_executable = None
        versioned = (source_versioned, target_versioned)
        kind = (source_kind, target_kind)
        changed_content = False
        if source_kind != target_kind:
            changed_content = True
        elif source_kind == 'file':
            if (self.source.get_file_sha1(file_id, source_path, source_stat) !=
                self.target.get_file_sha1(file_id, target_path, target_stat)):
                changed_content = True
        elif source_kind == 'symlink':
            if (self.source.get_symlink_target(file_id) !=
                self.target.get_symlink_target(file_id)):
                changed_content = True
            # XXX: Yes, the indentation below is wrong. But fixing it broke
            # test_merge.TestMergerEntriesLCAOnDisk.
            # test_nested_tree_subtree_renamed_and_modified. We'll wait for
            # the fix from bzr.dev -- vila 2009026
        elif source_kind == 'tree-reference':
            if (self.source.get_reference_revision(file_id, source_path)
                != self.target.get_reference_revision(file_id, target_path)):
                    changed_content = True
        parent = (source_parent, target_parent)
        name = (source_name, target_name)
        executable = (source_executable, target_executable)
        if (changed_content is not False or versioned[0] != versioned[1]
            or parent[0] != parent[1] or name[0] != name[1] or
            executable[0] != executable[1]):
            changes = True
        else:
            changes = False
        return (file_id, (source_path, target_path), changed_content,
                versioned, parent, name, kind, executable), changes
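
    # Illustrative sketch (not part of the API): the shape of the result
    # tuple built above, as a caller would unpack it. Only the field order
    # comes from the return statement; the variable names are illustrative.
    #
    #   (file_id, (source_path, target_path), changed_content, versioned,
    #    parent, name, kind, executable) = result
    #   # versioned, parent, name, kind and executable are each a
    #   # (source_value, target_value) pair.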

    @needs_read_lock
    def compare(self, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):

        :param require_versioned: Raise errors.PathsNotVersionedError if a
            path in the specific_files list is not versioned in one of
            source, target or extra_trees.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children
            of matched directories are included. The parents in the target tree
            of the specific files up to and including the root of the tree are
            always evaluated for changes too.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        """
        lookup_trees = [self.source]
        if extra_trees:
            lookup_trees.extend(extra_trees)
        # The ids of items we need to examine to ensure delta consistency.
        precise_file_ids = set()
        changed_file_ids = []
        if specific_files == []:
            specific_file_ids = []
        else:
            specific_file_ids = self.target.paths2ids(specific_files,
                lookup_trees, require_versioned=require_versioned)
        if specific_files is not None:
            # reparented or added entries must have their parents included
            # so that valid deltas can be created. The seen_parents set
            # tracks the parents that we need to have.
            # The seen_dirs set tracks directory entries we've yielded.
            # After outputting version object in to_entries we set difference
            # the two seen sets and start checking parents.
            seen_parents = set()
            seen_dirs = set()
        if want_unversioned:
            all_unversioned = sorted([(p.split('/'), p) for p in
                                     self.target.extras()
                if specific_files is None or
                    osutils.is_inside_any(specific_files, p)])
            all_unversioned = collections.deque(all_unversioned)
        else:
            all_unversioned = collections.deque()
        to_paths = {}
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        entry_count = 0
        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable values when execute is not supported.
        fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
        for target_path, target_entry in to_entries_by_dir:
            while (all_unversioned and
                all_unversioned[0][0] < target_path.split('/')):
                unversioned_path = all_unversioned.popleft()
                target_kind, target_executable, target_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, None),
                    (None, unversioned_path[0][-1]),
                    (None, target_kind),
                    (None, target_executable))
            source_path, source_entry = from_data.get(target_entry.file_id,
                (None, None))
            result, changes = self._changes_from_entries(source_entry,
                target_entry, source_path=source_path, target_path=target_path)
            to_paths[result[0]] = result[1][1]
            entry_count += 1
            if result[3][0]:
                entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            if changes or include_unchanged:
                if specific_file_ids is not None:
                    new_parent_id = result[4][1]
                    precise_file_ids.add(new_parent_id)
                    changed_file_ids.append(result[0])
                yield result
            # Ensure correct behaviour for reparented/added specific files.
            if specific_files is not None:
                # Record output dirs
                if result[6][1] == 'directory':
                    seen_dirs.add(result[0])
                # Record parents of reparented/added entries.
                versioned = result[3]
                parents = result[4]
                if not versioned[0] or parents[0] != parents[1]:
                    seen_parents.add(parents[1])
        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(fake_entry, unversioned_path[1])
            yield (None, (None, unversioned_path[1]), True, (False, False),
                (None, None),
                (None, unversioned_path[0][-1]),
                (None, to_kind),
                (None, to_executable))
        # Yield all remaining source paths
        for path, from_entry in from_entries_by_dir:
            file_id = from_entry.file_id
            if file_id in to_paths:
                # already returned
                continue
            if not self.target.has_id(file_id):
                # common case - paths we have not emitted are not present in
                # target.
                to_path = None
            else:
                to_path = self.target.id2path(file_id)
            entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            versioned = (True, False)
            parent = (from_entry.parent_id, None)
            name = (from_entry.name, None)
            from_kind, from_executable, stat_value = \
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            changed_file_ids.append(file_id)
            yield(file_id, (path, to_path), changed_content, versioned, parent,
                  name, kind, executable)
        changed_file_ids = set(changed_file_ids)
        if specific_file_ids is not None:
            for result in self._handle_precise_ids(precise_file_ids,
                changed_file_ids):
                yield result

    def _get_entry(self, tree, file_id):
        """Get an inventory entry from a tree, with missing entries as None.

        If the tree raises NotImplementedError on accessing .inventory, then
        this is worked around using iter_entries_by_dir on just the file id
        desired.

        :param tree: The tree to lookup the entry in.
        :param file_id: The file_id to lookup.
        """
        try:
            inventory = tree.inventory
        except NotImplementedError:
            # No inventory available.
            try:
                iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
                return iterator.next()[1]
            except StopIteration:
                return None
        else:
            try:
                return inventory[file_id]
            except errors.NoSuchId:
                return None

    def _handle_precise_ids(self, precise_file_ids, changed_file_ids,
        discarded_changes=None):
        """Fill out a partial iter_changes to be consistent.

        :param precise_file_ids: The file ids of parents that were seen during
            the iter_changes.
        :param changed_file_ids: The file ids of already emitted items.
        :param discarded_changes: An optional dict of precalculated
            iter_changes items which the partial iter_changes had not output
            but had calculated.
        :return: A generator of iter_changes items to output.
        """
        # process parents of things that had changed under the users
        # requested paths to prevent incorrect paths or parent ids which
        # aren't in the tree.
        while precise_file_ids:
            precise_file_ids.discard(None)
            # Don't emit file_ids twice
            precise_file_ids.difference_update(changed_file_ids)
            if not precise_file_ids:
                break
            # If there was something at a given output path in source, we
            # have to include the entry from source in the delta, or we would
            # be putting this entry into a used path.
            paths = []
            for parent_id in precise_file_ids:
                try:
                    paths.append(self.target.id2path(parent_id))
                except errors.NoSuchId:
                    # This id has been dragged in from the source by delta
                    # expansion and isn't present in target at all: we don't
                    # need to check for path collisions on it.
                    pass
            for path in paths:
                old_id = self.source.path2id(path)
                precise_file_ids.add(old_id)
            precise_file_ids.discard(None)
            current_ids = precise_file_ids
            precise_file_ids = set()
            # We have to emit all of precise_file_ids that have been altered.
            # We may have to output the children of some of those ids if any
            # directories have stopped being directories.
            for file_id in current_ids:
                if discarded_changes:
                    result = discarded_changes.get(file_id)
                    old_entry = None
                else:
                    result = None
                if result is None:
                    old_entry = self._get_entry(self.source, file_id)
                    new_entry = self._get_entry(self.target, file_id)
                    result, changes = self._changes_from_entries(
                        old_entry, new_entry)
                else:
                    changes = True
                # Get this parent's parent to examine.
                new_parent_id = result[4][1]
                precise_file_ids.add(new_parent_id)
                if changes:
                    if (result[6][0] == 'directory' and
                        result[6][1] != 'directory'):
                        # This stopped being a directory, the old children have
                        # to be included.
                        if old_entry is None:
                            # Reusing a discarded change.
                            old_entry = self._get_entry(self.source, file_id)
                        for child in old_entry.children.values():
                            precise_file_ids.add(child.file_id)
                    changed_file_ids.add(result[0])
                    yield result


InterTree.register_optimiser(InterTree)


class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()

    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie

    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
                   MultiWalker._path_to_key(path2))

    @staticmethod
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)
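
    # Illustrative sketch (not part of the API): the 'dirblock' ordering the
    # two helpers above produce. The sample paths are assumptions; sorting by
    # _path_to_key groups all children of a directory together and puts every
    # grandchild after all children.
    #
    #   paths = [u'a/b/c', u'a', u'f', u'a/b', u'a/d']
    #   sorted(paths, key=MultiWalker._path_to_key)
    #   # -> [u'a', u'f', u'a/b', u'a/d', u'a/b/c']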

    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)

    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result

    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra

    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries

    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values
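
    # Illustrative sketch (not part of the API): driving a MultiWalker,
    # assuming the iter_all() entry point shown above. The tree variable
    # names are invented for the example.
    #
    #   walker = MultiWalker(basis_tree, [other_tree_1, other_tree_2])
    #   for path, file_id, master_ie, other_values in walker.iter_all():
    #       # master_ie is None for entries only present in the other trees;
    #       # other_values is a list of (path, entry) pairs, one per other
    #       # tree, with (None, None) where the entry is absent.
    #       pass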