            specific_files=specific_files,
            extra_trees=extra_trees,
            require_versioned=require_versioned,
            include_root=include_root,
            want_unversioned=want_unversioned,
            )

    def iter_changes(self, from_tree, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=None,
                     require_versioned=True, want_unversioned=False):
        """See InterTree.iter_changes"""
        intertree = InterTree.get(from_tree, self)
        return intertree.iter_changes(include_unchanged, specific_files, pb,
            extra_trees, require_versioned,
            want_unversioned=want_unversioned)
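    # Illustrative sketch (not part of the original module): how a caller
    # might consume the tuples produced by iter_changes().  The names below
    # (wt, basis) are hypothetical.
    #
    #   basis = wt.basis_tree()
    #   for (file_id, (old_path, new_path), changed_content, versioned,
    #        parent, name, kind, executable) in wt.iter_changes(basis):
    #       if changed_content:
    #           print 'content changed:', new_path or old_path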
    def conflicts(self):
        """Get a list of the conflicts in the tree.

        Each conflict is an instance of bzrlib.conflicts.Conflict.
        """
        return _mod_conflicts.ConflictList()

    def extras(self):
        """For trees that can have unversioned files, return all such paths."""
        return []
    def get_parent_ids(self):
        """Get the parent ids for this tree.

        :return: a list of parent ids. [] is returned to indicate
            a tree with no parents.
        :raises: BzrError if the parents are not known.
        """
        raise NotImplementedError(self.get_parent_ids)
    def has_filename(self, filename):
        """True if the tree has given filename."""
        raise NotImplementedError(self.has_filename)

    def has_id(self, file_id):
        raise NotImplementedError(self.has_id)

    def __contains__(self, file_id):
        return self.has_id(file_id)
    def has_or_had_id(self, file_id):
        raise NotImplementedError(self.has_or_had_id)

    def is_ignored(self, filename):
        """Check whether the filename is ignored by this tree.

        :param filename: The relative filename within the tree.
        :return: True if the filename is ignored.
        """
        return False
    def __iter__(self):
        """Yield all file ids in this tree."""
        raise NotImplementedError(self.__iter__)

    def all_file_ids(self):
        """Iterate through all file ids, including ids for missing files."""
        return set(self.inventory)
    def id2path(self, file_id):
        """Return the path for a file id."""
        raise NotImplementedError(self.id2path)

    def is_control_filename(self, filename):
        """True if filename is the name of a control file in this tree.

        :param filename: A filename within the tree. This is a relative path
            from the root of this tree.

        This is true IF and ONLY IF the filename is part of the meta data
        that bzr controls in this tree. I.E. a random .bzr directory placed
        on disk will not be a control file for this tree.
        """
        return self.bzrdir.is_control_filename(filename)
    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the
        subdirectories.

        For example, in the tree::

           a/
             b/
               c
             d/
               e
           f/
             g

        The yield order (ignoring root) would be::

          a, f, a/b, a/d, a/b/c, a/d/e, f/g

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        """
        raise NotImplementedError(self.iter_entries_by_dir)
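    # Illustrative sketch (not part of the original module): printing entries
    # in 'by_dir' order.  For the example tree in the docstring above this
    # prints a, f, a/b, a/d, a/b/c, a/d/e, f/g (ignoring the root).
    #
    #   for path, entry in tree.iter_entries_by_dir():
    #       print path, entry.kind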
    def list_files(self, include_root=False, from_dir=None, recursive=True):
        """List all files in this tree.

        :param include_root: Whether to include the entry for the tree root
        :param from_dir: Directory under which to list files
        :param recursive: Whether to list files recursively
        :return: iterator over tuples of (path, versioned, kind, file_id,
            inventory entry)
        """
        raise NotImplementedError(self.list_files)

    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path, entry.file_id
    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
            % self.__class__.__name__)

    def stored_kind(self, file_id):
        """File kind stored for this file_id.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        """
        return self.kind(file_id)
    def path_content_summary(self, path):
        """Get a summary of the information about path.

        All the attributes returned are for the canonical form, not the
        convenient form (if content filters are in use.)

        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file and the size of the
            canonical form can be cheaply determined, None otherwise.
            exec is None unless kind is file and the platform supports the 'x'
            bit.
            sha1-or-link is the link target if kind is symlink, or the sha1 if
            it can be obtained without reading the file.
        """
        raise NotImplementedError(self.path_content_summary)
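    # Illustrative sketch (not part of the original module): unpacking the
    # 4-tuple described above.  `tree` is a hypothetical Tree instance.
    #
    #   kind, size, executable, sha1_or_link = \
    #       tree.path_content_summary('README')
    #   if kind == 'file' and sha1_or_link is not None:
    #       print 'sha1 already known:', sha1_or_link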
    def get_reference_revision(self, file_id, path=None):
        raise NotImplementedError("Tree subclass %s must implement "
                                  "get_reference_revision"
                                  % self.__class__.__name__)
    def _comparison_data(self, entry, path):
        """Return a tuple of kind, executable, stat_value for a file."""

    def get_symlink_target(self, file_id):
        raise NotImplementedError(self.get_symlink_target)
    def get_root_id(self):
        """Return the file_id for the root of this tree."""
        raise NotImplementedError(self.get_root_id)

    def annotate_iter(self, file_id,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.

        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.
        :param file_id: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of
            it.
        """
        raise NotImplementedError(self.annotate_iter)
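    # Illustrative sketch (not part of the original module): lines not yet
    # committed are attributed to default_revision ('current:' by default
    # for working trees).
    #
    #   for revision_id, line in tree.annotate_iter(file_id):
    #       print revision_id, line,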
    def _get_plan_merge_data(self, file_id, other, base):
        from bzrlib import versionedfile
        vf = versionedfile._PlanMergeVersionedFile(file_id)
        last_revision_a = self._get_file_revision(file_id, vf, 'this:')
        last_revision_b = other._get_file_revision(file_id, vf, 'other:')
        if base is None:
            last_revision_base = None
        else:
            last_revision_base = base._get_file_revision(file_id, vf, 'base:')
        return vf, last_revision_a, last_revision_b, last_revision_base
    def plan_file_merge(self, file_id, other, base=None):
        """Generate a merge plan based on annotations.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision.  If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,
                             last_revision_base)

    def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision.  If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 last_revision_base)
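    # Illustrative sketch (not part of the original module): a merge plan is
    # a sequence of (state, line) pairs, where state is a string such as
    # 'unchanged', 'new-a', 'new-b', 'killed-a' or 'killed-b'.
    #
    #   for state, line in this_tree.plan_file_merge(file_id, other_tree):
    #       if state != 'unchanged':
    #           print state, line,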
    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)
    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                         self.get_file_lines(file_id))
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self.get_file_revision(file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision
    def _check_retrieved(self, ie, f):
        if not __debug__:
            return
        fp = osutils.fingerprint_file(f)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise errors.BzrError(
                        "mismatched size for file %r in %r" %
                        (ie.file_id, self._store),
                        ["inventory expects %d bytes" % ie.text_size,
                         "file is actually %d bytes" % fp['size'],
                         "store is probably damaged/corrupt"])

        if ie.text_sha1 != fp['sha1']:
            raise errors.BzrError("wrong SHA-1 for file %r in %r" %
                    (ie.file_id, self._store),
                    ["inventory expects %s" % ie.text_sha1,
                     "file is actually %s" % fp['sha1'],
                     "store is probably damaged/corrupt"])
    def path2id(self, path):
        """Return the id for path in this tree."""
        raise NotImplementedError(self.path2id)

    def paths2ids(self, paths, trees=[], require_versioned=True):
        """Return all the ids that can be reached by walking from paths.

        Each path is looked up in this tree and any extras provided in
        trees, and this is repeated recursively: the children in an extra tree
        of a directory that has been renamed under a provided path in this
        tree are all returned, even if none exist under a provided path in
        this tree, and vice versa.

        :param paths: An iterable of paths to start converting to ids from.
        """
        return find_ids_across_trees(paths, trees + [self], require_versioned)

    def walkdirs(self, prefix=""):
        raise NotImplementedError(self.walkdirs)
    def supports_content_filtering(self):
        return False
    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        """
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            trace.note("*** %s content-filter: %s => %r" % (path, prefs, stk))
        return stk
    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        """
        if self.supports_content_filtering():
            return lambda path, file_id: \
                    self._content_filter_stack(path, file_id)
        else:
            return None
    def iter_search_rules(self, path_names, pref_names=None,
                          _default_searcher=None):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
            Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
            See _RulesSearcher.get_items for details on the tuple sequence.
        """
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)
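    # Illustrative sketch (not part of the original module): looking up the
    # preferences defined in the user's rules files for a couple of paths;
    # each yielded value is a sequence of (name, value) tuples.
    #
    #   for prefs in tree.iter_search_rules(['doc/index.txt', 'hello.c']):
    #       print dict(prefs)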
    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher
class InventoryTree(Tree):
    """A tree that relies on an inventory for its metadata.

    Trees contain an `Inventory` object, and also know how to retrieve
    file texts mentioned in the inventory, either from a working
    directory or from a store.

    It is possible for trees to contain files that are not described
    in their inventory or vice versa; for this use `filenames()`.

    Subclasses should set the _inventory attribute, which is considered
    private to external API users.
    """
    def get_canonical_inventory_paths(self, paths):
        """Like get_canonical_inventory_path() but works on multiple items.

        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case
            insensitively.
        """
        return list(self._yield_canonical_inventory_paths(paths))
    def get_canonical_inventory_path(self, path):
        """Returns the first inventory item that case-insensitively matches path.

        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches case-insensitively, it is implementation
        defined which is returned.

        If no path matches case-insensitively, the input path is returned, but
        with as many path entries that do exist changed to their canonical
        form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        """
        return self._yield_canonical_inventory_paths([path]).next()
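    # Illustrative sketch (not part of the original module): on a tree that
    # versions 'README', a differently-cased lookup is mapped back to the
    # canonical spelling, while an unknown path is returned unchanged.
    #
    #   tree.get_canonical_inventory_path('readme')   # -> 'README'
    #   tree.get_canonical_inventory_path('no-file')  # -> 'no-file'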
    def _yield_canonical_inventory_paths(self, paths):
        for path in paths:
            # First, if the path as specified exists exactly, just use it.
            if self.path2id(path) is not None:
                yield path
                continue
            cur_id = self.get_root_id()
            cur_path = ''
            bit_iter = iter(path.split("/"))
            for elt in bit_iter:
                lelt = elt.lower()
                new_path = None
                for child in self.iter_children(cur_id):
                    try:
                        # XXX: it seems like if the child is known to be in the
                        # tree, we shouldn't need to go from its id back to
                        # its path -- mbp 2010-02-11
                        #
                        # XXX: it seems like we could be more efficient
                        # by just directly looking up the original name and
                        # only then searching all children; also by not
                        # chopping paths so much. -- mbp 2010-02-11
                        child_base = os.path.basename(self.id2path(child))
                        if (child_base == elt):
                            # if we found an exact match, we can stop now; if
                            # we found an approximate match we need to keep
                            # searching because there might be an exact match
                            # later.
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                            break
                        elif child_base.lower() == lelt:
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                    except errors.NoSuchId:
                        # before a change is committed we can see this error...
                        continue
                if new_path is None:
                    # got to the end of this directory and no entries matched.
                    # Return what matched so far, plus the rest as specified.
                    cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
                    break
                cur_path = new_path
            yield cur_path
    def _get_inventory(self):
        return self._inventory

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def path2id(self, path):
        """Return the id for path in this tree."""
        return self._inventory.path2id(path)

    def id2path(self, file_id):
        """Return the path for a file id."""
        return self.inventory.id2path(file_id)

    def has_id(self, file_id):
        return self.inventory.has_id(file_id)

    def has_or_had_id(self, file_id):
        return self.inventory.has_id(file_id)

    def __iter__(self):
        return iter(self.inventory)
    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.

        :return: set of paths.
        """
        # NB: we specifically *don't* call self.has_filename, because for
        # WorkingTrees that can indicate files that exist on disk but that
        # are not versioned.
        pred = self.inventory.has_filename
        return set((p for p in paths if not pred(p)))
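    # Illustrative sketch (not part of the original module): only the paths
    # missing from the inventory survive the filter.
    #
    #   unversioned = tree.filter_unversioned_files(['README', 'scratch.tmp'])
    #   # -> set(['scratch.tmp']) if only README is versioned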
    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is the same as for
        Tree.iter_entries_by_dir; see that method for details.

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        """
        return self.inventory.iter_entries_by_dir(
            specific_file_ids=specific_file_ids, yield_parents=yield_parents)

    def get_file_by_path(self, path):
        return self.get_file(self._inventory.path2id(path), path)
######################################################################


def find_ids_across_trees(filenames, trees, require_versioned=True):
    specified_path_ids = _find_ids_across_trees(filenames, trees,
        require_versioned)
    return _find_children_across_trees(specified_path_ids, trees)
def find_path_ids_across_trees(filenames, trees, require_versioned=True):
    """Find the paths and ids corresponding to specified filenames.

    All matches in all trees will be used, and all children of matched
    directories will be included.

    :param filenames: The filenames to find file_ids for
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of (path, file ids) for the specified filenames and their
        children. The returned path is the path of the id in the first tree
        that contains it. This matters when files have been moved.
    """
    # This function needs to know the ids for filenames in all trees, then
    # search for those same files and children in all the other trees.
    # It is complicated by the same path in two trees being able to have
    # different ids, which might both be present in both trees.
    # Consider two trees, which have had 'mv foo bar' and 'mv baz foo' done:
    # in this case, a diff of 'foo' should show changes to both the current
    # 'bar' and the current 'foo' which was baz. It's arguable that if
    # the situation is 'mv parent/foo bar' and 'mv baz parent/foo', then
    # we should return the current 'bar' and the current 'parent/foo' - at the
    # moment we do, but we loop around all ids and all trees: I*T checks.

    # Updating this algorithm to be fast in the common case:
    # nothing has moved, all files have the same id in parent, child and there
    # are only two trees (or one is working tree and the others are parents).
    # Walk the dirstate. As we find each path, gather the paths of that
    # id in all trees. Add a mapping from the id to the path in those trees.
    # Now look up children by id, again in all trees; for the trees that
    # nothing has moved in, the id->path mapping will allow us to find the
    # parent trivially. To answer 'has anything been moved' in one of the
    # dirstate parent trees though, we will need to stare harder at it.

    # Now, given a path index, that is trivial for any one tree, and given
    # that we can ask for additional data from a dirstate tree, it is a single
    # pass, though it will require scanning the entire tree to find paths
    # that were at the current location.
    # Ideal results?: There are three things: tree, path, id. Pathologically
    # we can have completely disjoint ids for each tree; but we cannot have
    # disjoint paths for each tree, except if we scan each tree for the
    # different ids from other trees.

    specified_path_ids = _find_ids_across_trees(filenames, trees,
        require_versioned)
    return _find_path_id_children_across_trees(specified_path_ids, trees)
def _find_ids_across_trees(filenames, trees, require_versioned):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, but subdirectories are not scanned.

    :param filenames: The filenames to find file_ids for
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames
    """
    not_versioned = []
    interesting_ids = set()
class InterTree(InterObject):
    """This class represents operations taken from some tree, to another tree.

    Its instances have methods like 'compare' and contain references to the
    source and target trees these operations are to be carried out on.

    Clients of bzrlib should not need to use InterTree directly, rather they
    should use the convenience methods on Tree such as 'Tree.compare()' which
    will pass through to InterTree as appropriate.
    """

    # Formats that will be used to test this InterTree. If both are
    # None, this InterTree will not be tested (e.g. because a complex
    # setup is required).
    _matching_from_tree_format = None
    _matching_to_tree_format = None
    def _changes_from_entries(self, source_entry, target_entry,
        source_path=None, target_path=None):
        """Generate a iter_changes tuple between source_entry and target_entry.

        :param source_entry: An inventory entry from self.source, or None.
        :param target_entry: An inventory entry from self.target, or None.
        :param source_path: The path of source_entry, if known. If not known
            it will be looked up.
        :param target_path: The path of target_entry, if known. If not known
            it will be looked up.
        :return: A tuple, item 0 of which is an iter_changes result tuple, and
            item 1 is True if there are any changes in the result tuple.
        """
        if source_entry is None:
            if target_entry is None:
                return None
            file_id = target_entry.file_id
        else:
            file_id = source_entry.file_id
        if source_entry is not None:
            source_versioned = True
            source_name = source_entry.name
            source_parent = source_entry.parent_id
            if source_path is None:
                source_path = self.source.id2path(file_id)
            source_kind, source_executable, source_stat = \
                self.source._comparison_data(source_entry, source_path)
        else:
            source_versioned = False
            source_name = None
            source_parent = None
            source_kind = None
            source_executable = None
        if target_entry is not None:
            target_versioned = True
            target_name = target_entry.name
            target_parent = target_entry.parent_id
            if target_path is None:
                target_path = self.target.id2path(file_id)
            target_kind, target_executable, target_stat = \
                self.target._comparison_data(target_entry, target_path)
        else:
            target_versioned = False
            target_name = None
            target_parent = None
            target_kind = None
            target_executable = None
        versioned = (source_versioned, target_versioned)
        kind = (source_kind, target_kind)
        changed_content = False
        if source_kind != target_kind:
            changed_content = True
        elif source_kind == 'file':
            if (self.source.get_file_sha1(file_id, source_path, source_stat) !=
                self.target.get_file_sha1(file_id, target_path, target_stat)):
                changed_content = True
        elif source_kind == 'symlink':
            if (self.source.get_symlink_target(file_id) !=
                self.target.get_symlink_target(file_id)):
                changed_content = True
            # XXX: Yes, the indentation below is wrong. But fixing it broke
            # test_merge.TestMergerEntriesLCAOnDisk.
            # test_nested_tree_subtree_renamed_and_modified. We'll wait for
            # the fix from bzr.dev -- vila 2009026
            elif source_kind == 'tree-reference':
                if (self.source.get_reference_revision(file_id, source_path)
                    != self.target.get_reference_revision(file_id, target_path)):
                    changed_content = True
        parent = (source_parent, target_parent)
        name = (source_name, target_name)
        executable = (source_executable, target_executable)
        if (changed_content is not False or versioned[0] != versioned[1]
            or parent[0] != parent[1] or name[0] != name[1] or
            executable[0] != executable[1]):
            changes = True
        else:
            changes = False
        return (file_id, (source_path, target_path), changed_content,
                versioned, parent, name, kind, executable), changes
    @needs_read_lock
    def compare(self, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):
        """Return the changes from source to target.

        :return: A TreeDelta.
        """

    def iter_changes(self, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=[],
                     require_versioned=True, want_unversioned=False):
        """Generate an iterator of changes between trees.

        :param require_versioned: Raise errors.PathsNotVersionedError if a
            path in the specific_files list is not versioned in one of
            source, target or extra_trees.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children
            of matched directories are included. The parents in the target tree
            of the specific files up to and including the root of the tree are
            always evaluated for changes too.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        """
        lookup_trees = [self.source]
        if extra_trees:
            lookup_trees.extend(extra_trees)
        # The ids of items we need to examine to ensure delta consistency.
        precise_file_ids = set()
        changed_file_ids = []
        if specific_files == []:
            specific_file_ids = []
        else:
            specific_file_ids = self.target.paths2ids(specific_files,
                lookup_trees, require_versioned=require_versioned)
        if specific_files is not None:
            # reparented or added entries must have their parents included
            # so that valid deltas can be created. The seen_parents set
            # tracks the parents that we need to have.
            # The seen_dirs set tracks directory entries we've yielded.
            # After outputting version object in to_entries we set difference
            # the two seen sets and start checking parents.
            seen_parents = set()
            seen_dirs = set()
        if want_unversioned:
            all_unversioned = sorted([(p.split('/'), p) for p in
                                     self.target.extras()
                if specific_files is None or
                    osutils.is_inside_any(specific_files, p)])
            all_unversioned = collections.deque(all_unversioned)
        else:
            all_unversioned = collections.deque()
        to_paths = {}
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        entry_count = 0
        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable values when execute is not supported.
        fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
        for target_path, target_entry in to_entries_by_dir:
            while (all_unversioned and
                all_unversioned[0][0] < target_path.split('/')):
                unversioned_path = all_unversioned.popleft()
                target_kind, target_executable, target_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, None),
                    (None, unversioned_path[0][-1]),
                    (None, target_kind),
                    (None, target_executable))
            source_path, source_entry = from_data.get(target_entry.file_id,
                (None, None))
            result, changes = self._changes_from_entries(source_entry,
                target_entry, source_path=source_path, target_path=target_path)
            to_paths[result[0]] = result[1][1]
            entry_count += 1
            if result[3][0]:
                entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            if changes or include_unchanged:
                if specific_file_ids is not None:
                    new_parent_id = result[4][1]
                    precise_file_ids.add(new_parent_id)
                    changed_file_ids.append(result[0])
                yield result
            # Ensure correct behaviour for reparented/added specific files.
            if specific_files is not None:
                # Record output dirs
                if result[6][1] == 'directory':
                    seen_dirs.add(result[0])
                # Record parents of reparented/added entries.
                versioned = result[3]
                parents = result[4]
                if not versioned[0] or parents[0] != parents[1]:
                    seen_parents.add(parents[1])
        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(fake_entry, unversioned_path[1])
            yield (None, (None, unversioned_path[1]), True, (False, False),
                (None, None),
                (None, unversioned_path[0][-1]),
                (None, to_kind),
                (None, to_executable))
        # Yield all remaining source paths
        for path, from_entry in from_entries_by_dir:
            file_id = from_entry.file_id
            if file_id in to_paths:
                # already returned
                continue
            if not self.target.has_id(file_id):
                # common case - paths we have not emitted are not present in
                # the target.
                to_path = None
            else:
                to_path = self.target.id2path(file_id)
            entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            versioned = (True, False)
            parent = (from_entry.parent_id, None)
            name = (from_entry.name, None)
            from_kind, from_executable, from_stat = \
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            changed_file_ids.append(file_id)
            yield(file_id, (path, to_path), changed_content, versioned, parent,
                  name, kind, executable)
        changed_file_ids = set(changed_file_ids)
        if specific_file_ids is not None:
            for result in self._handle_precise_ids(precise_file_ids,
                changed_file_ids):
                yield result
    def _get_entry(self, tree, file_id):
        """Get an inventory entry from a tree, with missing entries as None.

        If the tree raises NotImplementedError on accessing .inventory, then
        this is worked around using iter_entries_by_dir on just the file id
        desired.

        :param tree: The tree to lookup the entry in.
        :param file_id: The file_id to lookup.
        """
        try:
            inventory = tree.inventory
        except NotImplementedError:
            # No inventory available.
            try:
                iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
                return iterator.next()[1]
            except StopIteration:
                return None
        else:
            try:
                return inventory[file_id]
            except errors.NoSuchId:
                return None
    def _handle_precise_ids(self, precise_file_ids, changed_file_ids,
        discarded_changes=None):
        """Fill out a partial iter_changes to be consistent.

        :param precise_file_ids: The file ids of parents that were seen during
            the iter_changes.
        :param changed_file_ids: The file ids of already emitted items.
        :param discarded_changes: An optional dict of precalculated
            iter_changes items which the partial iter_changes had not output
            but had calculated.
        :return: A generator of iter_changes items to output.
        """
        # process parents of things that had changed under the users
        # requested paths to prevent incorrect paths or parent ids which
        # aren't in the tree.
        while precise_file_ids:
            precise_file_ids.discard(None)
            # Don't emit file_ids twice
            precise_file_ids.difference_update(changed_file_ids)
            if not precise_file_ids:
                break
            # If there was something at a given output path in source, we
            # have to include the entry from source in the delta, or we would
            # be putting this entry into a used path.
            paths = []
            for parent_id in precise_file_ids:
                try:
                    paths.append(self.target.id2path(parent_id))
                except errors.NoSuchId:
                    # This id has been dragged in from the source by delta
                    # expansion and isn't present in target at all: we don't
                    # need to check for path collisions on it.
                    pass
            for path in paths:
                old_id = self.source.path2id(path)
                precise_file_ids.add(old_id)
            precise_file_ids.discard(None)
            current_ids = precise_file_ids
            precise_file_ids = set()
            # We have to emit all of precise_file_ids that have been altered.
            # We may have to output the children of some of those ids if any
            # directories have stopped being directories.
            for file_id in current_ids:
                if discarded_changes:
                    result = discarded_changes.get(file_id)
                    old_entry = None
                else:
                    result = None
                if result is None:
                    old_entry = self._get_entry(self.source, file_id)
                    new_entry = self._get_entry(self.target, file_id)
                    result, changes = self._changes_from_entries(
                        old_entry, new_entry)
                else:
                    changes = True
                # Get this parent's parent to examine.
                new_parent_id = result[4][1]
                precise_file_ids.add(new_parent_id)
                if changes:
                    if (result[6][0] == 'directory' and
                        result[6][1] != 'directory'):
                        # This stopped being a directory, the old children have
                        # to be included.
                        if old_entry is None:
                            # Reusing a discarded change.
                            old_entry = self._get_entry(self.source, file_id)
                        for child in old_entry.children.values():
                            precise_file_ids.add(child.file_id)
                    changed_file_ids.add(result[0])
                    yield result
class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()
    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie
    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
                   MultiWalker._path_to_key(path2))

    @staticmethod
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)
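    # Illustrative sketch (not part of the original module): the sort key
    # splits the dirname, so all children of a directory group together.
    #
    #   MultiWalker._path_to_key(u'a/b/c')  # -> ([u'a', u'b'], u'c')
    #   MultiWalker._path_to_key(u'a')      # -> ([u''], u'a')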
    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed.

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)
    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result
    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra
    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries
    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values
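
# Illustrative sketch (not part of the original module): walking a tree and
# two of its parent trees in step.  The tree names here are hypothetical.
#
#   walker = MultiWalker(working_tree, [parent_tree_1, parent_tree_2])
#   for path, file_id, master_ie, other_values in walker.iter_all():
#       # master_ie is None for entries only present in the other trees;
#       # other_values is a list of (path, inventory_entry) pairs, one per
#       # other tree, with (None, None) when the file_id is absent there.
#       pass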