150
return iter(self.inventory)
152
156
def all_file_ids(self):
153
157
"""Iterate through all file ids, including ids for missing files."""
154
return set(self.inventory)
158
raise NotImplementedError(self.all_file_ids)
156
160
def id2path(self, file_id):
157
161
"""Return the path for a file id.
159
163
:raises NoSuchId:
161
return self.inventory.id2path(file_id)
163
def is_control_filename(self, filename):
164
"""True if filename is the name of a control file in this tree.
166
:param filename: A filename within the tree. This is a relative path
167
from the root of this tree.
169
This is true IF and ONLY IF the filename is part of the meta data
170
that bzr controls in this tree. I.E. a random .bzr directory placed
171
on disk will not be a control file for this tree.
173
return self.bzrdir.is_control_filename(filename)
176
def iter_entries_by_dir(self, specific_file_ids=None):
165
raise NotImplementedError(self.id2path)
167
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
177
168
"""Walk the tree in 'by_dir' order.
179
This will yield each entry in the tree as a (path, entry) tuple. The
180
order that they are yielded is: the contents of a directory are
181
preceeded by the parent of a directory, and all the contents of a
182
directory are grouped together.
184
return self.inventory.iter_entries_by_dir(
185
specific_file_ids=specific_file_ids)
170
This will yield each entry in the tree as a (path, entry) tuple.
171
The order that they are yielded is:
173
Directories are walked in a depth-first lexicographical order,
174
however, whenever a directory is reached, all of its direct child
175
nodes are yielded in lexicographical order before yielding the
178
For example, in the tree::
188
The yield order (ignoring root) would be::
190
a, f, a/b, a/d, a/b/c, a/d/e, f/g
192
:param yield_parents: If True, yield the parents from the root leading
193
down to specific_file_ids that have been requested. This has no
194
impact if specific_file_ids is None.
196
raise NotImplementedError(self.iter_entries_by_dir)
198
def list_files(self, include_root=False, from_dir=None, recursive=True):
199
"""List all files in this tree.
201
:param include_root: Whether to include the entry for the tree root
202
:param from_dir: Directory under which to list files
203
:param recursive: Whether to list files recursively
204
:return: iterator over tuples of (path, versioned, kind, file_id,
207
raise NotImplementedError(self.list_files)
187
209
def iter_references(self):
188
for path, entry in self.iter_entries_by_dir():
189
if entry.kind == 'tree-reference':
190
yield path, entry.file_id
210
if self.supports_tree_reference():
211
for path, entry in self.iter_entries_by_dir():
212
if entry.kind == 'tree-reference':
213
yield path, entry.file_id
192
215
def kind(self, file_id):
193
216
raise NotImplementedError("Tree subclass %s must implement kind"
234
261
def _file_size(self, entry, stat_value):
235
262
raise NotImplementedError(self._file_size)
237
def _get_inventory(self):
238
return self._inventory
240
264
def get_file(self, file_id, path=None):
241
265
"""Return a file object for the file file_id in the tree.
243
267
If both file_id and path are defined, it is implementation defined as
244
268
to which one is used.
246
270
raise NotImplementedError(self.get_file)
272
def get_file_with_stat(self, file_id, path=None):
273
"""Get a file handle and stat object for file_id.
275
The default implementation returns (self.get_file, None) for backwards
278
:param file_id: The file id to read.
279
:param path: The path of the file, if it is known.
280
:return: A tuple (file_handle, stat_value_or_None). If the tree has
281
no stat facility, or need for a stat cache feedback during commit,
282
it may return None for the second element of the tuple.
284
return (self.get_file(file_id, path), None)
286
def get_file_text(self, file_id, path=None):
287
"""Return the byte content of a file.
289
:param file_id: The file_id of the file.
290
:param path: The path of the file.
292
If both file_id and path are supplied, an implementation may use
295
:returns: A single byte string for the whole file.
297
my_file = self.get_file(file_id, path)
299
return my_file.read()
303
def get_file_lines(self, file_id, path=None):
304
"""Return the content of a file, as lines.
306
:param file_id: The file_id of the file.
307
:param path: The path of the file.
309
If both file_id and path are supplied, an implementation may use
312
return osutils.split_lines(self.get_file_text(file_id, path))
314
def get_file_verifier(self, file_id, path=None, stat_value=None):
315
"""Return a verifier for a file.
317
The default implementation returns a sha1.
319
:param file_id: The handle for this file.
320
:param path: The path that this file can be found at.
321
These must point to the same object.
322
:param stat_value: Optional stat value for the object
323
:return: Tuple with verifier name and verifier data
325
return ("SHA1", self.get_file_sha1(file_id, path=path,
326
stat_value=stat_value))
328
def get_file_sha1(self, file_id, path=None, stat_value=None):
329
"""Return the SHA1 file for a file.
331
:note: callers should use get_file_verifier instead
332
where possible, as the underlying repository implementation may
333
have quicker access to a non-sha1 verifier.
335
:param file_id: The handle for this file.
336
:param path: The path that this file can be found at.
337
These must point to the same object.
338
:param stat_value: Optional stat value for the object
340
raise NotImplementedError(self.get_file_sha1)
248
342
def get_file_mtime(self, file_id, path=None):
249
343
"""Return the modification time for a file.
357
460
return vf.plan_lca_merge(last_revision_a, last_revision_b,
358
461
last_revision_base)
463
def _iter_parent_trees(self):
464
"""Iterate through parent trees, defaulting to Tree.revision_tree."""
465
for revision_id in self.get_parent_ids():
467
yield self.revision_tree(revision_id)
468
except errors.NoSuchRevisionInTree:
469
yield self.repository.revision_tree(revision_id)
360
471
def _get_file_revision(self, file_id, vf, tree_revision):
361
def file_revision(revision_tree):
362
revision_tree.lock_read()
364
return revision_tree.inventory[file_id].revision
366
revision_tree.unlock()
368
def iter_parent_trees():
369
for revision_id in self.get_parent_ids():
371
yield self.revision_tree(revision_id)
373
yield self.repository.revision_tree(revision_id)
375
if getattr(self, '_get_weave', None) is None:
472
"""Ensure that file_id, tree_revision is in vf to plan the merge."""
474
if getattr(self, '_repository', None) is None:
376
475
last_revision = tree_revision
377
parent_revisions = [file_revision(t) for t in iter_parent_trees()]
378
vf.add_lines(last_revision, parent_revisions,
379
self.get_file(file_id).readlines())
476
parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
477
self._iter_parent_trees()]
478
vf.add_lines((file_id, last_revision), parent_keys,
479
self.get_file_lines(file_id))
380
480
repo = self.branch.repository
381
transaction = repo.get_transaction()
382
base_vf = repo.weave_store.get_weave(file_id, transaction)
384
last_revision = file_revision(self)
385
base_vf = self._get_weave(file_id)
386
vf.fallback_versionedfiles.append(base_vf)
483
last_revision = self.get_file_revision(file_id)
484
base_vf = self._repository.texts
485
if base_vf not in vf.fallback_versionedfiles:
486
vf.fallback_versionedfiles.append(base_vf)
387
487
return last_revision
389
inventory = property(_get_inventory,
390
doc="Inventory of this Tree")
392
489
def _check_retrieved(self, ie, f):
393
490
if not __debug__:
395
fp = fingerprint_file(f)
492
fp = osutils.fingerprint_file(f)
398
495
if ie.text_size is not None:
399
496
if ie.text_size != fp['size']:
400
raise BzrError("mismatched size for file %r in %r" % (ie.file_id, self._store),
497
raise errors.BzrError(
498
"mismatched size for file %r in %r" %
499
(ie.file_id, self._store),
401
500
["inventory expects %d bytes" % ie.text_size,
402
501
"file is actually %d bytes" % fp['size'],
403
502
"store is probably damaged/corrupt"])
405
504
if ie.text_sha1 != fp['sha1']:
406
raise BzrError("wrong SHA-1 for file %r in %r" % (ie.file_id, self._store),
505
raise errors.BzrError("wrong SHA-1 for file %r in %r" %
506
(ie.file_id, self._store),
407
507
["inventory expects %s" % ie.text_sha1,
408
508
"file is actually %s" % fp['sha1'],
409
509
"store is probably damaged/corrupt"])
412
511
def path2id(self, path):
413
512
"""Return the id for path in this tree."""
414
return self._inventory.path2id(path)
513
raise NotImplementedError(self.path2id)
416
515
def paths2ids(self, paths, trees=[], require_versioned=True):
417
516
"""Return all the ids that can be reached by walking from paths.
419
518
Each path is looked up in this tree and any extras provided in
420
519
trees, and this is repeated recursively: the children in an extra tree
421
520
of a directory that has been renamed under a provided path in this tree
510
609
raise NotImplementedError(self.walkdirs)
513
class EmptyTree(Tree):
516
self._inventory = Inventory(root_id=None)
517
symbol_versioning.warn('EmptyTree is deprecated as of bzr 0.9 please'
518
' use repository.revision_tree instead.',
519
DeprecationWarning, stacklevel=2)
521
def get_parent_ids(self):
524
def get_symlink_target(self, file_id):
527
def has_filename(self, filename):
611
def supports_content_filtering(self):
530
def kind(self, file_id):
533
def list_files(self, include_root=False):
536
def __contains__(self, file_id):
537
return (file_id in self._inventory)
539
def get_file_sha1(self, file_id, path=None, stat_value=None):
543
######################################################################
546
# TODO: Merge these two functions into a single one that can operate
547
# on either a whole tree or a set of files.
549
# TODO: Return the diff in order by filename, not by category or in
550
# random order. Can probably be done by lock-stepping through the
551
# filenames from both trees.
554
def file_status(filename, old_tree, new_tree):
555
"""Return single-letter status, old and new names for a file.
557
The complexity here is in deciding how to represent renames;
558
many complex cases are possible.
614
def _content_filter_stack(self, path=None, file_id=None):
615
"""The stack of content filters for a path if filtering is supported.
617
Readers will be applied in first-to-last order.
618
Writers will be applied in last-to-first order.
619
Either the path or the file-id needs to be provided.
621
:param path: path relative to the root of the tree
623
:param file_id: file_id or None if unknown
624
:return: the list of filters - [] if there are none
626
filter_pref_names = filters._get_registered_names()
627
if len(filter_pref_names) == 0:
630
path = self.id2path(file_id)
631
prefs = self.iter_search_rules([path], filter_pref_names).next()
632
stk = filters._get_filter_stack_for(prefs)
633
if 'filters' in debug.debug_flags:
634
trace.note(gettext("*** {0} content-filter: {1} => {2!r}").format(path,prefs,stk))
637
def _content_filter_stack_provider(self):
638
"""A function that returns a stack of ContentFilters.
640
The function takes a path (relative to the top of the tree) and a
641
file-id as parameters.
643
:return: None if content filtering is not supported by this tree.
645
if self.supports_content_filtering():
646
return lambda path, file_id: \
647
self._content_filter_stack(path, file_id)
651
def iter_search_rules(self, path_names, pref_names=None,
652
_default_searcher=None):
653
"""Find the preferences for filenames in a tree.
655
:param path_names: an iterable of paths to find attributes for.
656
Paths are given relative to the root of the tree.
657
:param pref_names: the list of preferences to lookup - None for all
658
:param _default_searcher: private parameter to assist testing - don't use
659
:return: an iterator of tuple sequences, one per path-name.
660
See _RulesSearcher.get_items for details on the tuple sequence.
662
if _default_searcher is None:
663
_default_searcher = rules._per_user_searcher
664
searcher = self._get_rules_searcher(_default_searcher)
665
if searcher is not None:
666
if pref_names is not None:
667
for path in path_names:
668
yield searcher.get_selected_items(path, pref_names)
670
for path in path_names:
671
yield searcher.get_items(path)
673
def _get_rules_searcher(self, default_searcher):
674
"""Get the RulesSearcher for this tree given the default one."""
675
searcher = default_searcher
679
class InventoryTree(Tree):
680
"""A tree that relies on an inventory for its metadata.
682
Trees contain an `Inventory` object, and also know how to retrieve
683
file texts mentioned in the inventory, either from a working
684
directory or from a store.
686
It is possible for trees to contain files that are not described
687
in their inventory or vice versa; for this use `filenames()`.
689
Subclasses should set the _inventory attribute, which is considered
690
private to external API users.
560
old_inv = old_tree.inventory
561
new_inv = new_tree.inventory
562
new_id = new_inv.path2id(filename)
563
old_id = old_inv.path2id(filename)
565
if not new_id and not old_id:
566
# easy: doesn't exist in either; not versioned at all
567
if new_tree.is_ignored(filename):
568
return 'I', None, None
570
return '?', None, None
572
# There is now a file of this name, great.
575
# There is no longer a file of this name, but we can describe
576
# what happened to the file that used to have
577
# this name. There are two possibilities: either it was
578
# deleted entirely, or renamed.
579
if new_inv.has_id(old_id):
580
return 'X', old_inv.id2path(old_id), new_inv.id2path(old_id)
582
return 'D', old_inv.id2path(old_id), None
584
# if the file_id is new in this revision, it is added
585
if new_id and not old_inv.has_id(new_id):
588
# if there used to be a file of this name, but that ID has now
589
# disappeared, it is deleted
590
if old_id and not new_inv.has_id(old_id):
597
def find_renames(old_inv, new_inv):
598
for file_id in old_inv:
599
if file_id not in new_inv:
601
old_name = old_inv.id2path(file_id)
602
new_name = new_inv.id2path(file_id)
603
if old_name != new_name:
604
yield (old_name, new_name)
693
def get_canonical_inventory_paths(self, paths):
694
"""Like get_canonical_inventory_path() but works on multiple items.
696
:param paths: A sequence of paths relative to the root of the tree.
697
:return: A list of paths, with each item the corresponding input path
698
adjusted to account for existing elements that match case
701
return list(self._yield_canonical_inventory_paths(paths))
703
def get_canonical_inventory_path(self, path):
704
"""Returns the first inventory item that case-insensitively matches path.
706
If a path matches exactly, it is returned. If no path matches exactly
707
but more than one path matches case-insensitively, it is implementation
708
defined which is returned.
710
If no path matches case-insensitively, the input path is returned, but
711
with as many path entries that do exist changed to their canonical
714
If you need to resolve many names from the same tree, you should
715
use get_canonical_inventory_paths() to avoid O(N) behaviour.
717
:param path: A paths relative to the root of the tree.
718
:return: The input path adjusted to account for existing elements
719
that match case insensitively.
721
return self._yield_canonical_inventory_paths([path]).next()
723
def _yield_canonical_inventory_paths(self, paths):
725
# First, if the path as specified exists exactly, just use it.
726
if self.path2id(path) is not None:
730
cur_id = self.get_root_id()
732
bit_iter = iter(path.split("/"))
736
for child in self.iter_children(cur_id):
738
# XXX: it seem like if the child is known to be in the
739
# tree, we shouldn't need to go from its id back to
740
# its path -- mbp 2010-02-11
742
# XXX: it seems like we could be more efficient
743
# by just directly looking up the original name and
744
# only then searching all children; also by not
745
# chopping paths so much. -- mbp 2010-02-11
746
child_base = os.path.basename(self.id2path(child))
747
if (child_base == elt):
748
# if we found an exact match, we can stop now; if
749
# we found an approximate match we need to keep
750
# searching because there might be an exact match
753
new_path = osutils.pathjoin(cur_path, child_base)
755
elif child_base.lower() == lelt:
757
new_path = osutils.pathjoin(cur_path, child_base)
758
except errors.NoSuchId:
759
# before a change is committed we can see this error...
764
# got to the end of this directory and no entries matched.
765
# Return what matched so far, plus the rest as specified.
766
cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
771
def _get_inventory(self):
772
return self._inventory
774
inventory = property(_get_inventory,
775
doc="Inventory of this Tree")
778
def path2id(self, path):
779
"""Return the id for path in this tree."""
780
return self._inventory.path2id(path)
782
def id2path(self, file_id):
783
"""Return the path for a file id.
787
return self.inventory.id2path(file_id)
789
def has_id(self, file_id):
790
return self.inventory.has_id(file_id)
792
def has_or_had_id(self, file_id):
793
return self.inventory.has_id(file_id)
795
def all_file_ids(self):
796
return set(self.inventory)
798
@deprecated_method(deprecated_in((2, 4, 0)))
800
return iter(self.inventory)
802
def filter_unversioned_files(self, paths):
803
"""Filter out paths that are versioned.
805
:return: set of paths.
807
# NB: we specifically *don't* call self.has_filename, because for
808
# WorkingTrees that can indicate files that exist on disk but that
810
pred = self.inventory.has_filename
811
return set((p for p in paths if not pred(p)))
814
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
815
"""Walk the tree in 'by_dir' order.
817
This will yield each entry in the tree as a (path, entry) tuple.
818
The order that they are yielded is:
820
See Tree.iter_entries_by_dir for details.
822
:param yield_parents: If True, yield the parents from the root leading
823
down to specific_file_ids that have been requested. This has no
824
impact if specific_file_ids is None.
826
return self.inventory.iter_entries_by_dir(
827
specific_file_ids=specific_file_ids, yield_parents=yield_parents)
829
@deprecated_method(deprecated_in((2, 5, 0)))
830
def get_file_by_path(self, path):
831
return self.get_file(self.path2id(path), path)
607
834
def find_ids_across_trees(filenames, trees, require_versioned=True):
608
835
"""Find the ids corresponding to specified filenames.
610
837
All matches in all trees will be used, and all children of matched
611
838
directories will be used.
689
915
will pass through to InterTree as appropriate.
918
# Formats that will be used to test this InterTree. If both are
919
# None, this InterTree will not be tested (e.g. because a complex
921
_matching_from_tree_format = None
922
_matching_to_tree_format = None
927
def is_compatible(kls, source, target):
928
# The default implementation is naive and uses the public API, so
929
# it works for all trees.
932
def _changes_from_entries(self, source_entry, target_entry,
933
source_path=None, target_path=None):
934
"""Generate a iter_changes tuple between source_entry and target_entry.
936
:param source_entry: An inventory entry from self.source, or None.
937
:param target_entry: An inventory entry from self.target, or None.
938
:param source_path: The path of source_entry, if known. If not known
939
it will be looked up.
940
:param target_path: The path of target_entry, if known. If not known
941
it will be looked up.
942
:return: A tuple, item 0 of which is an iter_changes result tuple, and
943
item 1 is True if there are any changes in the result tuple.
945
if source_entry is None:
946
if target_entry is None:
948
file_id = target_entry.file_id
950
file_id = source_entry.file_id
951
if source_entry is not None:
952
source_versioned = True
953
source_name = source_entry.name
954
source_parent = source_entry.parent_id
955
if source_path is None:
956
source_path = self.source.id2path(file_id)
957
source_kind, source_executable, source_stat = \
958
self.source._comparison_data(source_entry, source_path)
960
source_versioned = False
964
source_executable = None
965
if target_entry is not None:
966
target_versioned = True
967
target_name = target_entry.name
968
target_parent = target_entry.parent_id
969
if target_path is None:
970
target_path = self.target.id2path(file_id)
971
target_kind, target_executable, target_stat = \
972
self.target._comparison_data(target_entry, target_path)
974
target_versioned = False
978
target_executable = None
979
versioned = (source_versioned, target_versioned)
980
kind = (source_kind, target_kind)
981
changed_content = False
982
if source_kind != target_kind:
983
changed_content = True
984
elif source_kind == 'file':
985
if not self.file_content_matches(file_id, file_id, source_path,
986
target_path, source_stat, target_stat):
987
changed_content = True
988
elif source_kind == 'symlink':
989
if (self.source.get_symlink_target(file_id) !=
990
self.target.get_symlink_target(file_id)):
991
changed_content = True
992
# XXX: Yes, the indentation below is wrong. But fixing it broke
993
# test_merge.TestMergerEntriesLCAOnDisk.
994
# test_nested_tree_subtree_renamed_and_modified. We'll wait for
995
# the fix from bzr.dev -- vila 2009026
996
elif source_kind == 'tree-reference':
997
if (self.source.get_reference_revision(file_id, source_path)
998
!= self.target.get_reference_revision(file_id, target_path)):
999
changed_content = True
1000
parent = (source_parent, target_parent)
1001
name = (source_name, target_name)
1002
executable = (source_executable, target_executable)
1003
if (changed_content is not False or versioned[0] != versioned[1]
1004
or parent[0] != parent[1] or name[0] != name[1] or
1005
executable[0] != executable[1]):
1009
return (file_id, (source_path, target_path), changed_content,
1010
versioned, parent, name, kind, executable), changes
694
1012
@needs_read_lock
695
1013
def compare(self, want_unchanged=False, specific_files=None,
696
1014
extra_trees=None, require_versioned=False, include_root=False,
759
1075
:param require_versioned: Raise errors.PathsNotVersionedError if a
760
1076
path in the specific_files list is not versioned in one of
761
1077
source, target or extra_trees.
1078
:param specific_files: An optional list of file paths to restrict the
1079
comparison to. When mapping filenames to ids, all matches in all
1080
trees (including optional extra_trees) are used, and all children
1081
of matched directories are included. The parents in the target tree
1082
of the specific files up to and including the root of the tree are
1083
always evaluated for changes too.
762
1084
:param want_unversioned: Should unversioned files be returned in the
763
1085
output. An unversioned file is defined as one with (False, False)
764
1086
for the versioned pair.
767
1088
lookup_trees = [self.source]
769
1090
lookup_trees.extend(extra_trees)
1091
# The ids of items we need to examine to insure delta consistency.
1092
precise_file_ids = set()
1093
changed_file_ids = []
770
1094
if specific_files == []:
771
1095
specific_file_ids = []
773
1097
specific_file_ids = self.target.paths2ids(specific_files,
774
1098
lookup_trees, require_versioned=require_versioned)
1099
if specific_files is not None:
1100
# reparented or added entries must have their parents included
1101
# so that valid deltas can be created. The seen_parents set
1102
# tracks the parents that we need to have.
1103
# The seen_dirs set tracks directory entries we've yielded.
1104
# After outputting version object in to_entries we set difference
1105
# the two seen sets and start checking parents.
1106
seen_parents = set()
775
1108
if want_unversioned:
776
1109
all_unversioned = sorted([(p.split('/'), p) for p in
777
1110
self.target.extras()
778
1111
if specific_files is None or
779
1112
osutils.is_inside_any(specific_files, p)])
780
all_unversioned = deque(all_unversioned)
1113
all_unversioned = collections.deque(all_unversioned)
782
all_unversioned = deque()
1115
all_unversioned = collections.deque()
784
from_entries_by_dir = list(self.source.inventory.iter_entries_by_dir(
1117
from_entries_by_dir = list(self.source.iter_entries_by_dir(
785
1118
specific_file_ids=specific_file_ids))
786
1119
from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
787
to_entries_by_dir = list(self.target.inventory.iter_entries_by_dir(
1120
to_entries_by_dir = list(self.target.iter_entries_by_dir(
788
1121
specific_file_ids=specific_file_ids))
789
1122
num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
791
# the unversioned path lookup only occurs on real trees - where there
1124
# the unversioned path lookup only occurs on real trees - where there
792
1125
# can be extras. So the fake_entry is solely used to look up
793
1126
# executable it values when execute is not supported.
794
fake_entry = InventoryFile('unused', 'unused', 'unused')
795
for to_path, to_entry in to_entries_by_dir:
796
while all_unversioned and all_unversioned[0][0] < to_path.split('/'):
1127
fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
1128
for target_path, target_entry in to_entries_by_dir:
1129
while (all_unversioned and
1130
all_unversioned[0][0] < target_path.split('/')):
797
1131
unversioned_path = all_unversioned.popleft()
798
to_kind, to_executable, to_stat = \
1132
target_kind, target_executable, target_stat = \
799
1133
self.target._comparison_data(fake_entry, unversioned_path[1])
800
1134
yield (None, (None, unversioned_path[1]), True, (False, False),
802
1136
(None, unversioned_path[0][-1]),
804
(None, to_executable))
805
file_id = to_entry.file_id
806
to_paths[file_id] = to_path
1137
(None, target_kind),
1138
(None, target_executable))
1139
source_path, source_entry = from_data.get(target_entry.file_id,
1141
result, changes = self._changes_from_entries(source_entry,
1142
target_entry, source_path=source_path, target_path=target_path)
1143
to_paths[result[0]] = result[1][1]
807
1144
entry_count += 1
808
changed_content = False
809
from_path, from_entry = from_data.get(file_id, (None, None))
810
from_versioned = (from_entry is not None)
811
if from_entry is not None:
812
from_versioned = True
813
from_name = from_entry.name
814
from_parent = from_entry.parent_id
815
from_kind, from_executable, from_stat = \
816
self.source._comparison_data(from_entry, from_path)
817
1146
entry_count += 1
819
from_versioned = False
823
from_executable = None
824
versioned = (from_versioned, True)
825
to_kind, to_executable, to_stat = \
826
self.target._comparison_data(to_entry, to_path)
827
kind = (from_kind, to_kind)
828
if kind[0] != kind[1]:
829
changed_content = True
830
elif from_kind == 'file':
831
from_size = self.source._file_size(from_entry, from_stat)
832
to_size = self.target._file_size(to_entry, to_stat)
833
if from_size != to_size:
834
changed_content = True
835
elif (self.source.get_file_sha1(file_id, from_path, from_stat) !=
836
self.target.get_file_sha1(file_id, to_path, to_stat)):
837
changed_content = True
838
elif from_kind == 'symlink':
839
if (self.source.get_symlink_target(file_id) !=
840
self.target.get_symlink_target(file_id)):
841
changed_content = True
842
elif from_kind == 'tree-reference':
843
if (self.source.get_reference_revision(file_id, from_path)
844
!= self.target.get_reference_revision(file_id, to_path)):
845
changed_content = True
846
parent = (from_parent, to_entry.parent_id)
847
name = (from_name, to_entry.name)
848
executable = (from_executable, to_executable)
849
1147
if pb is not None:
850
1148
pb.update('comparing files', entry_count, num_entries)
851
if (changed_content is not False or versioned[0] != versioned[1]
852
or parent[0] != parent[1] or name[0] != name[1] or
853
executable[0] != executable[1] or include_unchanged):
854
yield (file_id, (from_path, to_path), changed_content,
855
versioned, parent, name, kind, executable)
1149
if changes or include_unchanged:
1150
if specific_file_ids is not None:
1151
new_parent_id = result[4][1]
1152
precise_file_ids.add(new_parent_id)
1153
changed_file_ids.append(result[0])
1155
# Ensure correct behaviour for reparented/added specific files.
1156
if specific_files is not None:
1157
# Record output dirs
1158
if result[6][1] == 'directory':
1159
seen_dirs.add(result[0])
1160
# Record parents of reparented/added entries.
1161
versioned = result[3]
1163
if not versioned[0] or parents[0] != parents[1]:
1164
seen_parents.add(parents[1])
857
1165
while all_unversioned:
858
1166
# yield any trailing unversioned paths
859
1167
unversioned_path = all_unversioned.popleft()
898
1194
self.source._comparison_data(from_entry, path)
899
1195
kind = (from_kind, None)
900
1196
executable = (from_executable, None)
901
changed_content = True
1197
changed_content = from_kind is not None
902
1198
# the parent's path is necessarily known at this point.
1199
changed_file_ids.append(file_id)
903
1200
yield(file_id, (path, to_path), changed_content, versioned, parent,
904
1201
name, kind, executable)
1202
changed_file_ids = set(changed_file_ids)
1203
if specific_file_ids is not None:
1204
for result in self._handle_precise_ids(precise_file_ids,
1208
def _get_entry(self, tree, file_id):
1209
"""Get an inventory entry from a tree, with missing entries as None.
1211
If the tree raises NotImplementedError on accessing .inventory, then
1212
this is worked around using iter_entries_by_dir on just the file id
1215
:param tree: The tree to lookup the entry in.
1216
:param file_id: The file_id to lookup.
1219
inventory = tree.inventory
1220
except NotImplementedError:
1221
# No inventory available.
1223
iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
1224
return iterator.next()[1]
1225
except StopIteration:
1229
return inventory[file_id]
1230
except errors.NoSuchId:
1233
def _handle_precise_ids(self, precise_file_ids, changed_file_ids,
1234
discarded_changes=None):
1235
"""Fill out a partial iter_changes to be consistent.
1237
:param precise_file_ids: The file ids of parents that were seen during
1239
:param changed_file_ids: The file ids of already emitted items.
1240
:param discarded_changes: An optional dict of precalculated
1241
iter_changes items which the partial iter_changes had not output
1243
:return: A generator of iter_changes items to output.
1245
# process parents of things that had changed under the users
1246
# requested paths to prevent incorrect paths or parent ids which
1247
# aren't in the tree.
1248
while precise_file_ids:
1249
precise_file_ids.discard(None)
1250
# Don't emit file_ids twice
1251
precise_file_ids.difference_update(changed_file_ids)
1252
if not precise_file_ids:
1254
# If the there was something at a given output path in source, we
1255
# have to include the entry from source in the delta, or we would
1256
# be putting this entry into a used path.
1258
for parent_id in precise_file_ids:
1260
paths.append(self.target.id2path(parent_id))
1261
except errors.NoSuchId:
1262
# This id has been dragged in from the source by delta
1263
# expansion and isn't present in target at all: we don't
1264
# need to check for path collisions on it.
1267
old_id = self.source.path2id(path)
1268
precise_file_ids.add(old_id)
1269
precise_file_ids.discard(None)
1270
current_ids = precise_file_ids
1271
precise_file_ids = set()
1272
# We have to emit all of precise_file_ids that have been altered.
1273
# We may have to output the children of some of those ids if any
1274
# directories have stopped being directories.
1275
for file_id in current_ids:
1277
if discarded_changes:
1278
result = discarded_changes.get(file_id)
1283
old_entry = self._get_entry(self.source, file_id)
1284
new_entry = self._get_entry(self.target, file_id)
1285
result, changes = self._changes_from_entries(
1286
old_entry, new_entry)
1289
# Get this parents parent to examine.
1290
new_parent_id = result[4][1]
1291
precise_file_ids.add(new_parent_id)
1293
if (result[6][0] == 'directory' and
1294
result[6][1] != 'directory'):
1295
# This stopped being a directory, the old children have
1297
if old_entry is None:
1298
# Reusing a discarded change.
1299
old_entry = self._get_entry(self.source, file_id)
1300
for child in old_entry.children.values():
1301
precise_file_ids.add(child.file_id)
1302
changed_file_ids.add(result[0])
1306
def file_content_matches(self, source_file_id, target_file_id,
1307
source_path=None, target_path=None, source_stat=None, target_stat=None):
1308
"""Check if two files are the same in the source and target trees.
1310
This only checks that the contents of the files are the same,
1311
it does not touch anything else.
1313
:param source_file_id: File id of the file in the source tree
1314
:param target_file_id: File id of the file in the target tree
1315
:param source_path: Path of the file in the source tree
1316
:param target_path: Path of the file in the target tree
1317
:param source_stat: Optional stat value of the file in the source tree
1318
:param target_stat: Optional stat value of the file in the target tree
1319
:return: Boolean indicating whether the files have the same contents
1321
source_verifier_kind, source_verifier_data = self.source.get_file_verifier(
1322
source_file_id, source_path, source_stat)
1323
target_verifier_kind, target_verifier_data = self.target.get_file_verifier(
1324
target_file_id, target_path, target_stat)
1325
if source_verifier_kind == target_verifier_kind:
1326
return (source_verifier_data == target_verifier_data)
1327
# Fall back to SHA1 for now
1328
if source_verifier_kind != "SHA1":
1329
source_sha1 = self.source.get_file_sha1(source_file_id,
1330
source_path, source_stat)
1332
source_sha1 = source_verifier_data
1333
if target_verifier_kind != "SHA1":
1334
target_sha1 = self.target.get_file_sha1(target_file_id,
1335
target_path, target_stat)
1337
target_sha1 = target_verifier_data
1338
return (source_sha1 == target_sha1)
1340
# Register the generic InterTree implementation with the optimiser registry
# (presumably used as the fall-back when no specialised InterTree subclass
# claims a given (source, target) pair — confirm against InterObject).
InterTree.register_optimiser(InterTree)
1343
class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()

    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            # next() (rather than iterator.next()) works on both Python 2.6+
            # and Python 3 iterators.
            path, ie = next(iterator)
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie

    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        # NOTE(review): `unicode` is Python 2 only; this guard needs attention
        # in any Python 3 port.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        key1 = MultiWalker._path_to_key(path1)
        key2 = MultiWalker._path_to_key(path2)
        # Equivalent of cmp(key1, key2); cmp() was removed in Python 3, and
        # this spelling returns the same -1/0/1 sign for comparable keys.
        return (key1 > key2) - (key1 < key2)

    @staticmethod
    def _path_to_key(path):
        """Return a sort key for path: (dir components, basename)."""
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)

    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)

    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result

    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for _ in range(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        # Remember the entry so it can be matched later
                        # (unless it was already handled out of order).
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again.
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra

    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries

    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            # values() (not itervalues()) so this also runs on Python 3; in
            # Python 2 it merely materialises the (small) deferred set.
            others = sorted(other_extra.values(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for _ in range(idx)]
                other_values.append((other_path, other_ie))
                # Fill in the entry (if any) from each of the later trees;
                # iterate by index to avoid copying the list with a slice.
                for alt_idx in range(idx + 1, len(self._others_extra)):
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values