        return iter(self.inventory)

    def all_file_ids(self):
        """Iterate through all file ids, including ids for missing files."""
        raise NotImplementedError(self.all_file_ids)

    def id2path(self, file_id):
        """Return the path for a file id.

        :raises NoSuchId:
        """
        raise NotImplementedError(self.id2path)
    def is_control_filename(self, filename):
        """True if filename is the name of a control file in this tree.

        :param filename: A filename within the tree. This is a relative path
            from the root of this tree.

        This is true IF and ONLY IF the filename is part of the meta data
        that bzr controls in this tree. I.E. a random .bzr directory placed
        on disk will not be a control file for this tree.
        """
        return self.bzrdir.is_control_filename(filename)

    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
"""Walk the tree in 'by_dir' order.
168
This will yield each entry in the tree as a (path, entry) tuple.
169
The order that they are yielded is:
171
Directories are walked in a depth-first lexicographical order,
172
however, whenever a directory is reached, all of its direct child
173
nodes are yielded in lexicographical order before yielding the
176
For example, in the tree::
186
The yield order (ignoring root) would be::
188
a, f, a/b, a/d, a/b/c, a/d/e, f/g
190
:param yield_parents: If True, yield the parents from the root leading
191
down to specific_file_ids that have been requested. This has no
192
impact if specific_file_ids is None.
194
raise NotImplementedError(self.iter_entries_by_dir)
    def list_files(self, include_root=False, from_dir=None, recursive=True):
        """List all files in this tree.

        :param include_root: Whether to include the entry for the tree root
        :param from_dir: Directory under which to list files
        :param recursive: Whether to list files recursively
        :return: iterator over tuples of (path, versioned, kind, file_id,
            inventory entry)
        """
        raise NotImplementedError(self.list_files)
    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path, entry.file_id

    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
            % self.__class__.__name__)
    def _file_size(self, entry, stat_value):
        raise NotImplementedError(self._file_size)
    def get_file(self, file_id, path=None):
        """Return a file object for the file file_id in the tree.

        If both file_id and path are defined, it is implementation defined as
        to which one is used.
        """
        raise NotImplementedError(self.get_file)
    def get_file_with_stat(self, file_id, path=None):
        """Get a file handle and stat object for file_id.

        The default implementation returns (self.get_file, None) for backwards
        compatibility.

        :param file_id: The file id to read.
        :param path: The path of the file, if it is known.
        :return: A tuple (file_handle, stat_value_or_None). If the tree has
            no stat facility, or need for a stat cache feedback during commit,
            it may return None for the second element of the tuple.
        """
        return (self.get_file(file_id, path), None)
    def get_file_text(self, file_id, path=None):
        """Return the byte content of a file.

        :param file_id: The file_id of the file.
        :param path: The path of the file.

        If both file_id and path are supplied, an implementation may use
        either one.

        :returns: A single byte string for the whole file.
        """
        my_file = self.get_file(file_id, path)
        try:
            return my_file.read()
        finally:
            my_file.close()
    def get_file_lines(self, file_id, path=None):
        """Return the content of a file, as lines.

        :param file_id: The file_id of the file.
        :param path: The path of the file.

        If both file_id and path are supplied, an implementation may use
        either one.
        """
        return osutils.split_lines(self.get_file_text(file_id, path))
    def get_file_verifier(self, file_id, path=None, stat_value=None):
        """Return a verifier for a file.

        The default implementation returns a sha1.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        :param stat_value: Optional stat value for the object
        :return: Tuple with verifier name and verifier data
        """
        return ("SHA1", self.get_file_sha1(file_id, path=path,
            stat_value=stat_value))
    def get_file_sha1(self, file_id, path=None, stat_value=None):
        """Return the SHA1 file for a file.

        :note: callers should use get_file_verifier instead
            where possible, as the underlying repository implementation may
            have quicker access to a non-sha1 verifier.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        :param stat_value: Optional stat value for the object
        """
        raise NotImplementedError(self.get_file_sha1)
    def get_file_mtime(self, file_id, path=None):
        """Return the modification time for a file.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        """
        raise NotImplementedError(self.get_file_mtime)
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 last_revision_base)
    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)
    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                         self.get_file_lines(file_id))
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self.get_file_revision(file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision
    def _check_retrieved(self, ie, f):
        if not __debug__:
            return
        fp = osutils.fingerprint_file(f)
        f.seek(0)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise errors.BzrError(
                        "mismatched size for file %r in %r" %
                        (ie.file_id, self._store),
                        ["inventory expects %d bytes" % ie.text_size,
                         "file is actually %d bytes" % fp['size'],
                         "store is probably damaged/corrupt"])

        if ie.text_sha1 != fp['sha1']:
            raise errors.BzrError("wrong SHA-1 for file %r in %r" %
                    (ie.file_id, self._store),
                    ["inventory expects %s" % ie.text_sha1,
                     "file is actually %s" % fp['sha1'],
                     "store is probably damaged/corrupt"])
    def path2id(self, path):
        """Return the id for path in this tree."""
        raise NotImplementedError(self.path2id)
    def paths2ids(self, paths, trees=[], require_versioned=True):
        """Return all the ids that can be reached by walking from paths.

        Each path is looked up in this tree and any extras provided in
        trees, and this is repeated recursively: the children in an extra tree
        of a directory that has been renamed under a provided path in this
        tree are also included.
        """
        return find_ids_across_trees(paths, [self] + list(trees), require_versioned)

    def walkdirs(self, prefix=""):
        raise NotImplementedError(self.walkdirs)
    def supports_content_filtering(self):
        return False
    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
            or None if unknown
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        """
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            trace.note(gettext("*** {0} content-filter: {1} => {2!r}").format(path,prefs,stk))
        return stk
    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        """
        if self.supports_content_filtering():
            return lambda path, file_id: \
                    self._content_filter_stack(path, file_id)
        else:
            return None
    def iter_search_rules(self, path_names, pref_names=None,
        _default_searcher=None):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
          Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
          See _RulesSearcher.get_items for details on the tuple sequence.
        """
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)
    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher
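

# Illustrative sketch, not part of bzrlib: a minimal example of how the
# read-only Tree API above is typically consumed.  The helper name
# _example_list_file_sha1s is hypothetical, and `tree` may be any Tree
# implementation (a RevisionTree, WorkingTree, ...).
def _example_list_file_sha1s(tree):
    """Return a list of (path, sha1) tuples for every versioned file."""
    result = []
    tree.lock_read()
    try:
        for path, entry in tree.iter_entries_by_dir():
            if entry.kind == 'file':
                result.append((path, tree.get_file_sha1(entry.file_id, path)))
    finally:
        tree.unlock()
    return result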


class InventoryTree(Tree):
    """A tree that relies on an inventory for its metadata.

    Trees contain an `Inventory` object, and also know how to retrieve
    file texts mentioned in the inventory, either from a working
    directory or from a store.

    It is possible for trees to contain files that are not described
    in their inventory or vice versa; for this use `filenames()`.

    Subclasses should set the _inventory attribute, which is considered
    private to external API users.
    """
    def get_canonical_inventory_paths(self, paths):
        """Like get_canonical_inventory_path() but works on multiple items.

        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case
            insensitively.
        """
        return list(self._yield_canonical_inventory_paths(paths))
    def get_canonical_inventory_path(self, path):
        """Returns the first inventory item that case-insensitively matches path.

        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches case-insensitively, it is implementation
        defined which is returned.

        If no path matches case-insensitively, the input path is returned, but
        with as many path entries that do exist changed to their canonical
        form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        """
        return self._yield_canonical_inventory_paths([path]).next()
    def _yield_canonical_inventory_paths(self, paths):
        for path in paths:
            # First, if the path as specified exists exactly, just use it.
            if self.path2id(path) is not None:
                yield path
                continue
            # Otherwise walk down from the root, matching each path element
            # case-insensitively against the children of the current
            # directory.
            cur_id = self.get_root_id()
            cur_path = ''
            bit_iter = iter(path.split("/"))
            for elt in bit_iter:
                lelt = elt.lower()
                new_path = None
                for child in self.iter_children(cur_id):
                    try:
                        # XXX: it seem like if the child is known to be in the
                        # tree, we shouldn't need to go from its id back to
                        # its path -- mbp 2010-02-11
                        # XXX: it seems like we could be more efficient
                        # by just directly looking up the original name and
                        # only then searching all children; also by not
                        # chopping paths so much. -- mbp 2010-02-11
                        child_base = os.path.basename(self.id2path(child))
                        if (child_base == elt):
                            # if we found an exact match, we can stop now; if
                            # we found an approximate match we need to keep
                            # searching because there might be an exact match
                            # later.
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                            break
                        elif child_base.lower() == lelt:
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                    except errors.NoSuchId:
                        # before a change is committed we can see this error...
                        continue
                if new_path:
                    cur_path = new_path
                else:
                    # got to the end of this directory and no entries matched.
                    # Return what matched so far, plus the rest as specified.
                    cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
                    break
            yield cur_path
    def _get_inventory(self):
        return self._inventory

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")
    def path2id(self, path):
        """Return the id for path in this tree."""
        return self._inventory.path2id(path)

    def id2path(self, file_id):
        """Return the path for a file id.

        :raises NoSuchId:
        """
        return self.inventory.id2path(file_id)
    def has_id(self, file_id):
        return self.inventory.has_id(file_id)

    def has_or_had_id(self, file_id):
        return self.inventory.has_id(file_id)

    def all_file_ids(self):
        return set(self.inventory)

    @deprecated_method(deprecated_in((2, 4, 0)))
    def __iter__(self):
        return iter(self.inventory)
    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.

        :return: set of paths.
        """
        # NB: we specifically *don't* call self.has_filename, because for
        # WorkingTrees that can indicate files that exist on disk but that
        # are not versioned.
        pred = self.inventory.has_filename
        return set((p for p in paths if not pred(p)))
    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:
        See Tree.iter_entries_by_dir for details.

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        """
        return self.inventory.iter_entries_by_dir(
            specific_file_ids=specific_file_ids, yield_parents=yield_parents)
    def get_file_by_path(self, path):
        return self.get_file(self._inventory.path2id(path), path)
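

# Illustrative sketch, not part of bzrlib: resolving user-supplied paths
# against an InventoryTree with the case-insensitive helpers above.  The
# helper name _example_resolve_user_paths is hypothetical.
def _example_resolve_user_paths(tree, user_paths):
    """Return (canonical_paths, unversioned_subset) for the given paths."""
    tree.lock_read()
    try:
        canonical = tree.get_canonical_inventory_paths(user_paths)
        unversioned = tree.filter_unversioned_files(canonical)
        return canonical, unversioned
    finally:
        tree.unlock()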


######################################################################

# TODO: Merge these two functions into a single one that can operate
# on either a whole tree or a set of files.

# TODO: Return the diff in order by filename, not by category or in
# random order.  Can probably be done by lock-stepping through the
# filenames from both trees.


def file_status(filename, old_tree, new_tree):
    """Return single-letter status, old and new names for a file.

    The complexity here is in deciding how to represent renames;
    many complex cases are possible.
    """
    old_inv = old_tree.inventory
    new_inv = new_tree.inventory
    new_id = new_inv.path2id(filename)
    old_id = old_inv.path2id(filename)

    if not new_id and not old_id:
        # easy: doesn't exist in either; not versioned at all
        if new_tree.is_ignored(filename):
            return 'I', None, None
        else:
            return '?', None, None
    elif new_id:
        # There is now a file of this name, great.
        pass
    else:
        # There is no longer a file of this name, but we can describe
        # what happened to the file that used to have
        # this name. There are two possibilities: either it was
        # deleted entirely, or renamed.
        if new_inv.has_id(old_id):
            return 'X', old_inv.id2path(old_id), new_inv.id2path(old_id)
        else:
            return 'D', old_inv.id2path(old_id), None

    # if the file_id is new in this revision, it is added
    if new_id and not old_inv.has_id(new_id):
        return 'A'

    # if there used to be a file of this name, but that ID has now
    # disappeared, it is deleted
    if old_id and not new_inv.has_id(old_id):
        return 'D'


def find_renames(old_inv, new_inv):
    for file_id in old_inv:
        if file_id not in new_inv:
            continue
        old_name = old_inv.id2path(file_id)
        new_name = new_inv.id2path(file_id)
        if old_name != new_name:
            yield (old_name, new_name)
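

# Illustrative sketch, not part of bzrlib: find_renames() yields
# (old_name, new_name) pairs, so a simple rename report can be built from
# two inventories.  The helper name _example_rename_report is hypothetical.
def _example_rename_report(old_tree, new_tree):
    """Return a list of 'old => new' strings for renamed files."""
    lines = []
    for old_name, new_name in find_renames(old_tree.inventory,
                                           new_tree.inventory):
        lines.append('%s => %s' % (old_name, new_name))
    return lines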


def find_ids_across_trees(filenames, trees, require_versioned=True):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, and all children of matched
    directories will be used.
    """
    which will pass through to InterTree as appropriate.
    """

    # Formats that will be used to test this InterTree. If both are
    # None, this InterTree will not be tested (e.g. because a complex
    # setup is required in that case.)
    _matching_from_tree_format = None
    _matching_to_tree_format = None

    _optimisers = []

    @classmethod
    def is_compatible(kls, source, target):
        # The default implementation is naive and uses the public API, so
        # it works for all trees.
        return True
    def _changes_from_entries(self, source_entry, target_entry,
        source_path=None, target_path=None):
        """Generate a iter_changes tuple between source_entry and target_entry.

        :param source_entry: An inventory entry from self.source, or None.
        :param target_entry: An inventory entry from self.target, or None.
        :param source_path: The path of source_entry, if known. If not known
            it will be looked up.
        :param target_path: The path of target_entry, if known. If not known
            it will be looked up.
        :return: A tuple, item 0 of which is an iter_changes result tuple, and
            item 1 is True if there are any changes in the result tuple.
        """
        if source_entry is None:
            if target_entry is None:
                return None
            file_id = target_entry.file_id
        else:
            file_id = source_entry.file_id
        if source_entry is not None:
            source_versioned = True
            source_name = source_entry.name
            source_parent = source_entry.parent_id
            if source_path is None:
                source_path = self.source.id2path(file_id)
            source_kind, source_executable, source_stat = \
                self.source._comparison_data(source_entry, source_path)
        else:
            source_versioned = False
            source_name = None
            source_parent = None
            source_kind = None
            source_executable = None
        if target_entry is not None:
            target_versioned = True
            target_name = target_entry.name
            target_parent = target_entry.parent_id
            if target_path is None:
                target_path = self.target.id2path(file_id)
            target_kind, target_executable, target_stat = \
                self.target._comparison_data(target_entry, target_path)
        else:
            target_versioned = False
            target_name = None
            target_parent = None
            target_kind = None
            target_executable = None
        versioned = (source_versioned, target_versioned)
        kind = (source_kind, target_kind)
        changed_content = False
        if source_kind != target_kind:
            changed_content = True
        elif source_kind == 'file':
            if not self.file_content_matches(file_id, file_id, source_path,
                    target_path, source_stat, target_stat):
                changed_content = True
        elif source_kind == 'symlink':
            if (self.source.get_symlink_target(file_id) !=
                self.target.get_symlink_target(file_id)):
                changed_content = True
            # XXX: Yes, the indentation below is wrong. But fixing it broke
            # test_merge.TestMergerEntriesLCAOnDisk.
            # test_nested_tree_subtree_renamed_and_modified. We'll wait for
            # the fix from bzr.dev -- vila 2009026
            elif source_kind == 'tree-reference':
                if (self.source.get_reference_revision(file_id, source_path)
                    != self.target.get_reference_revision(file_id, target_path)):
                    changed_content = True
        parent = (source_parent, target_parent)
        name = (source_name, target_name)
        executable = (source_executable, target_executable)
        if (changed_content is not False or versioned[0] != versioned[1]
            or parent[0] != parent[1] or name[0] != name[1] or
            executable[0] != executable[1]):
            changes = True
        else:
            changes = False
        return (file_id, (source_path, target_path), changed_content,
                versioned, parent, name, kind, executable), changes
    @needs_read_lock
    def compare(self, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):
        """Return the changes from source to target.

        :return: A TreeDelta.
        """
        return delta._compare_trees(self.source, self.target, want_unchanged,
            specific_files, include_root, extra_trees=extra_trees,
            require_versioned=require_versioned,
            want_unversioned=want_unversioned)

    def iter_changes(self, include_unchanged=False,
                      specific_files=None, pb=None, extra_trees=[],
                      require_versioned=True, want_unversioned=False):
        """Generate an iterator of changes between trees.

        :param require_versioned: Raise errors.PathsNotVersionedError if a
            path in the specific_files list is not versioned in one of
            source, target or extra_trees.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children
            of matched directories are included. The parents in the target tree
            of the specific files up to and including the root of the tree are
            always evaluated for changes too.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        """
        lookup_trees = [self.source]
        if extra_trees:
            lookup_trees.extend(extra_trees)
        # The ids of items we need to examine to insure delta consistency.
        precise_file_ids = set()
        changed_file_ids = []
        if specific_files == []:
            specific_file_ids = []
        else:
            specific_file_ids = self.target.paths2ids(specific_files,
                lookup_trees, require_versioned=require_versioned)
        if specific_files is not None:
            # reparented or added entries must have their parents included
            # so that valid deltas can be created. The seen_parents set
            # tracks the parents that we need to have.
            # The seen_dirs set tracks directory entries we've yielded.
            # After outputting version object in to_entries we set difference
            # the two seen sets and start checking parents.
            seen_parents = set()
            seen_dirs = set()
        if want_unversioned:
            all_unversioned = sorted([(p.split('/'), p) for p in
                                     self.target.extras()
                if specific_files is None or
                    osutils.is_inside_any(specific_files, p)])
            all_unversioned = collections.deque(all_unversioned)
        else:
            all_unversioned = collections.deque()
        to_paths = {}
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        entry_count = 0
        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable it values when execute is not supported.
        fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
        for target_path, target_entry in to_entries_by_dir:
            while (all_unversioned and
                all_unversioned[0][0] < target_path.split('/')):
                unversioned_path = all_unversioned.popleft()
                target_kind, target_executable, target_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, None),
                    (None, unversioned_path[0][-1]),
                    (None, target_kind),
                    (None, target_executable))
            source_path, source_entry = from_data.get(target_entry.file_id,
                (None, None))
            result, changes = self._changes_from_entries(source_entry,
                target_entry, source_path=source_path, target_path=target_path)
            to_paths[result[0]] = result[1][1]
            entry_count += 1
            if result[3][0]:
                entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            if changes or include_unchanged:
                if specific_file_ids is not None:
                    new_parent_id = result[4][1]
                    precise_file_ids.add(new_parent_id)
                    changed_file_ids.append(result[0])
                yield result
            # Ensure correct behaviour for reparented/added specific files.
            if specific_files is not None:
                # Record output dirs
                if result[6][1] == 'directory':
                    seen_dirs.add(result[0])
                # Record parents of reparented/added entries.
                versioned = result[3]
                parents = result[4]
                if not versioned[0] or parents[0] != parents[1]:
                    seen_parents.add(parents[1])
        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(fake_entry, unversioned_path[1])
            yield (None, (None, unversioned_path[1]), True, (False, False),
                (None, None),
                (None, unversioned_path[0][-1]),
                (None, to_kind),
                (None, to_executable))
        # Yield all remaining source paths
        for path, from_entry in from_entries_by_dir:
            file_id = from_entry.file_id
            if file_id in to_paths:
                # already returned
                continue
            if not self.target.has_id(file_id):
                # common case - paths we have not emitted are not present in
                # target.
                to_path = None
            else:
                to_path = self.target.id2path(file_id)
            entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            versioned = (True, False)
            parent = (from_entry.parent_id, None)
            name = (from_entry.name, None)
            from_kind, from_executable, stat_value = \
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            changed_file_ids.append(file_id)
            yield(file_id, (path, to_path), changed_content, versioned, parent,
                  name, kind, executable)
        changed_file_ids = set(changed_file_ids)
        if specific_file_ids is not None:
            for result in self._handle_precise_ids(precise_file_ids,
                changed_file_ids):
                yield result
    def _get_entry(self, tree, file_id):
        """Get an inventory entry from a tree, with missing entries as None.

        If the tree raises NotImplementedError on accessing .inventory, then
        this is worked around using iter_entries_by_dir on just the file id
        desired.

        :param tree: The tree to lookup the entry in.
        :param file_id: The file_id to lookup.
        """
        try:
            inventory = tree.inventory
        except NotImplementedError:
            # No inventory available.
            try:
                iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
                return iterator.next()[1]
            except StopIteration:
                return None
        else:
            try:
                return inventory[file_id]
            except errors.NoSuchId:
                return None
    def _handle_precise_ids(self, precise_file_ids, changed_file_ids,
        discarded_changes=None):
        """Fill out a partial iter_changes to be consistent.

        :param precise_file_ids: The file ids of parents that were seen during
            the iter_changes.
        :param changed_file_ids: The file ids of already emitted items.
        :param discarded_changes: An optional dict of precalculated
            iter_changes items which the partial iter_changes had not output
            but had calculated.
        :return: A generator of iter_changes items to output.
        """
        # process parents of things that had changed under the users
        # requested paths to prevent incorrect paths or parent ids which
        # aren't in the tree.
        while precise_file_ids:
            precise_file_ids.discard(None)
            # Don't emit file_ids twice
            precise_file_ids.difference_update(changed_file_ids)
            if not precise_file_ids:
                break
            # If there was something at a given output path in source, we
            # have to include the entry from source in the delta, or we would
            # be putting this entry into a used path.
            paths = []
            for parent_id in precise_file_ids:
                try:
                    paths.append(self.target.id2path(parent_id))
                except errors.NoSuchId:
                    # This id has been dragged in from the source by delta
                    # expansion and isn't present in target at all: we don't
                    # need to check for path collisions on it.
                    pass
            for path in paths:
                old_id = self.source.path2id(path)
                precise_file_ids.add(old_id)
            precise_file_ids.discard(None)
            current_ids = precise_file_ids
            precise_file_ids = set()
            # We have to emit all of precise_file_ids that have been altered.
            # We may have to output the children of some of those ids if any
            # directories have stopped being directories.
            for file_id in current_ids:
                if discarded_changes:
                    result = discarded_changes.get(file_id)
                    old_entry = None
                else:
                    result = None
                if result is None:
                    old_entry = self._get_entry(self.source, file_id)
                    new_entry = self._get_entry(self.target, file_id)
                    result, changes = self._changes_from_entries(
                        old_entry, new_entry)
                else:
                    changes = True
                # Get this parents parent to examine.
                new_parent_id = result[4][1]
                precise_file_ids.add(new_parent_id)
                if changes:
                    if (result[6][0] == 'directory' and
                        result[6][1] != 'directory'):
                        # This stopped being a directory, the old children have
                        # to be included.
                        if old_entry is None:
                            # Reusing a discarded change.
                            old_entry = self._get_entry(self.source, file_id)
                        for child in old_entry.children.values():
                            precise_file_ids.add(child.file_id)
                    changed_file_ids.add(result[0])
                    yield result
    def file_content_matches(self, source_file_id, target_file_id,
            source_path=None, target_path=None, source_stat=None, target_stat=None):
        """Check if two files are the same in the source and target trees.

        This only checks that the contents of the files are the same,
        it does not touch anything else.

        :param source_file_id: File id of the file in the source tree
        :param target_file_id: File id of the file in the target tree
        :param source_path: Path of the file in the source tree
        :param target_path: Path of the file in the target tree
        :param source_stat: Optional stat value of the file in the source tree
        :param target_stat: Optional stat value of the file in the target tree
        :return: Boolean indicating whether the files have the same contents
        """
        source_verifier_kind, source_verifier_data = self.source.get_file_verifier(
            source_file_id, source_path, source_stat)
        target_verifier_kind, target_verifier_data = self.target.get_file_verifier(
            target_file_id, target_path, target_stat)
        if source_verifier_kind == target_verifier_kind:
            return (source_verifier_data == target_verifier_data)
        # Fall back to SHA1 for now
        if source_verifier_kind != "SHA1":
            source_sha1 = self.source.get_file_sha1(source_file_id,
                    source_path, source_stat)
        else:
            source_sha1 = source_verifier_data
        if target_verifier_kind != "SHA1":
            target_sha1 = self.target.get_file_sha1(target_file_id,
                    target_path, target_stat)
        else:
            target_sha1 = target_verifier_data
        return (source_sha1 == target_sha1)


InterTree.register_optimiser(InterTree)
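

# Illustrative sketch, not part of bzrlib: consuming the 8-tuples yielded by
# InterTree.iter_changes().  Each result is (file_id, (source_path,
# target_path), changed_content, versioned, parent, name, kind, executable),
# where the paired fields hold (source, target) values.  The helper name
# _example_modified_target_paths is hypothetical; both trees are assumed to
# be read-locked by the caller.
def _example_modified_target_paths(source_tree, target_tree):
    """Return target paths whose content differs between the two trees."""
    intertree = InterTree.get(source_tree, target_tree)
    modified = []
    for (file_id, paths, changed_content, versioned, parent, name, kind,
         executable) in intertree.iter_changes():
        if changed_content and paths[1] is not None:
            modified.append(paths[1])
    return modified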

class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.
    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()
    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie
    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal,
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
                   MultiWalker._path_to_key(path2))
    @staticmethod
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)
    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)
"""Match up the values in the different trees."""
1455
for result in self._walk_master_tree():
1457
self._finish_others()
1458
for result in self._walk_others():
1461
    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()
        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed
        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries
            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra
    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries
    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values
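

# Illustrative sketch, not part of bzrlib: using MultiWalker to find paths
# that are versioned in a master tree but missing from another tree.
# iter_all() yields (path, file_id, master_ie, other_values) where
# other_values holds one (path, ie) pair per other tree, or (None, None) if
# that tree has no such file id.  The helper name is hypothetical.
def _example_paths_only_in_master(master_tree, other_tree):
    """Return master-tree paths whose file ids are absent from other_tree."""
    only_in_master = []
    walker = MultiWalker(master_tree, [other_tree])
    for path, file_id, master_ie, other_values in walker.iter_all():
        if master_ie is not None and other_values[0] == (None, None):
            only_in_master.append(path)
    return only_in_master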


# This was deprecated before 0.12, but did not have an official warning
@symbol_versioning.deprecated_function(symbol_versioning.zero_twelve)
def RevisionTree(*args, **kwargs):
    """RevisionTree has moved to bzrlib.revisiontree.RevisionTree()

    Accessing it as bzrlib.tree.RevisionTree has been deprecated as of
    bzr 0.12.
    """
    from bzrlib.revisiontree import RevisionTree as _RevisionTree
    return _RevisionTree(*args, **kwargs)
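

# Illustrative sketch, not part of bzrlib: reading a file through
# Tree.get_file_with_stat() so the tree can hand back stat information for
# its own caching (see the docstring above).  The helper name is
# hypothetical.
def _example_read_file_with_stat(tree, file_id):
    """Return (content, stat_value_or_None) for file_id in tree."""
    file_obj, stat_value = tree.get_file_with_stat(file_id)
    try:
        return file_obj.read(), stat_value
    finally:
        file_obj.close()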