            specific_files=specific_files,
            extra_trees=extra_trees,
            require_versioned=require_versioned,
            include_root=include_root,
            want_unversioned=want_unversioned,
            )

    def iter_changes(self, from_tree, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=None,
                     require_versioned=True, want_unversioned=False):
        intertree = InterTree.get(from_tree, self)
        return intertree.iter_changes(include_unchanged, specific_files, pb,
            extra_trees, require_versioned, want_unversioned=want_unversioned)

    def conflicts(self):
        """Get a list of the conflicts in the tree.

        Each conflict is an instance of bzrlib.conflicts.Conflict.
        """
        return _mod_conflicts.ConflictList()

    def extras(self):
        """For trees that can have unversioned files, return all such paths."""
        return []

    def get_parent_ids(self):
        """Get the parent ids for this tree.

        :return: a list of parent ids. [] is returned to indicate
            a tree with no parents.
        :raises: BzrError if the parents are not known.
        """
        raise NotImplementedError(self.get_parent_ids)

    def has_filename(self, filename):
        """True if the tree has given filename."""
        raise NotImplementedError(self.has_filename)

    def has_id(self, file_id):
        return self.inventory.has_id(file_id)

    def __contains__(self, file_id):
        return self.has_id(file_id)

    def has_or_had_id(self, file_id):
        return self.inventory.has_id(file_id)

    def is_ignored(self, filename):
        """Check whether the filename is ignored by this tree.

        :param filename: The relative filename within the tree.
        :return: True if the filename is ignored.
        """
        return False

    def __iter__(self):
        return iter(self.inventory)

    def all_file_ids(self):
        """Iterate through all file ids, including ids for missing files."""
        return set(self.inventory)

    def id2path(self, file_id):
        """Return the path for a file id.

        :raises NoSuchId:
        """
        return self.inventory.id2path(file_id)

    def is_control_filename(self, filename):
        """True if filename is the name of a control file in this tree.

        :param filename: A filename within the tree. This is a relative path
            from the root of this tree.

        This is true IF and ONLY IF the filename is part of the meta data
        that bzr controls in this tree. I.E. a random .bzr directory placed
        on disk will not be a control file for this tree.
        """
        return self.bzrdir.is_control_filename(filename)

    def iter_entries_by_dir(self, specific_file_ids=None):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the
        grandchildren.

        For example, in the tree::

           a/
             b/
               c
             d/
               e
           f/
             g

        The yield order (ignoring root) would be::
          a, f, a/b, a/d, a/b/c, a/d/e, f/g
        """
        return self.inventory.iter_entries_by_dir(
            specific_file_ids=specific_file_ids)

    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path, entry.file_id

    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
            % self.__class__.__name__)

    def stored_kind(self, file_id):
        """File kind stored for this file_id.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        """
        return self.kind(file_id)

    def path_content_summary(self, path):
        """Get a summary of the information about path.

        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file, None otherwise.
            exec is None unless kind is file and the platform supports the 'x'
                bit.
            sha1-or-link is the link target if kind is symlink, or the sha1 if
                it can be obtained without reading the file.
        """
        raise NotImplementedError(self.path_content_summary)
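
    # Purely as an illustration of the summary tuple described above (these
    # values are made up, not taken from bzrlib): a working tree might report
    # ('file', 1432, True, None) for an executable file whose sha1 is not yet
    # cached, or ('symlink', None, None, 'target/path') for a symlink.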

    def get_reference_revision(self, file_id, path=None):
        raise NotImplementedError("Tree subclass %s must implement "
                                  "get_reference_revision"
                                  % self.__class__.__name__)

    def _comparison_data(self, entry, path):
        """Return a tuple of kind, executable, stat_value for a file.

        entry may be None if there is no inventory entry for the file, but
        path must always be supplied.

        kind is None if there is no file present (even if an inventory id is
        present).  executable is False for non-file entries.
        """
        raise NotImplementedError(self._comparison_data)

    def _file_size(self, entry, stat_value):
        raise NotImplementedError(self._file_size)

    def _get_inventory(self):
        return self._inventory

    def get_file(self, file_id, path=None):
        """Return a file object for the file file_id in the tree.

        If both file_id and path are defined, it is implementation defined as
        to which one is used.
        """
        raise NotImplementedError(self.get_file)

    def get_file_with_stat(self, file_id, path=None):
        """Get a file handle and stat object for file_id.

        The default implementation returns (self.get_file, None) for backwards
        compatibility.

        :param file_id: The file id to read.
        :param path: The path of the file, if it is known.
        :return: A tuple (file_handle, stat_value_or_None). If the tree has
            no stat facility, or need for a stat cache feedback during commit,
            it may return None for the second element of the tuple.
        """
        return (self.get_file(file_id, path), None)

    def get_file_text(self, file_id, path=None):
        """Return the byte content of a file.

        :param file_id: The file_id of the file.
        :param path: The path of the file.
        If both file_id and path are supplied, an implementation may use
        either one.
        """
        my_file = self.get_file(file_id, path)
        try:
            return my_file.read()
        finally:
            my_file.close()

    def get_file_lines(self, file_id, path=None):
        """Return the content of a file, as lines.

        :param file_id: The file_id of the file.
        :param path: The path of the file.
        If both file_id and path are supplied, an implementation may use
        either one.
        """
        return osutils.split_lines(self.get_file_text(file_id, path))

    def get_file_mtime(self, file_id, path=None):
        """Return the modification time for a file.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        """
        raise NotImplementedError(self.get_file_mtime)

    def get_file_size(self, file_id):
        """Return the size of a file in bytes.

        This applies only to regular files. If invoked on directories or
        symlinks, it will return None.
        :param file_id: The file-id of the file
        """
        raise NotImplementedError(self.get_file_size)

    def get_file_by_path(self, path):
        return self.get_file(self._inventory.path2id(path), path)

    def iter_files_bytes(self, desired_files):
        """Iterate through file contents.

        Files will not necessarily be returned in the order they occur in
        desired_files. No specific order is guaranteed.

        Yields pairs of identifier, bytes_iterator. identifier is an opaque
        value supplied by the caller as part of desired_files. It should
        uniquely identify the file version in the caller's context. (Examples:
        an index number or a TreeTransform trans_id.)

        bytes_iterator is an iterable of bytestrings for the file. The
        kind of iterable and length of the bytestrings are unspecified, but for
        this implementation, it is a tuple containing a single bytestring with
        the complete text of the file.

        :param desired_files: a list of (file_id, identifier) pairs
        """
        for file_id, identifier in desired_files:
            # We wrap the string in a tuple so that we can return an iterable
            # of bytestrings. (Technically, a bytestring is also an iterable
            # of bytestrings, but iterating through each character is not
            # performant.)
            cur_file = (self.get_file_text(file_id),)
            yield identifier, cur_file
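
    # Illustrative usage sketch (not part of bzrlib): the identifiers are
    # opaque to the tree, so a caller can use whatever suits it, e.g.
    #   for name, chunks in tree.iter_files_bytes(
    #           [(file_id_a, 'a.txt'), (file_id_b, 'b.txt')]):
    #       open(name, 'wb').writelines(chunks)
    # where file_id_a/file_id_b and the output names are hypothetical.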

    def get_symlink_target(self, file_id):
        """Get the target for a given file_id.

        It is assumed that the caller already knows that file_id is referencing
        a symlink.
        :param file_id: Handle for the symlink entry.
        :return: The path the symlink points to.
        """
        raise NotImplementedError(self.get_symlink_target)

    def get_canonical_inventory_paths(self, paths):
        """Like get_canonical_inventory_path() but works on multiple items.

        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case
            insensitively.
        """
        return list(self._yield_canonical_inventory_paths(paths))

    def get_canonical_inventory_path(self, path):
        """Returns the first inventory item that case-insensitively matches path.

        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches case-insensitively, it is implementation
        defined which is returned.

        If no path matches case-insensitively, the input path is returned, but
        with as many path entries that do exist changed to their canonical
        form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        """
        return self._yield_canonical_inventory_paths([path]).next()
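
    # Illustrative example (hypothetical names): if 'README' is versioned,
    #   tree.get_canonical_inventory_path('readme') == 'README'
    # while a completely unknown path is returned unchanged, with any leading
    # elements that do exist folded to their canonical case.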

    def _yield_canonical_inventory_paths(self, paths):
        for path in paths:
            # First, if the path as specified exists exactly, just use it.
            if self.path2id(path) is not None:
                yield path
                continue
            # go walkin...
            cur_id = self.get_root_id()
            cur_path = ''
            bit_iter = iter(path.split("/"))
            for elt in bit_iter:
                lelt = elt.lower()
                for child in self.iter_children(cur_id):
                    try:
                        child_base = os.path.basename(self.id2path(child))
                        if child_base.lower() == lelt:
                            cur_id = child
                            cur_path = osutils.pathjoin(cur_path, child_base)
                            break
                    except errors.NoSuchId:
                        # before a change is committed we can see this error...
                        continue
                else:
                    # got to the end of this directory and no entries matched.
                    # Return what matched so far, plus the rest as specified.
                    cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
                    break
            yield cur_path

    def get_root_id(self):
        """Return the file_id for the root of this tree."""
        raise NotImplementedError(self.get_root_id)

    def annotate_iter(self, file_id,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.

        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.
        :param file_id: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of
            this value.
        """
        raise NotImplementedError(self.annotate_iter)

    def _get_plan_merge_data(self, file_id, other, base):
        from bzrlib import versionedfile
        vf = versionedfile._PlanMergeVersionedFile(file_id)
        last_revision_a = self._get_file_revision(file_id, vf, 'this:')
        last_revision_b = other._get_file_revision(file_id, vf, 'other:')
        if base is None:
            last_revision_base = None
        else:
            last_revision_base = base._get_file_revision(file_id, vf, 'base:')
        return vf, last_revision_a, last_revision_b, last_revision_base

    def plan_file_merge(self, file_id, other, base=None):
        """Generate a merge plan based on annotations.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,
                             last_revision_base)

    def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 last_revision_base)

    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)

    @staticmethod
    def _file_revision(revision_tree, file_id):
        """Determine the revision associated with a file in a given tree."""
        revision_tree.lock_read()
        try:
            return revision_tree.inventory[file_id].revision
        finally:
            revision_tree.unlock()

    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                         self.get_file(file_id).readlines())
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self._file_revision(self, file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def _check_retrieved(self, ie, f):
        if not __debug__:
            return
        fp = fingerprint_file(f)
        f.seek(0)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise BzrError("mismatched size for file %r in %r" % (ie.file_id, self._store),
                        ["inventory expects %d bytes" % ie.text_size,
                         "file is actually %d bytes" % fp['size'],
                         "store is probably damaged/corrupt"])

    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.

        :return: set of paths.
        """
        # NB: we specifically *don't* call self.has_filename, because for
        # WorkingTrees that can indicate files that exist on disk but that
        # are not versioned.
        pred = self.inventory.has_filename
        return set((p for p in paths if not pred(p)))

    def walkdirs(self, prefix=""):
        """Walk the contents of this tree from path down.

        This yields all the data about the contents of a directory at a time.
        After each directory has been yielded, if the caller has mutated the
        list to exclude some directories, they are then not descended into.

        The data yielded is of the form:
        ((directory-relpath, directory-path-from-root, directory-fileid),
        [(relpath, basename, kind, lstat, path_from_tree_root, file_id,
          versioned_kind), ...]),
         - directory-relpath is the containing dirs relpath from prefix
         - directory-path-from-root is the containing dirs path from /
         - directory-fileid is the id of the directory if it is versioned.
         - relpath is the relative path within the subtree being walked.
         - basename is the basename of the path.
         - kind is the kind of the file now. If unknown then the file is not
           present within the tree - but it may be recorded as versioned. See
           versioned_kind.
         - lstat is the stat data *if* the file was statted.
         - path_from_tree_root is the path from the root of the tree.
         - file_id is the file_id if the entry is versioned.
         - versioned_kind is the kind of the file as last recorded in the
           versioning system. If 'unknown' the file is not versioned.
        One of 'kind' and 'versioned_kind' must not be 'unknown'.

        :param prefix: Start walking from prefix within the tree rather than
            at the root. This allows one to walk a subtree but get paths that
            are relative to a tree rooted higher up.
        :return: an iterator over the directory data.
        """
        raise NotImplementedError(self.walkdirs)
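
    # Illustrative shape of a single walkdirs() item (all values invented):
    #   (('', '/home/user/tree', 'dir-root-id'),
    #    [('hello.c', 'hello.c', 'file', <stat result>, 'hello.c',
    #      'file-hello-id', 'file')])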

    def supports_content_filtering(self):
        return False

    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
            or None if unknown
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        """
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            note("*** %s content-filter: %s => %r" % (path, prefs, stk))
        return stk

    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        """
        if self.supports_content_filtering():
            return lambda path, file_id: \
                    self._content_filter_stack(path, file_id)
        else:
            return None

    def iter_search_rules(self, path_names, pref_names=None,
        _default_searcher=None):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
          Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
          See _RulesSearcher.get_items for details on the tuple sequence.
        """
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)

    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher


######################################################################
        :param require_versioned: An optional boolean (defaults to False). When
            supplied and True all the 'specific_files' must be versioned, or
            a PathsNotVersionedError will be thrown.
        :param want_unversioned: Scan for unversioned paths.
        """
        # NB: show_status depends on being able to pass in non-versioned files
        # and report them as unknown
        trees = (self.source,)
        if extra_trees is not None:
            trees = trees + tuple(extra_trees)
        # target is usually the newer tree:
        specific_file_ids = self.target.paths2ids(specific_files, trees,
            require_versioned=require_versioned)
        if specific_files and not specific_file_ids:
            # All files are unversioned, so just return an empty delta
            # _compare_trees would think we want a complete delta
            result = delta.TreeDelta()
            fake_entry = InventoryFile('unused', 'unused', 'unused')
            result.unversioned = [(path, None,
                self.target._comparison_data(fake_entry, path)[0]) for path in
                specific_files]
            return result
        return delta._compare_trees(self.source, self.target, want_unchanged,
            specific_files, include_root, extra_trees=extra_trees,
            require_versioned=require_versioned,
            want_unversioned=want_unversioned)
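
    # Usage sketch (illustrative, not from bzrlib documentation): callers
    # normally reach this via tree.changes_from(other_tree); the returned
    # TreeDelta groups results into lists such as added, removed, renamed
    # and modified.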

    def iter_changes(self, include_unchanged=False,
                      specific_files=None, pb=None, extra_trees=[],
                      require_versioned=True, want_unversioned=False):
        """Generate an iterator of changes between trees.

        A tuple is returned:
        (file_id, (path_in_source, path_in_target),
         changed_content, versioned, parent, name, kind,
         executable)

        Changed_content is True if the file's content has changed. This
        includes changes to its kind, and to a symlink's target.

        versioned, parent, name, kind, executable are tuples of (from, to).
        If a file is missing in a tree, its kind is None.

        Iteration is done in parent-to-child order, relative to the target
        tree.

        There is no guarantee that all paths are in sorted order: the
        requirement to expand the search due to renames may result in children
        that should be found early being found late in the search, after
        lexically later results have been returned.
        :param require_versioned: Raise errors.PathsNotVersionedError if a
            path in the specific_files list is not versioned in one of
            source, target or extra_trees.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        """
        lookup_trees = [self.source]
        if extra_trees:
            lookup_trees.extend(extra_trees)
        if specific_files == []:
            specific_file_ids = []
        else:
            specific_file_ids = self.target.paths2ids(specific_files,
                lookup_trees, require_versioned=require_versioned)
        if want_unversioned:
            all_unversioned = sorted([(p.split('/'), p) for p in
                                      self.target.extras()
                if specific_files is None or
                    osutils.is_inside_any(specific_files, p)])
            all_unversioned = deque(all_unversioned)
        else:
            all_unversioned = deque()
        to_paths = {}
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        entry_count = 0
        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable values when execute is not supported.
        fake_entry = InventoryFile('unused', 'unused', 'unused')
        for to_path, to_entry in to_entries_by_dir:
            while all_unversioned and all_unversioned[0][0] < to_path.split('/'):
                unversioned_path = all_unversioned.popleft()
                to_kind, to_executable, to_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, None),
                    (None, unversioned_path[0][-1]),
                    (None, to_kind),
                    (None, to_executable))
            file_id = to_entry.file_id
            to_paths[file_id] = to_path
            entry_count += 1
            changed_content = False
            from_path, from_entry = from_data.get(file_id, (None, None))
            from_versioned = (from_entry is not None)
            if from_entry is not None:
                from_versioned = True
                from_name = from_entry.name
                from_parent = from_entry.parent_id
                from_kind, from_executable, from_stat = \
                    self.source._comparison_data(from_entry, from_path)
                entry_count += 1
            else:
                from_versioned = False
                from_kind = None
                from_parent = None
                from_name = None
                from_executable = None
            versioned = (from_versioned, True)
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(to_entry, to_path)
            kind = (from_kind, to_kind)
            if kind[0] != kind[1]:
                changed_content = True
            elif from_kind == 'file':
                if (self.source.get_file_sha1(file_id, from_path, from_stat) !=
                    self.target.get_file_sha1(file_id, to_path, to_stat)):
                    changed_content = True
            elif from_kind == 'symlink':
                if (self.source.get_symlink_target(file_id) !=
                    self.target.get_symlink_target(file_id)):
                    changed_content = True
            # XXX: Yes, the indentation below is wrong. But fixing it broke
            # test_merge.TestMergerEntriesLCAOnDisk.
            # test_nested_tree_subtree_renamed_and_modified. We'll wait for
            # the fix from bzr.dev -- vila 2009026
            elif from_kind == 'tree-reference':
                if (self.source.get_reference_revision(file_id, from_path)
                    != self.target.get_reference_revision(file_id, to_path)):
                    changed_content = True
            parent = (from_parent, to_entry.parent_id)
            name = (from_name, to_entry.name)
            executable = (from_executable, to_executable)
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            if (changed_content is not False or versioned[0] != versioned[1]
                or parent[0] != parent[1] or name[0] != name[1] or
                executable[0] != executable[1] or include_unchanged):
                yield (file_id, (from_path, to_path), changed_content,
                    versioned, parent, name, kind, executable)

        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(fake_entry, unversioned_path[1])
            yield (None, (None, unversioned_path[1]), True, (False, False),
                (None, None),
                (None, unversioned_path[0][-1]),
                (None, to_kind),
                (None, to_executable))

        def get_to_path(to_entry):
            if to_entry.parent_id is None:
                to_path = '' # the root
            else:
                if to_entry.parent_id not in to_paths:
                    # recurse up
                    return get_to_path(self.target.inventory[to_entry.parent_id])
                to_path = osutils.pathjoin(to_paths[to_entry.parent_id],
                                           to_entry.name)
            to_paths[to_entry.file_id] = to_path
            return to_path

        for path, from_entry in from_entries_by_dir:
            file_id = from_entry.file_id
            if file_id in to_paths:
                # already returned
                continue
            if not file_id in self.target.all_file_ids():
                # common case - paths we have not emitted are not present in
                # the target.
                to_path = None
            else:
                to_path = get_to_path(self.target.inventory[file_id])
            entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            versioned = (True, False)
            parent = (from_entry.parent_id, None)
            name = (from_entry.name, None)
            from_kind, from_executable, stat_value = \
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            yield(file_id, (path, to_path), changed_content, versioned, parent,
                  name, kind, executable)
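
    # Illustrative consumption of iter_changes() (hypothetical trees):
    #   for (file_id, paths, changed_content, versioned, parent, name,
    #        kind, executable) in InterTree.get(basis, work).iter_changes():
    #       ...
    # where paths and every field after changed_content are (source, target)
    # pairs.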


class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()
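
    # Illustrative usage (hypothetical trees): walk a working tree against
    # its basis and read the combined entries:
    #   walker = MultiWalker(work_tree, [basis_tree])
    #   for path, file_id, master_ie, other_values in walker.iter_all():
    #       ...
    # other_values holds one (path, entry) pair, or (None, None), per other
    # tree.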

    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie

    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
                   MultiWalker._path_to_key(path2))
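
    # Illustrative ordering (made-up paths): this comparison sorts u'a/d'
    # before u'a/b/c', because all direct children of 'a' are grouped before
    # any grandchildren, even though plain string comparison would put
    # u'a/b/c' first.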

    @staticmethod
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)

    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)

    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result

    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # is anywhere else.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra

    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries

    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values