            specific_files=specific_files,
            extra_trees=extra_trees,
            require_versioned=require_versioned,
            include_root=include_root,
            want_unversioned=want_unversioned,
            )

    def iter_changes(self, from_tree, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=None,
                     require_versioned=True, want_unversioned=False):
        """See InterTree.iter_changes"""
        intertree = InterTree.get(from_tree, self)
        return intertree.iter_changes(include_unchanged, specific_files, pb,
            extra_trees, require_versioned, want_unversioned=want_unversioned)

    def conflicts(self):
        """Get a list of the conflicts in the tree.

        Each conflict is an instance of bzrlib.conflicts.Conflict.
        """
        return _mod_conflicts.ConflictList()

    def extras(self):
        """For trees that can have unversioned files, return all such paths."""
        return []

    def get_parent_ids(self):
        """Get the parent ids for this tree.

        :return: a list of parent ids. [] is returned to indicate
            a tree with no parents.
        :raises: BzrError if the parents are not known.
        """
        raise NotImplementedError(self.get_parent_ids)

    def has_filename(self, filename):
        """True if the tree has given filename."""
        raise NotImplementedError(self.has_filename)

    def has_id(self, file_id):
        raise NotImplementedError(self.has_id)

    @deprecated_method(deprecated_in((2, 4, 0)))
    def __contains__(self, file_id):
        return self.has_id(file_id)

    def has_or_had_id(self, file_id):
        raise NotImplementedError(self.has_or_had_id)

    def is_ignored(self, filename):
        """Check whether the filename is ignored by this tree.

        :param filename: The relative filename within the tree.
        :return: True if the filename is ignored.
        """
        return False

    def all_file_ids(self):
        """Iterate through all file ids, including ids for missing files."""
        raise NotImplementedError(self.all_file_ids)

    def id2path(self, file_id):
        """Return the path for a file id.

        :raises NoSuchId:
        """
        raise NotImplementedError(self.id2path)

    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the
        grandchildren.

        For example, in the tree::

           a/
             b/
               c
             d/
               e
           f/
             g

        The yield order (ignoring root) would be::

          a, f, a/b, a/d, a/b/c, a/d/e, f/g

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        """
        raise NotImplementedError(self.iter_entries_by_dir)
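    # Illustrative sketch (not part of the original module): how a caller
    # might consume iter_entries_by_dir(). `tree` is assumed to be any locked
    # Tree implementation; the entries are bzrlib InventoryEntry objects.
    #
    #     tree.lock_read()
    #     try:
    #         for path, entry in tree.iter_entries_by_dir():
    #             print path, entry.kind, entry.file_id
    #     finally:
    #         tree.unlock()
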
    def list_files(self, include_root=False, from_dir=None, recursive=True):
        """List all files in this tree.

        :param include_root: Whether to include the entry for the tree root
        :param from_dir: Directory under which to list files
        :param recursive: Whether to list files recursively
        :return: iterator over tuples of (path, versioned, kind, file_id,
            inventory entry)
        """
        raise NotImplementedError(self.list_files)

    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path, entry.file_id

    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
            % self.__class__.__name__)

    def stored_kind(self, file_id):
        """File kind stored for this file_id.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        """
        return self.kind(file_id)

    def path_content_summary(self, path):
        """Get a summary of the information about path.

        All the attributes returned are for the canonical form, not the
        convenient form (if content filters are in use.)

        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file and the size of the
            canonical form can be cheaply determined, None otherwise.
            exec is None unless kind is file and the platform supports the 'x'
            bit.
            sha1-or-link is the link target if kind is symlink, or the sha1 if
            it can be obtained without reading the file.
        """
        raise NotImplementedError(self.path_content_summary)
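    # Example shapes of the 4-tuple described above (hypothetical values,
    # shown here for illustration only):
    #
    #     ('file', 12, False, 'b6589fc6ab0dc82cf12099d1c2d40ab994e8410c')
    #     ('symlink', None, None, 'target/of/link')
    #     ('directory', None, None, None)
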
    def get_reference_revision(self, file_id, path=None):
        raise NotImplementedError("Tree subclass %s must implement "
                                  "get_reference_revision"
                                  % self.__class__.__name__)

    def _comparison_data(self, entry, path):
        """Return a tuple of kind, executable, stat_value for a file.

        entry may be None if there is no inventory entry for the file, but
        path must always be supplied.

        kind is None if there is no file present (even if an inventory id is
        present). executable is False for non-file entries.
        """
        raise NotImplementedError(self._comparison_data)

    def _file_size(self, entry, stat_value):
        raise NotImplementedError(self._file_size)

    def get_file(self, file_id, path=None):
        """Return a file object for the file file_id in the tree.

        If both file_id and path are defined, it is implementation defined as
        to which one is used.
        """
        raise NotImplementedError(self.get_file)

    def get_file_with_stat(self, file_id, path=None):
        """Get a file handle and stat object for file_id.

        The default implementation returns (self.get_file, None) for backwards
        compatibility.

        :param file_id: The file id to read.
        :param path: The path of the file, if it is known.
        :return: A tuple (file_handle, stat_value_or_None). If the tree has
            no stat facility, or need for a stat cache feedback during commit,
            it may return None for the second element of the tuple.
        """
        return (self.get_file(file_id, path), None)

    def get_file_text(self, file_id, path=None):
        """Return the byte content of a file.

        :param file_id: The file_id of the file.
        :param path: The path of the file.

        If both file_id and path are supplied, an implementation may use
        either one.

        :returns: A single byte string for the whole file.
        """
        my_file = self.get_file(file_id, path)
        try:
            return my_file.read()
        finally:
            my_file.close()

    def get_file_lines(self, file_id, path=None):
        """Return the content of a file, as lines.

        :param file_id: The file_id of the file.
        :param path: The path of the file.

        If both file_id and path are supplied, an implementation may use
        either one.
        """
        return osutils.split_lines(self.get_file_text(file_id, path))

    def get_file_verifier(self, file_id, path=None, stat_value=None):
        """Return a verifier for a file.

        The default implementation returns a sha1.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        :param stat_value: Optional stat value for the object
        :return: Tuple with verifier name and verifier data
        """
        return ("SHA1", self.get_file_sha1(file_id, path=path,
            stat_value=stat_value))
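    # Sketch of a verifier comparison between two trees, assuming `tree_a`
    # and `tree_b` are Tree instances that both version `file_id`
    # (illustrative only; not part of the original module):
    #
    #     kind_a, data_a = tree_a.get_file_verifier(file_id)
    #     kind_b, data_b = tree_b.get_file_verifier(file_id)
    #     if kind_a == kind_b:
    #         same_content = (data_a == data_b)
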
    def get_file_sha1(self, file_id, path=None, stat_value=None):
        """Return the SHA1 hash for a file.

        :note: callers should use get_file_verifier instead
            where possible, as the underlying repository implementation may
            have quicker access to a non-sha1 verifier.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        :param stat_value: Optional stat value for the object
        """
        raise NotImplementedError(self.get_file_sha1)

    def get_file_mtime(self, file_id, path=None):
        """Return the modification time for a file.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        """
        raise NotImplementedError(self.get_file_mtime)

    def get_file_size(self, file_id):
        """Return the size of a file in bytes.

        This applies only to regular files. If invoked on directories or
        symlinks, it will return None.

        :param file_id: The file-id of the file
        """
        raise NotImplementedError(self.get_file_size)

    def is_executable(self, file_id, path=None):
        """Check if a file is executable.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        """
        raise NotImplementedError(self.is_executable)

    def iter_files_bytes(self, desired_files):
        """Iterate through file contents.

        Files will not necessarily be returned in the order they occur in
        desired_files. No specific order is guaranteed.

        Yields pairs of identifier, bytes_iterator. identifier is an opaque
        value supplied by the caller as part of desired_files. It should
        uniquely identify the file version in the caller's context. (Examples:
        an index number or a TreeTransform trans_id.)

        bytes_iterator is an iterable of bytestrings for the file. The
        kind of iterable and length of the bytestrings are unspecified, but for
        this implementation, it is a tuple containing a single bytestring with
        the complete text of the file.

        :param desired_files: a list of (file_id, identifier) pairs
        """
        for file_id, identifier in desired_files:
            # We wrap the string in a tuple so that we can return an iterable
            # of bytestrings. (Technically, a bytestring is also an iterable
            # of bytestrings, but iterating through each character is not
            # performant.)
            cur_file = (self.get_file_text(file_id),)
            yield identifier, cur_file
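    # Usage sketch for iter_files_bytes(), assuming `tree` is any Tree and the
    # identifiers are caller-chosen output file names (illustrative only):
    #
    #     desired = [('file-id-1', 'a.txt'), ('file-id-2', 'b.txt')]
    #     for name, chunks in tree.iter_files_bytes(desired):
    #         with open(name, 'wb') as out:
    #             for chunk in chunks:
    #                 out.write(chunk)
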
    def get_symlink_target(self, file_id, path=None):
        """Get the target for a given file_id.

        It is assumed that the caller already knows that file_id is referencing
        a symlink.

        :param file_id: Handle for the symlink entry.
        :param path: The path of the file.
            If both file_id and path are supplied, an implementation may use
            either one.
        :return: The path the symlink points to.
        """
        raise NotImplementedError(self.get_symlink_target)

    def get_root_id(self):
        """Return the file_id for the root of this tree."""
        raise NotImplementedError(self.get_root_id)

    def annotate_iter(self, file_id,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.

        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.
        :param file_id: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of
            this value.
        """
        raise NotImplementedError(self.annotate_iter)
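    # Sketch of consuming annotate_iter(), assuming `tree` is a Tree that
    # versions `file_id` (illustrative only):
    #
    #     for revision_id, line in tree.annotate_iter(file_id):
    #         # revision_id may be the pseudo-revision 'current:' in a
    #         # working tree, for uncommitted lines.
    #         print revision_id, line
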
    def _get_plan_merge_data(self, file_id, other, base):
        from bzrlib import versionedfile
        vf = versionedfile._PlanMergeVersionedFile(file_id)
        last_revision_a = self._get_file_revision(file_id, vf, 'this:')
        last_revision_b = other._get_file_revision(file_id, vf, 'other:')
        if base is None:
            last_revision_base = None
        else:
            last_revision_base = base._get_file_revision(file_id, vf, 'base:')
        return vf, last_revision_a, last_revision_b, last_revision_base

    def plan_file_merge(self, file_id, other, base=None):
        """Generate a merge plan based on annotations.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,
                             base=last_revision_base)

    def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 base=last_revision_base)
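    # Sketch of consuming a merge plan, assuming `this_tree` and `other_tree`
    # are Trees that both version `file_id`. The plan is a sequence of
    # (state, line) pairs, where state names the origin of each line (for
    # example 'unchanged', 'new-a', 'new-b', 'killed-a', 'killed-b').
    # Illustrative only:
    #
    #     for state, line in this_tree.plan_file_merge(file_id, other_tree):
    #         print state, line
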
    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)

    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                         self.get_file_lines(file_id))
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self.get_file_revision(file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision

    def _check_retrieved(self, ie, f):
        if not __debug__:
            return
        fp = osutils.fingerprint_file(f)
        f.seek(0)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise errors.BzrError(
                        "mismatched size for file %r in %r" %
                        (ie.file_id, self._store),
                        ["inventory expects %d bytes" % ie.text_size,
                         "file is actually %d bytes" % fp['size'],
                         "store is probably damaged/corrupt"])

        if ie.text_sha1 != fp['sha1']:
            raise errors.BzrError("wrong SHA-1 for file %r in %r" %
                    (ie.file_id, self._store),
                    ["inventory expects %s" % ie.text_sha1,
                     "file is actually %s" % fp['sha1'],
                     "store is probably damaged/corrupt"])

    def print_file(self, file_id):
        """Print file with id `file_id` to stdout."""
        import sys
        sys.stdout.write(self.get_file_text(file_id))

    def path2id(self, path):
        """Return the id for path in this tree."""
        raise NotImplementedError(self.path2id)

    def paths2ids(self, paths, trees=[], require_versioned=True):
        """Return all the ids that can be reached by walking from paths.

        Each path is looked up in this tree and any extras provided in
        trees, and this is repeated recursively: the children in an extra tree
        of a directory that has been renamed under a provided path in this tree
        are all returned, even if none exist under a provided path in this
        tree, and vice versa.

        :param paths: An iterable of paths to start converting to ids from.
            Alternatively, if paths is None, no ids should be calculated and None
            will be returned. This is offered to make calling the api unconditional
            for code that *might* take a list of files.
        :param trees: Additional trees to consider.
        :param require_versioned: If False, do not raise NotVersionedError if
            an element of paths is not versioned in this tree and all of trees.
        """
        return find_ids_across_trees(paths, [self] + list(trees), require_versioned)

    def iter_children(self, file_id):
        """Iterate over the file ids of the children of an entry.

        :param file_id: File id of the entry
        :return: Iterator over child file ids.
        """
        raise NotImplementedError(self.iter_children)

    def lock_read(self):
        """Lock this tree for multiple read only operations.

        :return: A bzrlib.lock.LogicalLockResult.
        """
        pass

    def revision_tree(self, revision_id):
        """Obtain a revision tree for the revision revision_id.

        The intention of this method is to allow access to possibly cached
        tree data. Implementors of this method should raise NoSuchRevision if
        the tree is not locally available, even if they could obtain the
        tree via a repository or some other means. Callers are responsible
        for finding the ultimate source for a revision tree.

        :param revision_id: The revision_id of the requested tree.
        :raises: NoSuchRevision if the tree cannot be obtained.
        """
        raise errors.NoSuchRevisionInTree(self, revision_id)

    def unknowns(self):
        """What files are present in this tree and unknown.

        :return: an iterator over the unknown files.
        """
        return iter([])

    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.

        :return: set of paths.
        """
        raise NotImplementedError(self.filter_unversioned_files)

    def walkdirs(self, prefix=""):
        """Walk the contents of this tree from path down.

        This yields all the data about the contents of a directory at a time.
        After each directory has been yielded, if the caller has mutated the
        list to exclude some directories, they are then not descended into.

        The data yielded is of the form:
        ((directory-relpath, directory-path-from-root, directory-fileid),
        [(relpath, basename, kind, lstat, path_from_tree_root, file_id,
          versioned_kind), ...]),
         - directory-relpath is the containing dirs relpath from prefix
         - directory-path-from-root is the containing dirs path from /
         - directory-fileid is the id of the directory if it is versioned.
         - relpath is the relative path within the subtree being walked.
         - basename is the basename
         - kind is the kind of the file now. If unknown then the file is not
           present within the tree - but it may be recorded as versioned. See
           versioned_kind.
         - lstat is the stat data *if* the file was statted.
         - path_from_tree_root is the path from the root of the tree.
         - file_id is the file_id if the entry is versioned.
         - versioned_kind is the kind of the file as last recorded in the
           versioning system. If 'unknown' the file is not versioned.
        One of 'kind' and 'versioned_kind' must not be 'unknown'.

        :param prefix: Start walking from prefix within the tree rather than
            at the root. This allows one to walk a subtree but get paths that are
            relative to a tree rooted higher up.
        :return: an iterator over the directory data.
        """
        raise NotImplementedError(self.walkdirs)
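    # Example of one item yielded by walkdirs() (hypothetical values, shown
    # only to illustrate the structure documented above):
    #
    #     (('', '/home/user/project', 'TREE_ROOT'),
    #      [('README', 'README', 'file', <stat result>, 'README',
    #        'readme-file-id', 'file')])
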
    def supports_content_filtering(self):
        return False

    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
            or None if unknown
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        """
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            trace.note(
                gettext("*** {0} content-filter: {1} => {2!r}").format(
                    path, prefs, stk))
        return stk

    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        """
        if self.supports_content_filtering():
            return lambda path, file_id: \
                    self._content_filter_stack(path, file_id)
        else:
            return None

    def iter_search_rules(self, path_names, pref_names=None,
                          _default_searcher=None):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
            Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
            See _RulesSearcher.get_items for details on the tuple sequence.
        """
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)

    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher


class InventoryTree(Tree):
    """A tree that relies on an inventory for its metadata.

    Trees contain an `Inventory` object, and also know how to retrieve
    file texts mentioned in the inventory, either from a working
    directory or from a store.

    It is possible for trees to contain files that are not described
    in their inventory or vice versa; for this use `filenames()`.

    Subclasses should set the _inventory attribute, which is considered
    private to external API users.
    """

    def get_canonical_inventory_paths(self, paths):
        """Like get_canonical_inventory_path() but works on multiple items.

        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case
            insensitively.
        """
        return list(self._yield_canonical_inventory_paths(paths))

    def get_canonical_inventory_path(self, path):
        """Returns the first inventory item that case-insensitively matches path.

        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches case-insensitively, it is implementation
        defined which is returned.

        If no path matches case-insensitively, the input path is returned, but
        with as many path entries that do exist changed to their canonical
        form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        """
        return self._yield_canonical_inventory_paths([path]).next()
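    # Sketch: resolving a case-insensitive path against the inventory,
    # assuming `tree` versions 'README.txt' (illustrative only):
    #
    #     tree.get_canonical_inventory_path('readme.TXT')
    #     # -> 'README.txt' if such an entry exists, otherwise the input
    #     #    path with any existing leading elements canonicalised.
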
    def _yield_canonical_inventory_paths(self, paths):
        for path in paths:
            # First, if the path as specified exists exactly, just use it.
            if self.path2id(path) is not None:
                yield path
                continue
            cur_id = self.get_root_id()
            cur_path = ''
            bit_iter = iter(path.split("/"))
            for elt in bit_iter:
                lelt = elt.lower()
                new_path = None
                for child in self.iter_children(cur_id):
                    try:
                        # XXX: it seems like if the child is known to be in the
                        # tree, we shouldn't need to go from its id back to
                        # its path -- mbp 2010-02-11
                        #
                        # XXX: it seems like we could be more efficient
                        # by just directly looking up the original name and
                        # only then searching all children; also by not
                        # chopping paths so much. -- mbp 2010-02-11
                        child_base = os.path.basename(self.id2path(child))
                        if (child_base == elt):
                            # if we found an exact match, we can stop now; if
                            # we found an approximate match we need to keep
                            # searching because there might be an exact match
                            # later.
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                            break
                        elif child_base.lower() == lelt:
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                    except errors.NoSuchId:
                        # before a change is committed we can see this error...
                        continue
                if new_path:
                    cur_path = new_path
                else:
                    # got to the end of this directory and no entries matched.
                    # Return what matched so far, plus the rest as specified.
                    cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
                    break
            yield cur_path

    @deprecated_method(deprecated_in((2, 5, 0)))
    def _get_inventory(self):
        return self._inventory

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def _get_root_inventory(self):
        return self._inventory

    root_inventory = property(_get_root_inventory,
                              doc="Root inventory of this tree")

    def _unpack_file_id(self, file_id):
        """Find the inventory and inventory file id for a tree file id.

        :param file_id: The tree file id, as bytestring or tuple
        :return: Inventory and inventory file id
        """
        if isinstance(file_id, tuple):
            if len(file_id) != 1:
                raise ValueError("nested trees not yet supported: %r" % file_id)
            file_id = file_id[0]
        return self.root_inventory, file_id

    def path2id(self, path):
        """Return the id for path in this tree."""
        return self._path2inv_file_id(path)[1]

    def _path2inv_file_id(self, path):
        """Look up an inventory and inventory file id by path.

        :param path: Path to look up
        :return: tuple with inventory and inventory file id
        """
        # FIXME: Support nested trees
        return self.root_inventory, self.root_inventory.path2id(path)

    def id2path(self, file_id):
        """Return the path for a file id.

        :raises NoSuchId:
        """
        inventory, file_id = self._unpack_file_id(file_id)
        return inventory.id2path(file_id)

    def has_id(self, file_id):
        inventory, file_id = self._unpack_file_id(file_id)
        return inventory.has_id(file_id)

    def has_or_had_id(self, file_id):
        inventory, file_id = self._unpack_file_id(file_id)
        return inventory.has_id(file_id)

    def all_file_ids(self):
        return set(
            [entry.file_id for path, entry in self.iter_entries_by_dir()])

    @deprecated_method(deprecated_in((2, 4, 0)))
    def __iter__(self):
        return iter(self.all_file_ids())

    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.

        :return: set of paths.
        """
        # NB: we specifically *don't* call self.has_filename, because for
        # WorkingTrees that can indicate files that exist on disk but that
        # are not versioned.
        return set((p for p in paths if self.path2id(p) is None))

    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is the same as for
        Tree.iter_entries_by_dir; see that method for details.

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        """
        if specific_file_ids is None:
            inventory_file_ids = None
        else:
            inventory_file_ids = []
            for tree_file_id in specific_file_ids:
                inventory, inv_file_id = self._unpack_file_id(tree_file_id)
                if inventory is not self.root_inventory:  # for now
                    raise AssertionError("%r != %r" % (
                        inventory, self.root_inventory))
                inventory_file_ids.append(inv_file_id)
        # FIXME: Handle nested trees
        return self.root_inventory.iter_entries_by_dir(
            specific_file_ids=inventory_file_ids, yield_parents=yield_parents)

    @deprecated_method(deprecated_in((2, 5, 0)))
    def get_file_by_path(self, path):
        return self.get_file(self.path2id(path), path)

    def iter_children(self, file_id, path=None):
        """See Tree.iter_children."""
        entry = self.iter_entries_by_dir([file_id]).next()[1]
        for child in getattr(entry, 'children', {}).itervalues():
            yield child.file_id


def find_ids_across_trees(filenames, trees, require_versioned=True):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, and all children of matched
    directories will be used.

    :param filenames: The filenames to find file_ids for (if None, returns
        None)
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames and their children.
    """
    if not filenames:
        return None
    specified_path_ids = _find_ids_across_trees(filenames, trees,
        require_versioned)
    return _find_children_across_trees(specified_path_ids, trees)


def _find_ids_across_trees(filenames, trees, require_versioned):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, but subdirectories are not scanned.

    :param filenames: The filenames to find file_ids for
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames
    """
    not_versioned = []
        :param require_versioned: An optional boolean (defaults to False). When
            supplied and True all the 'specific_files' must be versioned, or
            a PathsNotVersionedError will be thrown.
        :param want_unversioned: Scan for unversioned paths.
        """
        trees = (self.source,)
        if extra_trees is not None:
            trees = trees + tuple(extra_trees)
        # target is usually the newer tree:
        specific_file_ids = self.target.paths2ids(specific_files, trees,
            require_versioned=require_versioned)
        if specific_files and not specific_file_ids:
            # All files are unversioned, so just return an empty delta
            # _compare_trees would think we want a complete delta
            result = delta.TreeDelta()
            fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
            result.unversioned = [(path, None,
                self.target._comparison_data(fake_entry, path)[0]) for path in
                specific_files]
            return result
        return delta._compare_trees(self.source, self.target, want_unchanged,
            specific_files, include_root, extra_trees=extra_trees,
            require_versioned=require_versioned,
            want_unversioned=want_unversioned)

    def iter_changes(self, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=[],
                     require_versioned=True, want_unversioned=False):
        """Generate an iterator of changes between trees.

        A tuple is returned:
        (file_id, (path_in_source, path_in_target),
         changed_content, versioned, parent, name, kind,
         executable)

        Changed_content is True if the file's content has changed. This
        includes changes to its kind, and to a symlink's target.

        versioned, parent, name, kind, executable are tuples of (from, to).
        If a file is missing in a tree, its kind is None.

        Iteration is done in parent-to-child order, relative to the target
        tree.

        There is no guarantee that all paths are in sorted order: the
        requirement to expand the search due to renames may result in children
        that should be found early being found late in the search, after
        lexically later results have been returned.
        :param require_versioned: Raise errors.PathsNotVersionedError if a
            path in the specific_files list is not versioned in one of
            source, target or extra_trees.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children
            of matched directories are included. The parents in the target tree
            of the specific files up to and including the root of the tree are
            always evaluated for changes too.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        """
        lookup_trees = [self.source]
        if extra_trees:
            lookup_trees.extend(extra_trees)
        # The ids of items we need to examine to ensure delta consistency.
        precise_file_ids = set()
        changed_file_ids = []
        if specific_files == []:
            specific_file_ids = []
        else:
            specific_file_ids = self.target.paths2ids(specific_files,
                lookup_trees, require_versioned=require_versioned)
        if specific_files is not None:
            # reparented or added entries must have their parents included
            # so that valid deltas can be created. The seen_parents set
            # tracks the parents that we need to have.
            # The seen_dirs set tracks directory entries we've yielded.
            # After outputting version object in to_entries we set difference
            # the two seen sets and start checking parents.
            seen_parents = set()
            seen_dirs = set()
        if want_unversioned:
            all_unversioned = sorted([(p.split('/'), p) for p in
                                     self.target.extras()
                if specific_files is None or
                    osutils.is_inside_any(specific_files, p)])
            all_unversioned = collections.deque(all_unversioned)
        else:
            all_unversioned = collections.deque()
        to_paths = {}
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        entry_count = 0
        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable bit values when execute is not supported.
        fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
        for target_path, target_entry in to_entries_by_dir:
            while (all_unversioned and
                all_unversioned[0][0] < target_path.split('/')):
                unversioned_path = all_unversioned.popleft()
                target_kind, target_executable, target_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, None),
                    (None, unversioned_path[0][-1]),
                    (None, target_kind),
                    (None, target_executable))
            source_path, source_entry = from_data.get(target_entry.file_id,
                (None, None))
            result, changes = self._changes_from_entries(source_entry,
                target_entry, source_path=source_path, target_path=target_path)
            to_paths[result[0]] = result[1][1]
            entry_count += 1
            if result[3][0]:
                entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            if changes or include_unchanged:
                if specific_file_ids is not None:
                    new_parent_id = result[4][1]
                    precise_file_ids.add(new_parent_id)
                    changed_file_ids.append(result[0])
                yield result
            # Ensure correct behaviour for reparented/added specific files.
            if specific_files is not None:
                # Record output dirs
                if result[6][1] == 'directory':
                    seen_dirs.add(result[0])
                # Record parents of reparented/added entries.
                versioned = result[3]
                parents = result[4]
                if not versioned[0] or parents[0] != parents[1]:
                    seen_parents.add(parents[1])
        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(fake_entry, unversioned_path[1])
            yield (None, (None, unversioned_path[1]), True, (False, False),
                (None, None),
                (None, unversioned_path[0][-1]),
                (None, to_kind),
                (None, to_executable))
        # Yield all remaining source paths
        for path, from_entry in from_entries_by_dir:
            file_id = from_entry.file_id
            if file_id in to_paths:
                # already returned
                continue
            if not self.target.has_id(file_id):
                # common case - paths we have not emitted are not present in
                # target.
                to_path = None
            else:
                to_path = self.target.id2path(file_id)
            entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            versioned = (True, False)
            parent = (from_entry.parent_id, None)
            name = (from_entry.name, None)
            from_kind, from_executable, stat_value = \
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            changed_file_ids.append(file_id)
            yield (file_id, (path, to_path), changed_content, versioned, parent,
                  name, kind, executable)
        changed_file_ids = set(changed_file_ids)
        if specific_file_ids is not None:
            for result in self._handle_precise_ids(precise_file_ids,
                changed_file_ids):
                yield result
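    # Sketch of consuming iter_changes() output, assuming `inter` is an
    # InterTree for (source, target). Each item is the 8-tuple documented in
    # the docstring above (illustrative only):
    #
    #     for (file_id, (old_path, new_path), changed_content, versioned,
    #          parent, name, kind, executable) in inter.iter_changes():
    #         if changed_content:
    #             print file_id, old_path, new_path
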
    def _get_entry(self, tree, file_id):
        """Get an inventory entry from a tree, with missing entries as None.

        If the tree raises NotImplementedError on accessing .inventory, then
        this is worked around using iter_entries_by_dir on just the file id
        desired.

        :param tree: The tree to lookup the entry in.
        :param file_id: The file_id to lookup.
        """
        try:
            inventory = tree.root_inventory
        except NotImplementedError:
            # No inventory available.
            try:
                iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
                return iterator.next()[1]
            except StopIteration:
                return None
        else:
            try:
                return inventory[file_id]
            except errors.NoSuchId:
                return None

def _handle_precise_ids(self, precise_file_ids, changed_file_ids,
1281
discarded_changes=None):
1282
"""Fill out a partial iter_changes to be consistent.
1284
:param precise_file_ids: The file ids of parents that were seen during
1286
:param changed_file_ids: The file ids of already emitted items.
1287
:param discarded_changes: An optional dict of precalculated
1288
iter_changes items which the partial iter_changes had not output
1290
:return: A generator of iter_changes items to output.
1292
# process parents of things that had changed under the users
1293
# requested paths to prevent incorrect paths or parent ids which
1294
# aren't in the tree.
1295
while precise_file_ids:
1296
precise_file_ids.discard(None)
1297
# Don't emit file_ids twice
1298
precise_file_ids.difference_update(changed_file_ids)
1299
if not precise_file_ids:
1301
# If the there was something at a given output path in source, we
1302
# have to include the entry from source in the delta, or we would
1303
# be putting this entry into a used path.
1305
for parent_id in precise_file_ids:
1307
paths.append(self.target.id2path(parent_id))
1308
except errors.NoSuchId:
1309
# This id has been dragged in from the source by delta
1310
# expansion and isn't present in target at all: we don't
1311
# need to check for path collisions on it.
1314
old_id = self.source.path2id(path)
1315
precise_file_ids.add(old_id)
1316
precise_file_ids.discard(None)
1317
current_ids = precise_file_ids
1318
precise_file_ids = set()
1319
# We have to emit all of precise_file_ids that have been altered.
1320
# We may have to output the children of some of those ids if any
1321
# directories have stopped being directories.
1322
for file_id in current_ids:
1324
if discarded_changes:
1325
result = discarded_changes.get(file_id)
1330
old_entry = self._get_entry(self.source, file_id)
1331
new_entry = self._get_entry(self.target, file_id)
1332
result, changes = self._changes_from_entries(
1333
old_entry, new_entry)
1336
# Get this parents parent to examine.
1337
new_parent_id = result[4][1]
1338
precise_file_ids.add(new_parent_id)
1340
if (result[6][0] == 'directory' and
1341
result[6][1] != 'directory'):
1342
# This stopped being a directory, the old children have
1344
if old_entry is None:
1345
# Reusing a discarded change.
1346
old_entry = self._get_entry(self.source, file_id)
1347
for child in self.source.iter_children(file_id):
1348
precise_file_ids.add(child)
1349
changed_file_ids.add(result[0])
1353
def file_content_matches(self, source_file_id, target_file_id,
1354
source_path=None, target_path=None, source_stat=None, target_stat=None):
1355
"""Check if two files are the same in the source and target trees.
1357
This only checks that the contents of the files are the same,
1358
it does not touch anything else.
1360
:param source_file_id: File id of the file in the source tree
1361
:param target_file_id: File id of the file in the target tree
1362
:param source_path: Path of the file in the source tree
1363
:param target_path: Path of the file in the target tree
1364
:param source_stat: Optional stat value of the file in the source tree
1365
:param target_stat: Optional stat value of the file in the target tree
1366
:return: Boolean indicating whether the files have the same contents
1368
source_verifier_kind, source_verifier_data = self.source.get_file_verifier(
1369
source_file_id, source_path, source_stat)
1370
target_verifier_kind, target_verifier_data = self.target.get_file_verifier(
1371
target_file_id, target_path, target_stat)
1372
if source_verifier_kind == target_verifier_kind:
1373
return (source_verifier_data == target_verifier_data)
1374
# Fall back to SHA1 for now
1375
if source_verifier_kind != "SHA1":
1376
source_sha1 = self.source.get_file_sha1(source_file_id,
1377
source_path, source_stat)
1379
source_sha1 = source_verifier_data
1380
if target_verifier_kind != "SHA1":
1381
target_sha1 = self.target.get_file_sha1(target_file_id,
1382
target_path, target_stat)
1384
target_sha1 = target_verifier_data
1385
return (source_sha1 == target_sha1)
InterTree.register_optimiser(InterTree)


class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()

    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie

    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
                   MultiWalker._path_to_key(path2))

    @staticmethod
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)
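    # Sketch of the resulting "dirblock" ordering, using hypothetical paths
    # (illustrative only): sorting by _path_to_key keeps all children of a
    # directory together, before any grandchildren.
    #
    #     sorted([u'a/b/c', u'a', u'f', u'a/d'], key=MultiWalker._path_to_key)
    #     # -> [u'a', u'f', u'a/d', u'a/b/c']
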
    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.root_inventory[file_id]
            return (cur_path, cur_ie)

    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result

    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra

    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries

    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values
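
# Usage sketch for MultiWalker, assuming `wt`, `basis` and `other` are locked
# trees sharing file ids (illustrative only; not part of the original module):
#
#     walker = MultiWalker(wt, [basis, other])
#     for path, file_id, master_ie, other_values in walker.iter_all():
#         # master_ie is None for entries only present in the other trees;
#         # other_values is a list of (path, entry) pairs, one per other tree.
#         print path, file_id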