# Copyright (C) 2005-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Tree classes, representing directory at point in time.
22
from bzrlib.lazy_import import lazy_import
23
lazy_import(globals(), """
27
conflicts as _mod_conflicts,
34
revision as _mod_revision,
40
from bzrlib.decorators import needs_read_lock
41
from bzrlib.inter import InterObject
21
import os.path, os, fnmatch
23
from inventory import Inventory
24
from trace import mutter, note
25
from osutils import pumpfile, compare_files, filesize, quotefn, sha_file, \
26
joinpath, splitpath, appendpath, isdir, isfile, file_kind
27
from errors import bailout
29
from stat import S_ISREG, S_ISDIR, ST_MODE, ST_SIZE
45
class Tree(object):
    """Abstract file tree.

    There are several subclasses:

    * `WorkingTree` exists as files on disk editable by the user.

    * `RevisionTree` is a tree as recorded at some point in the past.

    Trees contain an `Inventory` object, and also know how to retrieve
    file texts mentioned in the inventory, either from a working
    directory or from a store.

    It is possible for trees to contain files that are not described
    in their inventory or vice versa; for this use `filenames()`.

    Trees can be compared, etc, regardless of whether they are working
    trees or versioned trees.
    """
def changes_from(self, other, want_unchanged=False, specific_files=None,
58
extra_trees=None, require_versioned=False, include_root=False,
59
want_unversioned=False):
60
"""Return a TreeDelta of the changes from other to this tree.
62
:param other: A tree to compare with.
63
:param specific_files: An optional list of file paths to restrict the
64
comparison to. When mapping filenames to ids, all matches in all
65
trees (including optional extra_trees) are used, and all children of
66
matched directories are included.
67
:param want_unchanged: An optional boolean requesting the inclusion of
68
unchanged entries in the result.
69
:param extra_trees: An optional list of additional trees to use when
70
mapping the contents of specific_files (paths) to file_ids.
71
:param require_versioned: An optional boolean (defaults to False). When
72
supplied and True all the 'specific_files' must be versioned, or
73
a PathsNotVersionedError will be thrown.
74
:param want_unversioned: Scan for unversioned paths.
76
        The comparison will be performed by an InterTree object looked up on
        self and other.
        """
        # Martin observes that Tree.changes_from returns a TreeDelta and this
        # may confuse people, because the class name of the returned object is
        # a synonym of the object referenced in the method name.
        return InterTree.get(other, self).compare(
            want_unchanged=want_unchanged,
            specific_files=specific_files,
            extra_trees=extra_trees,
            require_versioned=require_versioned,
            include_root=include_root,
            want_unversioned=want_unversioned,
            )
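    # Illustrative usage sketch (not part of the API): a caller typically
    # compares a working tree against its basis and inspects the returned
    # TreeDelta; ``wt`` and ``basis`` below are hypothetical trees obtained
    # by the caller::
    #
    #   delta = wt.changes_from(basis)
    #   for path, file_id, kind in delta.added:
    #       print "added", path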
def iter_changes(self, from_tree, include_unchanged=False,
92
specific_files=None, pb=None, extra_trees=None,
93
require_versioned=True, want_unversioned=False):
94
"""See InterTree.iter_changes"""
95
intertree = InterTree.get(from_tree, self)
96
return intertree.iter_changes(include_unchanged, specific_files, pb,
97
extra_trees, require_versioned, want_unversioned=want_unversioned)
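    # Illustrative usage sketch (not part of the API): each item yielded by
    # iter_changes() is the tuple documented in InterTree.iter_changes; ``wt``
    # and ``basis`` below are hypothetical trees obtained by the caller::
    #
    #   for (file_id, paths, changed_content, versioned, parent,
    #        name, kind, executable) in wt.iter_changes(basis):
    #       if changed_content:
    #           print "content changed:", paths[1]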

    def conflicts(self):
        """Get a list of the conflicts in the tree.

        Each conflict is an instance of bzrlib.conflicts.Conflict.
        """
        return _mod_conflicts.ConflictList()

    def extras(self):
        """For trees that can have unversioned files, return all such paths."""
        return []

def get_parent_ids(self):
111
"""Get the parent ids for this tree.
113
:return: a list of parent ids. [] is returned to indicate
114
a tree with no parents.
115
:raises: BzrError if the parents are not known.
117
raise NotImplementedError(self.get_parent_ids)
119
    def has_filename(self, filename):
        """True if the tree has given filename."""
        raise NotImplementedError(self.has_filename)
123
59
def has_id(self, file_id):
124
raise NotImplementedError(self.has_id)
126
def __contains__(self, file_id):
127
return self.has_id(file_id)
129
def has_or_had_id(self, file_id):
130
raise NotImplementedError(self.has_or_had_id)
132
def is_ignored(self, filename):
133
"""Check whether the filename is ignored by this tree.
135
:param filename: The relative filename within the tree.
136
        :return: True if the filename is ignored.
        """
        return False

    def __iter__(self):
        """Yield all file ids in this tree."""
        raise NotImplementedError(self.__iter__)
144
def all_file_ids(self):
145
"""Iterate through all file ids, including ids for missing files."""
146
return set(self.inventory)
148
66
def id2path(self, file_id):
149
"""Return the path for a file id.
153
raise NotImplementedError(self.id2path)
155
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
156
"""Walk the tree in 'by_dir' order.
158
This will yield each entry in the tree as a (path, entry) tuple.
159
The order that they are yielded is:
161
Directories are walked in a depth-first lexicographical order,
162
however, whenever a directory is reached, all of its direct child
163
        nodes are yielded in lexicographical order before yielding the
        grandchildren.

        For example, in the tree::

           a/
             b/
               c
             d/
               e
           f/
             g

The yield order (ignoring root) would be::
177
a, f, a/b, a/d, a/b/c, a/d/e, f/g
179
:param yield_parents: If True, yield the parents from the root leading
180
down to specific_file_ids that have been requested. This has no
181
impact if specific_file_ids is None.
183
raise NotImplementedError(self.iter_entries_by_dir)
185
def list_files(self, include_root=False, from_dir=None, recursive=True):
186
"""List all files in this tree.
188
:param include_root: Whether to include the entry for the tree root
189
:param from_dir: Directory under which to list files
190
:param recursive: Whether to list files recursively
191
:return: iterator over tuples of (path, versioned, kind, file_id,
194
raise NotImplementedError(self.list_files)
196
def iter_references(self):
197
if self.supports_tree_reference():
198
for path, entry in self.iter_entries_by_dir():
199
if entry.kind == 'tree-reference':
200
yield path, entry.file_id
202
def kind(self, file_id):
203
raise NotImplementedError("Tree subclass %s must implement kind"
204
% self.__class__.__name__)
206
def stored_kind(self, file_id):
207
"""File kind stored for this file_id.
209
May not match kind on disk for working trees. Always available
210
for versioned files, even when the file itself is missing.
212
return self.kind(file_id)
214
def path_content_summary(self, path):
215
"""Get a summary of the information about path.
217
All the attributes returned are for the canonical form, not the
218
convenient form (if content filters are in use.)
220
:param path: A relative path within the tree.
221
:return: A tuple containing kind, size, exec, sha1-or-link.
222
Kind is always present (see tree.kind()).
223
size is present if kind is file and the size of the
224
canonical form can be cheaply determined, None otherwise.
225
exec is None unless kind is file and the platform supports the 'x'
227
sha1-or-link is the link target if kind is symlink, or the sha1 if
228
it can be obtained without reading the file.
230
raise NotImplementedError(self.path_content_summary)
232
def get_reference_revision(self, file_id, path=None):
233
raise NotImplementedError("Tree subclass %s must implement "
234
"get_reference_revision"
235
% self.__class__.__name__)
237
def _comparison_data(self, entry, path):
238
"""Return a tuple of kind, executable, stat_value for a file.
240
entry may be None if there is no inventory entry for the file, but
241
path must always be supplied.
243
kind is None if there is no file present (even if an inventory id is
244
present). executable is False for non-file entries.
246
raise NotImplementedError(self._comparison_data)
248
def _file_size(self, entry, stat_value):
249
raise NotImplementedError(self._file_size)
251
def get_file(self, file_id, path=None):
252
"""Return a file object for the file file_id in the tree.
254
If both file_id and path are defined, it is implementation defined as
255
to which one is used.
257
raise NotImplementedError(self.get_file)
259
def get_file_with_stat(self, file_id, path=None):
260
"""Get a file handle and stat object for file_id.
262
The default implementation returns (self.get_file, None) for backwards
265
:param file_id: The file id to read.
266
:param path: The path of the file, if it is known.
267
:return: A tuple (file_handle, stat_value_or_None). If the tree has
268
no stat facility, or need for a stat cache feedback during commit,
269
it may return None for the second element of the tuple.
271
return (self.get_file(file_id, path), None)
273
def get_file_text(self, file_id, path=None):
274
"""Return the byte content of a file.
276
:param file_id: The file_id of the file.
277
:param path: The path of the file.
278
If both file_id and path are supplied, an implementation may use
281
my_file = self.get_file(file_id, path)
283
return my_file.read()
287
def get_file_lines(self, file_id, path=None):
288
"""Return the content of a file, as lines.
290
:param file_id: The file_id of the file.
291
:param path: The path of the file.
292
If both file_id and path are supplied, an implementation may use
295
return osutils.split_lines(self.get_file_text(file_id, path))
297
def get_file_sha1(self, file_id, path=None):
298
"""Return the SHA1 file for a file.
300
:param file_id: The handle for this file.
301
:param path: The path that this file can be found at.
302
These must point to the same object.
304
raise NotImplementedError(self.get_file_sha1)
306
def get_file_mtime(self, file_id, path=None):
307
"""Return the modification time for a file.
309
:param file_id: The handle for this file.
310
:param path: The path that this file can be found at.
311
These must point to the same object.
313
raise NotImplementedError(self.get_file_mtime)
315
def get_file_size(self, file_id):
316
"""Return the size of a file in bytes.
318
This applies only to regular files. If invoked on directories or
319
symlinks, it will return None.
320
:param file_id: The file-id of the file
322
raise NotImplementedError(self.get_file_size)
324
def get_file_by_path(self, path):
325
raise NotImplementedError(self.get_file_by_path)
327
def is_executable(self, file_id, path=None):
328
"""Check if a file is executable.
330
:param file_id: The handle for this file.
331
:param path: The path that this file can be found at.
332
These must point to the same object.
334
raise NotImplementedError(self.is_executable)
336
def iter_files_bytes(self, desired_files):
337
"""Iterate through file contents.
339
Files will not necessarily be returned in the order they occur in
340
desired_files. No specific order is guaranteed.
342
Yields pairs of identifier, bytes_iterator. identifier is an opaque
343
value supplied by the caller as part of desired_files. It should
344
uniquely identify the file version in the caller's context. (Examples:
345
an index number or a TreeTransform trans_id.)
347
bytes_iterator is an iterable of bytestrings for the file. The
348
kind of iterable and length of the bytestrings are unspecified, but for
349
this implementation, it is a tuple containing a single bytestring with
350
the complete text of the file.
352
:param desired_files: a list of (file_id, identifier) pairs
354
for file_id, identifier in desired_files:
355
# We wrap the string in a tuple so that we can return an iterable
356
# of bytestrings. (Technically, a bytestring is also an iterable
357
# of bytestrings, but iterating through each character is not
            # performant.)
cur_file = (self.get_file_text(file_id),)
360
yield identifier, cur_file
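    # Illustrative usage sketch: callers pass (file_id, identifier) pairs and
    # join the returned chunks per identifier; ``tree`` and the ids below are
    # hypothetical::
    #
    #   wanted = [('file-id-1', 'a'), ('file-id-2', 'b')]
    #   texts = {}
    #   for identifier, chunks in tree.iter_files_bytes(wanted):
    #       texts[identifier] = ''.join(chunks)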
362
def get_symlink_target(self, file_id):
363
"""Get the target for a given file_id.
365
It is assumed that the caller already knows that file_id is referencing
367
:param file_id: Handle for the symlink entry.
368
:return: The path the symlink points to.
370
raise NotImplementedError(self.get_symlink_target)
373
def get_root_id(self):
374
"""Return the file_id for the root of this tree."""
375
raise NotImplementedError(self.get_root_id)
377
def annotate_iter(self, file_id,
378
default_revision=_mod_revision.CURRENT_REVISION):
379
"""Return an iterator of revision_id, line tuples.
381
For working trees (and mutable trees in general), the special
382
revision_id 'current:' will be used for lines that are new in this
383
tree, e.g. uncommitted changes.
384
:param file_id: The file to produce an annotated version from
385
:param default_revision: For lines that don't match a basis, mark them
386
with this revision id. Not all implementations will make use of
389
raise NotImplementedError(self.annotate_iter)
391
def _get_plan_merge_data(self, file_id, other, base):
392
from bzrlib import versionedfile
393
vf = versionedfile._PlanMergeVersionedFile(file_id)
394
last_revision_a = self._get_file_revision(file_id, vf, 'this:')
395
last_revision_b = other._get_file_revision(file_id, vf, 'other:')
397
last_revision_base = None
399
last_revision_base = base._get_file_revision(file_id, vf, 'base:')
400
return vf, last_revision_a, last_revision_b, last_revision_base
402
def plan_file_merge(self, file_id, other, base=None):
403
"""Generate a merge plan based on annotations.
405
If the file contains uncommitted changes in this tree, they will be
406
attributed to the 'current:' pseudo-revision. If the file contains
407
uncommitted changes in the other tree, they will be assigned to the
408
'other:' pseudo-revision.
410
data = self._get_plan_merge_data(file_id, other, base)
411
vf, last_revision_a, last_revision_b, last_revision_base = data
412
return vf.plan_merge(last_revision_a, last_revision_b,
415
def plan_file_lca_merge(self, file_id, other, base=None):
416
"""Generate a merge plan based lca-newness.
418
If the file contains uncommitted changes in this tree, they will be
419
attributed to the 'current:' pseudo-revision. If the file contains
420
uncommitted changes in the other tree, they will be assigned to the
421
'other:' pseudo-revision.
423
data = self._get_plan_merge_data(file_id, other, base)
424
vf, last_revision_a, last_revision_b, last_revision_base = data
425
return vf.plan_lca_merge(last_revision_a, last_revision_b,
428
def _iter_parent_trees(self):
429
"""Iterate through parent trees, defaulting to Tree.revision_tree."""
430
for revision_id in self.get_parent_ids():
432
yield self.revision_tree(revision_id)
433
except errors.NoSuchRevisionInTree:
434
yield self.repository.revision_tree(revision_id)
436
def _get_file_revision(self, file_id, vf, tree_revision):
437
"""Ensure that file_id, tree_revision is in vf to plan the merge."""
439
if getattr(self, '_repository', None) is None:
440
last_revision = tree_revision
441
parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
442
self._iter_parent_trees()]
443
vf.add_lines((file_id, last_revision), parent_keys,
444
self.get_file_lines(file_id))
445
repo = self.branch.repository
448
last_revision = self.get_file_revision(file_id)
449
base_vf = self._repository.texts
450
if base_vf not in vf.fallback_versionedfiles:
451
vf.fallback_versionedfiles.append(base_vf)
        return last_revision

    def _check_retrieved(self, ie, f):
        if not __debug__:
            return
        fp = osutils.fingerprint_file(f)
        f.seek(0)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise errors.BzrError(
                        "mismatched size for file %r in %r" %
                        (ie.file_id, self._store),
                        ["inventory expects %d bytes" % ie.text_size,
                         "file is actually %d bytes" % fp['size'],
                         "store is probably damaged/corrupt"])

        if ie.text_sha1 != fp['sha1']:
            raise errors.BzrError("wrong SHA-1 for file %r in %r" %
                    (ie.file_id, self._store),
                    ["inventory expects %s" % ie.text_sha1,
                     "file is actually %s" % fp['sha1'],
                     "store is probably damaged/corrupt"])

def path2id(self, path):
477
"""Return the id for path in this tree."""
478
raise NotImplementedError(self.path2id)
480
def paths2ids(self, paths, trees=[], require_versioned=True):
481
"""Return all the ids that can be reached by walking from paths.
483
Each path is looked up in this tree and any extras provided in
484
trees, and this is repeated recursively: the children in an extra tree
485
of a directory that has been renamed under a provided path in this tree
486
are all returned, even if none exist under a provided path in this
487
tree, and vice versa.
489
:param paths: An iterable of paths to start converting to ids from.
490
Alternatively, if paths is None, no ids should be calculated and None
491
will be returned. This is offered to make calling the api unconditional
492
for code that *might* take a list of files.
493
:param trees: Additional trees to consider.
494
:param require_versioned: If False, do not raise NotVersionedError if
495
an element of paths is not versioned in this tree and all of trees.
497
return find_ids_across_trees(paths, [self] + list(trees), require_versioned)
499
def iter_children(self, file_id):
500
entry = self.iter_entries_by_dir([file_id]).next()[1]
501
for child in getattr(entry, 'children', {}).itervalues():
            yield child.file_id

    def lock_read(self):
        """Lock this tree for multiple read only operations.

        :return: A bzrlib.lock.LogicalLockResult.
        """
        pass

def revision_tree(self, revision_id):
512
"""Obtain a revision tree for the revision revision_id.
514
The intention of this method is to allow access to possibly cached
515
tree data. Implementors of this method should raise NoSuchRevision if
516
the tree is not locally available, even if they could obtain the
517
tree via a repository or some other means. Callers are responsible
518
for finding the ultimate source for a revision tree.
520
:param revision_id: The revision_id of the requested tree.
522
:raises: NoSuchRevision if the tree cannot be obtained.
524
raise errors.NoSuchRevisionInTree(self, revision_id)

    def unknowns(self):
        """What files are present in this tree and unknown.

        :return: an iterator over the unknown files.
        """
        return iter([])

def filter_unversioned_files(self, paths):
537
"""Filter out paths that are versioned.
539
:return: set of paths.
541
raise NotImplementedError(self.filter_unversioned_files)
543
def walkdirs(self, prefix=""):
544
"""Walk the contents of this tree from path down.
546
This yields all the data about the contents of a directory at a time.
547
After each directory has been yielded, if the caller has mutated the
548
list to exclude some directories, they are then not descended into.
550
The data yielded is of the form:
551
((directory-relpath, directory-path-from-root, directory-fileid),
552
[(relpath, basename, kind, lstat, path_from_tree_root, file_id,
553
versioned_kind), ...]),
554
- directory-relpath is the containing dirs relpath from prefix
555
- directory-path-from-root is the containing dirs path from /
556
- directory-fileid is the id of the directory if it is versioned.
557
- relpath is the relative path within the subtree being walked.
558
- basename is the basename
559
        - kind is the kind of the file now. If unknown then the file is not
560
present within the tree - but it may be recorded as versioned. See
562
- lstat is the stat data *if* the file was statted.
563
- path_from_tree_root is the path from the root of the tree.
564
- file_id is the file_id if the entry is versioned.
565
- versioned_kind is the kind of the file as last recorded in the
566
versioning system. If 'unknown' the file is not versioned.
567
One of 'kind' and 'versioned_kind' must not be 'unknown'.
569
:param prefix: Start walking from prefix within the tree rather than
570
at the root. This allows one to walk a subtree but get paths that are
571
relative to a tree rooted higher up.
572
:return: an iterator over the directory data.
574
raise NotImplementedError(self.walkdirs)
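    # Illustrative usage sketch: a walkdirs() consumer sees one directory
    # block at a time and can prune descent by mutating the list in place;
    # ``tree`` is a hypothetical concrete Tree and 'build' a hypothetical
    # directory name::
    #
    #   for (dirpath, dirpath_from_root, dir_id), dirblock in tree.walkdirs():
    #       dirblock[:] = [entry for entry in dirblock
    #                      if entry[1] != 'build']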
576
def supports_content_filtering(self):
579
def _content_filter_stack(self, path=None, file_id=None):
580
"""The stack of content filters for a path if filtering is supported.
582
Readers will be applied in first-to-last order.
583
Writers will be applied in last-to-first order.
584
Either the path or the file-id needs to be provided.
586
:param path: path relative to the root of the tree
588
:param file_id: file_id or None if unknown
589
:return: the list of filters - [] if there are none
591
filter_pref_names = filters._get_registered_names()
592
if len(filter_pref_names) == 0:
595
path = self.id2path(file_id)
596
prefs = self.iter_search_rules([path], filter_pref_names).next()
597
stk = filters._get_filter_stack_for(prefs)
598
if 'filters' in debug.debug_flags:
599
trace.note("*** %s content-filter: %s => %r" % (path,prefs,stk))
602
def _content_filter_stack_provider(self):
603
"""A function that returns a stack of ContentFilters.
605
The function takes a path (relative to the top of the tree) and a
606
file-id as parameters.
608
:return: None if content filtering is not supported by this tree.
610
if self.supports_content_filtering():
611
return lambda path, file_id: \
612
self._content_filter_stack(path, file_id)
616
def iter_search_rules(self, path_names, pref_names=None,
617
_default_searcher=None):
618
"""Find the preferences for filenames in a tree.
620
:param path_names: an iterable of paths to find attributes for.
621
Paths are given relative to the root of the tree.
622
:param pref_names: the list of preferences to lookup - None for all
623
:param _default_searcher: private parameter to assist testing - don't use
624
:return: an iterator of tuple sequences, one per path-name.
625
See _RulesSearcher.get_items for details on the tuple sequence.
627
if _default_searcher is None:
628
_default_searcher = rules._per_user_searcher
629
searcher = self._get_rules_searcher(_default_searcher)
630
if searcher is not None:
631
if pref_names is not None:
632
for path in path_names:
633
yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)

638
def _get_rules_searcher(self, default_searcher):
639
"""Get the RulesSearcher for this tree given the default one."""
640
searcher = default_searcher
        return searcher


class InventoryTree(Tree):
    """A tree that relies on an inventory for its metadata.

    Trees contain an `Inventory` object, and also know how to retrieve
    file texts mentioned in the inventory, either from a working
    directory or from a store.

    It is possible for trees to contain files that are not described
    in their inventory or vice versa; for this use `filenames()`.

    Subclasses should set the _inventory attribute, which is considered
    private to external API users.
    """

def get_canonical_inventory_paths(self, paths):
659
"""Like get_canonical_inventory_path() but works on multiple items.
661
:param paths: A sequence of paths relative to the root of the tree.
662
:return: A list of paths, with each item the corresponding input path
663
adjusted to account for existing elements that match case
666
return list(self._yield_canonical_inventory_paths(paths))
668
def get_canonical_inventory_path(self, path):
669
"""Returns the first inventory item that case-insensitively matches path.
671
If a path matches exactly, it is returned. If no path matches exactly
672
but more than one path matches case-insensitively, it is implementation
673
defined which is returned.
675
If no path matches case-insensitively, the input path is returned, but
676
with as many path entries that do exist changed to their canonical
679
If you need to resolve many names from the same tree, you should
680
use get_canonical_inventory_paths() to avoid O(N) behaviour.
682
        :param path: A path relative to the root of the tree.
683
:return: The input path adjusted to account for existing elements
684
that match case insensitively.
686
return self._yield_canonical_inventory_paths([path]).next()
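    # Illustrative usage sketch: a user-supplied spelling can be mapped back
    # to the one recorded in the inventory; ``tree`` is hypothetical::
    #
    #   canonical = tree.get_canonical_inventory_path('readme.TXT')
    #   # -> 'README.txt', if that is the spelling stored in the inventory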

    def _yield_canonical_inventory_paths(self, paths):
        for path in paths:
            # First, if the path as specified exists exactly, just use it.
            if self.path2id(path) is not None:
                yield path
                continue
            cur_id = self.get_root_id()
            cur_path = ''
            bit_iter = iter(path.split("/"))
            for elt in bit_iter:
                lelt = elt.lower()
                new_path = None
                for child in self.iter_children(cur_id):
                    try:
                        # XXX: it seems like if the child is known to be in the
                        # tree, we shouldn't need to go from its id back to
                        # its path -- mbp 2010-02-11
                        #
                        # XXX: it seems like we could be more efficient
                        # by just directly looking up the original name and
                        # only then searching all children; also by not
                        # chopping paths so much. -- mbp 2010-02-11
                        child_base = os.path.basename(self.id2path(child))
                        if (child_base == elt):
                            # if we found an exact match, we can stop now; if
                            # we found an approximate match we need to keep
                            # searching because there might be an exact match
                            # later.
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                            break
                        elif child_base.lower() == lelt:
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                    except errors.NoSuchId:
                        # before a change is committed we can see this error...
                        continue
                if new_path:
                    cur_path = new_path
                else:
                    # got to the end of this directory and no entries matched.
                    # Return what matched so far, plus the rest as specified.
                    cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
                    break
            yield cur_path

def _get_inventory(self):
737
return self._inventory
739
inventory = property(_get_inventory,
740
doc="Inventory of this Tree")
743
def path2id(self, path):
744
"""Return the id for path in this tree."""
745
return self._inventory.path2id(path)
747
def id2path(self, file_id):
748
"""Return the path for a file id.
752
return self.inventory.id2path(file_id)
754
def has_id(self, file_id):
755
return self.inventory.has_id(file_id)
757
def has_or_had_id(self, file_id):
758
return self.inventory.has_id(file_id)
761
return iter(self.inventory)
763
def filter_unversioned_files(self, paths):
764
"""Filter out paths that are versioned.
766
:return: set of paths.
768
# NB: we specifically *don't* call self.has_filename, because for
769
# WorkingTrees that can indicate files that exist on disk but that
771
pred = self.inventory.has_filename
772
return set((p for p in paths if not pred(p)))
775
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
776
"""Walk the tree in 'by_dir' order.
778
This will yield each entry in the tree as a (path, entry) tuple.
779
The order that they are yielded is:
781
See Tree.iter_entries_by_dir for details.
783
:param yield_parents: If True, yield the parents from the root leading
784
down to specific_file_ids that have been requested. This has no
785
impact if specific_file_ids is None.
787
return self.inventory.iter_entries_by_dir(
788
specific_file_ids=specific_file_ids, yield_parents=yield_parents)
790
def get_file_by_path(self, path):
791
return self.get_file(self._inventory.path2id(path), path)


######################################################################
847
def find_ids_across_trees(filenames, trees, require_versioned=True):
848
"""Find the ids corresponding to specified filenames.
850
All matches in all trees will be used, and all children of matched
851
directories will be used.
853
:param filenames: The filenames to find file_ids for (if None, returns
855
:param trees: The trees to find file_ids within
856
:param require_versioned: if true, all specified filenames must occur in
858
:return: a set of file ids for the specified filenames and their children.
862
specified_path_ids = _find_ids_across_trees(filenames, trees,
864
return _find_children_across_trees(specified_path_ids, trees)
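# Illustrative usage sketch: for hypothetical trees ``old_tree`` and
# ``new_tree``, the returned set covers every match for 'doc' in either tree
# plus all children of any matched directory::
#
#   ids = find_ids_across_trees(['doc'], [old_tree, new_tree])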
867
def _find_ids_across_trees(filenames, trees, require_versioned):
868
"""Find the ids corresponding to specified filenames.
870
All matches in all trees will be used, but subdirectories are not scanned.
872
:param filenames: The filenames to find file_ids for
873
:param trees: The trees to find file_ids within
874
:param require_versioned: if true, all specified filenames must occur in
876
:return: a set of file ids for the specified filenames
879
interesting_ids = set()
880
for tree_path in filenames:
883
file_id = tree.path2id(tree_path)
884
if file_id is not None:
885
interesting_ids.add(file_id)
888
not_versioned.append(tree_path)
889
if len(not_versioned) > 0 and require_versioned:
890
raise errors.PathsNotVersionedError(not_versioned)
891
return interesting_ids
894
def _find_children_across_trees(specified_ids, trees):
895
"""Return a set including specified ids and their children.
897
All matches in all trees will be used.
899
:param trees: The trees to find file_ids within
900
:return: a set containing all specified ids and their children
902
interesting_ids = set(specified_ids)
903
pending = interesting_ids
904
# now handle children of interesting ids
905
# we loop so that we handle all children of each id in both trees
906
while len(pending) > 0:
908
for file_id in pending:
910
if not tree.has_or_had_id(file_id):
912
for child_id in tree.iter_children(file_id):
913
if child_id not in interesting_ids:
914
new_pending.add(child_id)
915
interesting_ids.update(new_pending)
916
pending = new_pending
917
return interesting_ids
920
class InterTree(InterObject):
921
"""This class represents operations taking place between two Trees.
923
Its instances have methods like 'compare' and contain references to the
924
source and target trees these operations are to be carried out on.
926
Clients of bzrlib should not need to use InterTree directly, rather they
927
should use the convenience methods on Tree such as 'Tree.compare()' which
928
will pass through to InterTree as appropriate.
    """

# Formats that will be used to test this InterTree. If both are
932
# None, this InterTree will not be tested (e.g. because a complex
934
_matching_from_tree_format = None
935
_matching_to_tree_format = None
939
def _changes_from_entries(self, source_entry, target_entry,
940
source_path=None, target_path=None):
941
"""Generate a iter_changes tuple between source_entry and target_entry.
943
:param source_entry: An inventory entry from self.source, or None.
944
:param target_entry: An inventory entry from self.target, or None.
945
:param source_path: The path of source_entry, if known. If not known
946
it will be looked up.
947
:param target_path: The path of target_entry, if known. If not known
948
it will be looked up.
949
:return: A tuple, item 0 of which is an iter_changes result tuple, and
950
item 1 is True if there are any changes in the result tuple.
952
if source_entry is None:
953
if target_entry is None:
955
file_id = target_entry.file_id
957
file_id = source_entry.file_id
958
if source_entry is not None:
959
source_versioned = True
960
source_name = source_entry.name
961
source_parent = source_entry.parent_id
962
if source_path is None:
963
source_path = self.source.id2path(file_id)
964
source_kind, source_executable, source_stat = \
965
self.source._comparison_data(source_entry, source_path)
967
source_versioned = False
971
source_executable = None
972
if target_entry is not None:
973
target_versioned = True
974
target_name = target_entry.name
975
target_parent = target_entry.parent_id
976
if target_path is None:
977
target_path = self.target.id2path(file_id)
978
target_kind, target_executable, target_stat = \
979
self.target._comparison_data(target_entry, target_path)
981
target_versioned = False
985
target_executable = None
986
versioned = (source_versioned, target_versioned)
987
kind = (source_kind, target_kind)
988
changed_content = False
989
if source_kind != target_kind:
990
changed_content = True
991
elif source_kind == 'file':
992
if (self.source.get_file_sha1(file_id, source_path, source_stat) !=
993
self.target.get_file_sha1(file_id, target_path, target_stat)):
994
changed_content = True
995
elif source_kind == 'symlink':
996
if (self.source.get_symlink_target(file_id) !=
997
self.target.get_symlink_target(file_id)):
998
changed_content = True
999
# XXX: Yes, the indentation below is wrong. But fixing it broke
1000
# test_merge.TestMergerEntriesLCAOnDisk.
1001
# test_nested_tree_subtree_renamed_and_modified. We'll wait for
1002
# the fix from bzr.dev -- vila 2009026
1003
elif source_kind == 'tree-reference':
1004
if (self.source.get_reference_revision(file_id, source_path)
1005
!= self.target.get_reference_revision(file_id, target_path)):
1006
changed_content = True
1007
parent = (source_parent, target_parent)
1008
name = (source_name, target_name)
1009
executable = (source_executable, target_executable)
1010
if (changed_content is not False or versioned[0] != versioned[1]
1011
or parent[0] != parent[1] or name[0] != name[1] or
1012
executable[0] != executable[1]):
1016
return (file_id, (source_path, target_path), changed_content,
1017
versioned, parent, name, kind, executable), changes
1020
def compare(self, want_unchanged=False, specific_files=None,
1021
extra_trees=None, require_versioned=False, include_root=False,
1022
want_unversioned=False):
1023
"""Return the changes from source to target.
1025
:return: A TreeDelta.
1026
:param specific_files: An optional list of file paths to restrict the
1027
comparison to. When mapping filenames to ids, all matches in all
1028
trees (including optional extra_trees) are used, and all children of
1029
matched directories are included.
1030
:param want_unchanged: An optional boolean requesting the inclusion of
1031
unchanged entries in the result.
1032
:param extra_trees: An optional list of additional trees to use when
1033
mapping the contents of specific_files (paths) to file_ids.
1034
:param require_versioned: An optional boolean (defaults to False). When
1035
supplied and True all the 'specific_files' must be versioned, or
1036
a PathsNotVersionedError will be thrown.
1037
:param want_unversioned: Scan for unversioned paths.
1039
trees = (self.source,)
1040
if extra_trees is not None:
1041
trees = trees + tuple(extra_trees)
1042
# target is usually the newer tree:
1043
specific_file_ids = self.target.paths2ids(specific_files, trees,
1044
require_versioned=require_versioned)
1045
if specific_files and not specific_file_ids:
1046
# All files are unversioned, so just return an empty delta
1047
# _compare_trees would think we want a complete delta
1048
result = delta.TreeDelta()
1049
fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
1050
result.unversioned = [(path, None,
1051
self.target._comparison_data(fake_entry, path)[0]) for path in
1054
return delta._compare_trees(self.source, self.target, want_unchanged,
1055
specific_files, include_root, extra_trees=extra_trees,
1056
require_versioned=require_versioned,
1057
want_unversioned=want_unversioned)
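    # Illustrative usage sketch: InterTree is normally reached through the
    # Tree convenience wrappers; for hypothetical trees ``basis`` and ``wt``
    # the two calls below are equivalent::
    #
    #   delta = wt.changes_from(basis)
    #   delta = InterTree.get(basis, wt).compare()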
1059
def iter_changes(self, include_unchanged=False,
1060
specific_files=None, pb=None, extra_trees=[],
1061
require_versioned=True, want_unversioned=False):
1062
"""Generate an iterator of changes between trees.
1064
A tuple is returned:
1065
(file_id, (path_in_source, path_in_target),
1066
changed_content, versioned, parent, name, kind,
1069
Changed_content is True if the file's content has changed. This
1070
includes changes to its kind, and to a symlink's target.
1072
versioned, parent, name, kind, executable are tuples of (from, to).
1073
If a file is missing in a tree, its kind is None.
1075
Iteration is done in parent-to-child order, relative to the target
1078
There is no guarantee that all paths are in sorted order: the
1079
requirement to expand the search due to renames may result in children
1080
that should be found early being found late in the search, after
1081
lexically later results have been returned.
1082
:param require_versioned: Raise errors.PathsNotVersionedError if a
1083
path in the specific_files list is not versioned in one of
1084
source, target or extra_trees.
1085
:param specific_files: An optional list of file paths to restrict the
1086
comparison to. When mapping filenames to ids, all matches in all
1087
trees (including optional extra_trees) are used, and all children
1088
of matched directories are included. The parents in the target tree
1089
of the specific files up to and including the root of the tree are
1090
always evaluated for changes too.
1091
:param want_unversioned: Should unversioned files be returned in the
1092
output. An unversioned file is defined as one with (False, False)
1093
for the versioned pair.
        """
        lookup_trees = [self.source]
        if extra_trees:
            lookup_trees.extend(extra_trees)
1098
        # The ids of items we need to examine to ensure delta consistency.
1099
precise_file_ids = set()
1100
changed_file_ids = []
1101
if specific_files == []:
1102
specific_file_ids = []
1104
specific_file_ids = self.target.paths2ids(specific_files,
1105
lookup_trees, require_versioned=require_versioned)
1106
if specific_files is not None:
1107
# reparented or added entries must have their parents included
1108
# so that valid deltas can be created. The seen_parents set
1109
# tracks the parents that we need to have.
1110
# The seen_dirs set tracks directory entries we've yielded.
1111
# After outputting version object in to_entries we set difference
1112
# the two seen sets and start checking parents.
1113
seen_parents = set()
1115
if want_unversioned:
1116
all_unversioned = sorted([(p.split('/'), p) for p in
1117
self.target.extras()
1118
if specific_files is None or
1119
osutils.is_inside_any(specific_files, p)])
1120
all_unversioned = collections.deque(all_unversioned)
1122
all_unversioned = collections.deque()
1124
from_entries_by_dir = list(self.source.iter_entries_by_dir(
1125
specific_file_ids=specific_file_ids))
1126
from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
1127
to_entries_by_dir = list(self.target.iter_entries_by_dir(
1128
specific_file_ids=specific_file_ids))
1129
num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
1131
# the unversioned path lookup only occurs on real trees - where there
1132
# can be extras. So the fake_entry is solely used to look up
1133
        # executable values when execute is not supported.
1134
fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
1135
for target_path, target_entry in to_entries_by_dir:
1136
while (all_unversioned and
1137
all_unversioned[0][0] < target_path.split('/')):
1138
unversioned_path = all_unversioned.popleft()
1139
target_kind, target_executable, target_stat = \
1140
self.target._comparison_data(fake_entry, unversioned_path[1])
1141
yield (None, (None, unversioned_path[1]), True, (False, False),
1143
(None, unversioned_path[0][-1]),
1144
(None, target_kind),
1145
(None, target_executable))
1146
source_path, source_entry = from_data.get(target_entry.file_id,
1148
result, changes = self._changes_from_entries(source_entry,
1149
target_entry, source_path=source_path, target_path=target_path)
1150
to_paths[result[0]] = result[1][1]
1155
pb.update('comparing files', entry_count, num_entries)
1156
if changes or include_unchanged:
1157
if specific_file_ids is not None:
1158
new_parent_id = result[4][1]
1159
precise_file_ids.add(new_parent_id)
1160
changed_file_ids.append(result[0])
1162
# Ensure correct behaviour for reparented/added specific files.
1163
if specific_files is not None:
1164
# Record output dirs
1165
if result[6][1] == 'directory':
1166
seen_dirs.add(result[0])
1167
# Record parents of reparented/added entries.
1168
versioned = result[3]
1170
if not versioned[0] or parents[0] != parents[1]:
1171
seen_parents.add(parents[1])
1172
while all_unversioned:
1173
# yield any trailing unversioned paths
1174
unversioned_path = all_unversioned.popleft()
1175
to_kind, to_executable, to_stat = \
1176
self.target._comparison_data(fake_entry, unversioned_path[1])
1177
yield (None, (None, unversioned_path[1]), True, (False, False),
1179
(None, unversioned_path[0][-1]),
1181
(None, to_executable))
1182
# Yield all remaining source paths
1183
for path, from_entry in from_entries_by_dir:
1184
file_id = from_entry.file_id
1185
if file_id in to_paths:
1188
if not self.target.has_id(file_id):
1189
# common case - paths we have not emitted are not present in
1193
to_path = self.target.id2path(file_id)
1196
pb.update('comparing files', entry_count, num_entries)
1197
versioned = (True, False)
1198
parent = (from_entry.parent_id, None)
1199
name = (from_entry.name, None)
1200
from_kind, from_executable, stat_value = \
1201
self.source._comparison_data(from_entry, path)
1202
kind = (from_kind, None)
1203
executable = (from_executable, None)
1204
changed_content = from_kind is not None
1205
# the parent's path is necessarily known at this point.
1206
changed_file_ids.append(file_id)
1207
yield(file_id, (path, to_path), changed_content, versioned, parent,
1208
name, kind, executable)
1209
changed_file_ids = set(changed_file_ids)
1210
if specific_file_ids is not None:
1211
for result in self._handle_precise_ids(precise_file_ids,
1215
def _get_entry(self, tree, file_id):
1216
"""Get an inventory entry from a tree, with missing entries as None.
1218
If the tree raises NotImplementedError on accessing .inventory, then
1219
this is worked around using iter_entries_by_dir on just the file id
1222
:param tree: The tree to lookup the entry in.
1223
:param file_id: The file_id to lookup.
1226
inventory = tree.inventory
1227
except NotImplementedError:
1228
# No inventory available.
1230
iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
1231
return iterator.next()[1]
1232
except StopIteration:
1236
return inventory[file_id]
1237
except errors.NoSuchId:
1240
def _handle_precise_ids(self, precise_file_ids, changed_file_ids,
1241
discarded_changes=None):
1242
"""Fill out a partial iter_changes to be consistent.
1244
:param precise_file_ids: The file ids of parents that were seen during
1246
:param changed_file_ids: The file ids of already emitted items.
1247
:param discarded_changes: An optional dict of precalculated
1248
iter_changes items which the partial iter_changes had not output
1250
:return: A generator of iter_changes items to output.
1252
# process parents of things that had changed under the users
1253
# requested paths to prevent incorrect paths or parent ids which
1254
# aren't in the tree.
1255
while precise_file_ids:
1256
precise_file_ids.discard(None)
1257
# Don't emit file_ids twice
1258
precise_file_ids.difference_update(changed_file_ids)
1259
if not precise_file_ids:
1261
            # If there was something at a given output path in source, we
1262
# have to include the entry from source in the delta, or we would
1263
# be putting this entry into a used path.
1265
for parent_id in precise_file_ids:
1267
paths.append(self.target.id2path(parent_id))
1268
except errors.NoSuchId:
1269
# This id has been dragged in from the source by delta
1270
# expansion and isn't present in target at all: we don't
1271
# need to check for path collisions on it.
1274
old_id = self.source.path2id(path)
1275
precise_file_ids.add(old_id)
1276
precise_file_ids.discard(None)
1277
current_ids = precise_file_ids
1278
precise_file_ids = set()
1279
# We have to emit all of precise_file_ids that have been altered.
1280
# We may have to output the children of some of those ids if any
1281
# directories have stopped being directories.
1282
for file_id in current_ids:
1284
if discarded_changes:
1285
result = discarded_changes.get(file_id)
1290
old_entry = self._get_entry(self.source, file_id)
1291
new_entry = self._get_entry(self.target, file_id)
1292
result, changes = self._changes_from_entries(
1293
old_entry, new_entry)
1296
# Get this parents parent to examine.
1297
new_parent_id = result[4][1]
1298
precise_file_ids.add(new_parent_id)
1300
if (result[6][0] == 'directory' and
1301
result[6][1] != 'directory'):
1302
# This stopped being a directory, the old children have
1304
if old_entry is None:
1305
# Reusing a discarded change.
1306
old_entry = self._get_entry(self.source, file_id)
1307
for child in old_entry.children.values():
1308
precise_file_ids.add(child.file_id)
1309
changed_file_ids.add(result[0])
1313
class MultiWalker(object):
1314
"""Walk multiple trees simultaneously, getting combined results."""
1316
# Note: This could be written to not assume you can do out-of-order
1317
# lookups. Instead any nodes that don't match in all trees could be
1318
# marked as 'deferred', and then returned in the final cleanup loop.
1319
# For now, I think it is "nicer" to return things as close to the
1320
# "master_tree" order as we can.
1322
def __init__(self, master_tree, other_trees):
1323
"""Create a new MultiWalker.
1325
All trees being walked must implement "iter_entries_by_dir()", such
1326
that they yield (path, object) tuples, where that object will have a
1327
'.file_id' member, that can be used to check equality.
1329
:param master_tree: All trees will be 'slaved' to the master_tree such
1330
that nodes in master_tree will be used as 'first-pass' sync points.
1331
Any nodes that aren't in master_tree will be merged in a second
1333
:param other_trees: A list of other trees to walk simultaneously.
1335
self._master_tree = master_tree
1336
self._other_trees = other_trees
1338
# Keep track of any nodes that were properly processed just out of
1339
# order, that way we don't return them at the end, we don't have to
1340
# track *all* processed file_ids, just the out-of-order ones
1341
self._out_of_order_processed = set()
1344
def _step_one(iterator):
1345
"""Step an iter_entries_by_dir iterator.
1347
:return: (has_more, path, ie)
1348
If has_more is False, path and ie will be None.
1351
path, ie = iterator.next()
1352
except StopIteration:
1353
return False, None, None
1355
return True, path, ie
1358
def _cmp_path_by_dirblock(path1, path2):
1359
"""Compare two paths based on what directory they are in.
1361
This generates a sort order, such that all children of a directory are
1362
sorted together, and grandchildren are in the same order as the
1363
children appear. But all grandchildren come after all children.
1365
:param path1: first path
1366
:param path2: the second path
1367
:return: negative number if ``path1`` comes first,
1368
0 if paths are equal
1369
and a positive number if ``path2`` sorts first
1371
# Shortcut this special case
1374
# This is stolen from _dirstate_helpers_py.py, only switching it to
1375
# Unicode objects. Consider using encode_utf8() and then using the
1376
# optimized versions, or maybe writing optimized unicode versions.
1377
if not isinstance(path1, unicode):
1378
raise TypeError("'path1' must be a unicode string, not %s: %r"
1379
% (type(path1), path1))
1380
if not isinstance(path2, unicode):
1381
raise TypeError("'path2' must be a unicode string, not %s: %r"
1382
% (type(path2), path2))
1383
return cmp(MultiWalker._path_to_key(path1),
1384
MultiWalker._path_to_key(path2))
1387
def _path_to_key(path):
1388
dirname, basename = osutils.split(path)
1389
return (dirname.split(u'/'), basename)
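    # Illustrative sketch of the ordering produced by _path_to_key: siblings
    # sort before any grandchildren, so sorting
    #   [u'a/b/c', u'a', u'a/d', u'f', u'a/b']
    # with key=MultiWalker._path_to_key yields
    #   [u'a', u'f', u'a/b', u'a/d', u'a/b/c'].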
1391
def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
1392
"""Lookup an inventory entry by file_id.
1394
This is called when an entry is missing in the normal order.
1395
Generally this is because a file was either renamed, or it was
1396
deleted/added. If the entry was found in the inventory and not in
1397
extra_entries, it will be added to self._out_of_order_processed
1399
:param extra_entries: A dictionary of {file_id: (path, ie)}. This
1400
should be filled with entries that were found before they were
1401
used. If file_id is present, it will be removed from the
1403
:param other_tree: The Tree to search, in case we didn't find the entry
1405
:param file_id: The file_id to look for
1406
:return: (path, ie) if found or (None, None) if not present.
1408
if file_id in extra_entries:
1409
return extra_entries.pop(file_id)
1410
# TODO: Is id2path better as the first call, or is
1411
# inventory[file_id] better as a first check?
1413
cur_path = other_tree.id2path(file_id)
1414
except errors.NoSuchId:
1416
if cur_path is None:
1419
self._out_of_order_processed.add(file_id)
1420
cur_ie = other_tree.inventory[file_id]
1421
return (cur_path, cur_ie)
1424
"""Match up the values in the different trees."""
1425
for result in self._walk_master_tree():
1427
self._finish_others()
1428
for result in self._walk_others():
1431
def _walk_master_tree(self):
1432
"""First pass, walk all trees in lock-step.
1434
When we are done, all nodes in the master_tree will have been
1435
processed. _other_walkers, _other_entries, and _others_extra will be
1436
set on 'self' for future processing.
1438
# This iterator has the most "inlining" done, because it tends to touch
1439
# every file in the tree, while the others only hit nodes that don't
1441
master_iterator = self._master_tree.iter_entries_by_dir()
1443
other_walkers = [other.iter_entries_by_dir()
1444
for other in self._other_trees]
1445
other_entries = [self._step_one(walker) for walker in other_walkers]
1446
# Track extra nodes in the other trees
1447
others_extra = [{} for i in xrange(len(self._other_trees))]
1449
master_has_more = True
1450
step_one = self._step_one
1451
lookup_by_file_id = self._lookup_by_file_id
1452
out_of_order_processed = self._out_of_order_processed
1454
while master_has_more:
1455
(master_has_more, path, master_ie) = step_one(master_iterator)
1456
if not master_has_more:
1459
file_id = master_ie.file_id
1461
other_values_append = other_values.append
1462
next_other_entries = []
1463
next_other_entries_append = next_other_entries.append
1464
for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
1465
if not other_has_more:
1466
other_values_append(lookup_by_file_id(
1467
others_extra[idx], self._other_trees[idx], file_id))
1468
next_other_entries_append((False, None, None))
1469
elif file_id == other_ie.file_id:
1470
# This is the critical code path, as most of the entries
1471
# should match between most trees.
1472
other_values_append((other_path, other_ie))
1473
next_other_entries_append(step_one(other_walkers[idx]))
1475
# This walker did not match, step it until it either
1476
# matches, or we know we are past the current walker.
1477
other_walker = other_walkers[idx]
1478
other_extra = others_extra[idx]
1479
while (other_has_more and
1480
self._cmp_path_by_dirblock(other_path, path) < 0):
1481
other_file_id = other_ie.file_id
1482
if other_file_id not in out_of_order_processed:
1483
other_extra[other_file_id] = (other_path, other_ie)
1484
other_has_more, other_path, other_ie = \
1485
step_one(other_walker)
1486
if other_has_more and other_ie.file_id == file_id:
1487
# We ended up walking to this point, match and step
1489
other_values_append((other_path, other_ie))
1490
other_has_more, other_path, other_ie = \
1491
step_one(other_walker)
1493
# This record isn't in the normal order, see if it
1495
other_values_append(lookup_by_file_id(
1496
other_extra, self._other_trees[idx], file_id))
1497
next_other_entries_append((other_has_more, other_path,
1499
other_entries = next_other_entries
1501
# We've matched all the walkers, yield this datapoint
1502
yield path, file_id, master_ie, other_values
1503
self._other_walkers = other_walkers
1504
self._other_entries = other_entries
1505
self._others_extra = others_extra
1507
def _finish_others(self):
1508
"""Finish walking the other iterators, so we get all entries."""
1509
for idx, info in enumerate(self._other_entries):
1510
other_extra = self._others_extra[idx]
1511
(other_has_more, other_path, other_ie) = info
1512
while other_has_more:
1513
other_file_id = other_ie.file_id
1514
if other_file_id not in self._out_of_order_processed:
1515
other_extra[other_file_id] = (other_path, other_ie)
1516
other_has_more, other_path, other_ie = \
1517
self._step_one(self._other_walkers[idx])
1518
del self._other_entries
1520
def _walk_others(self):
1521
"""Finish up by walking all the 'deferred' nodes."""
1522
# TODO: One alternative would be to grab all possible unprocessed
1523
# file_ids, and then sort by path, and then yield them. That
1524
# might ensure better ordering, in case a caller strictly
1525
# requires parents before children.
1526
for idx, other_extra in enumerate(self._others_extra):
1527
others = sorted(other_extra.itervalues(),
1528
key=lambda x: self._path_to_key(x[0]))
1529
for other_path, other_ie in others:
1530
file_id = other_ie.file_id
1531
# We don't need to check out_of_order_processed here, because
1532
# the lookup_by_file_id will be removing anything processed
1533
# from the extras cache
1534
other_extra.pop(file_id)
1535
other_values = [(None, None) for i in xrange(idx)]
1536
other_values.append((other_path, other_ie))
1537
for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
1538
alt_idx = alt_idx + idx + 1
1539
alt_extra = self._others_extra[alt_idx]
1540
alt_tree = self._other_trees[alt_idx]
1541
other_values.append(self._lookup_by_file_id(
1542
alt_extra, alt_tree, file_id))
1543
yield other_path, file_id, None, other_values