# Copyright (C) 2005-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Tree classes, representing directory at point in time.
20
from __future__ import absolute_import
21
from cStringIO import StringIO
24
from bzrlib.trace import mutter, note
25
from bzrlib.errors import BzrError, BzrCheckError
26
from bzrlib.inventory import Inventory
27
from bzrlib.osutils import pumpfile, appendpath, fingerprint_file
24
from bzrlib.lazy_import import lazy_import
25
lazy_import(globals(), """
29
conflicts as _mod_conflicts,
36
revision as _mod_revision,
40
from bzrlib.i18n import gettext
43
from bzrlib.decorators import needs_read_lock
44
from bzrlib.inter import InterObject
45
from bzrlib.symbol_versioning import (
32


class Tree(object):
    """Abstract file tree.

    There are several subclasses:

    * `WorkingTree` exists as files on disk editable by the user.

    * `RevisionTree` is a tree as recorded at some point in the past.

    Trees can be compared, etc, regardless of whether they are working
    trees or versioned trees.
    """
    def has_versioned_directories(self):
        """Whether this tree can contain explicitly versioned directories.

        This defaults to True, but some implementations may want to override
        it.
        """
        return True

    def changes_from(self, other, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):
        """Return a TreeDelta of the changes from other to this tree.

        :param other: A tree to compare with.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children of
            matched directories are included.
        :param want_unchanged: An optional boolean requesting the inclusion of
            unchanged entries in the result.
        :param extra_trees: An optional list of additional trees to use when
            mapping the contents of specific_files (paths) to file_ids.
        :param require_versioned: An optional boolean (defaults to False). When
            supplied and True all the 'specific_files' must be versioned, or
            a PathsNotVersionedError will be thrown.
        :param want_unversioned: Scan for unversioned paths.

        The comparison will be performed by an InterTree object looked up on
        self and other.
        """
        # Martin observes that Tree.changes_from returns a TreeDelta and this
        # may confuse people, because the class name of the returned object is
        # a synonym of the object referenced in the method name.
        return InterTree.get(other, self).compare(
            want_unchanged=want_unchanged,
            specific_files=specific_files,
            extra_trees=extra_trees,
            require_versioned=require_versioned,
            include_root=include_root,
            want_unversioned=want_unversioned,
            )

    def iter_changes(self, from_tree, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=None,
                     require_versioned=True, want_unversioned=False):
        """See InterTree.iter_changes"""
        intertree = InterTree.get(from_tree, self)
        return intertree.iter_changes(include_unchanged, specific_files, pb,
            extra_trees, require_versioned, want_unversioned=want_unversioned)
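
    # A minimal usage sketch for the comparison API above (illustrative only;
    # it assumes a working tree exists at '.' and uses
    # bzrlib.workingtree.WorkingTree.open, which is not part of this module):
    #
    #   from bzrlib.workingtree import WorkingTree
    #   wt = WorkingTree.open('.')
    #   basis = wt.basis_tree()
    #   # High-level delta, grouped into added/removed/renamed/modified:
    #   tree_delta = wt.changes_from(basis)
    #   # Low-level per-file change tuples:
    #   for change in wt.iter_changes(basis, include_unchanged=False):
    #       file_id, (old_path, new_path) = change[0], change[1]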

    def conflicts(self):
        """Get a list of the conflicts in the tree.

        Each conflict is an instance of bzrlib.conflicts.Conflict.
        """
        return _mod_conflicts.ConflictList()

    def extras(self):
        """For trees that can have unversioned files, return all such paths."""
        return []

    def get_parent_ids(self):
        """Get the parent ids for this tree.

        :return: a list of parent ids. [] is returned to indicate
            a tree with no parents.
        :raises: BzrError if the parents are not known.
        """
        raise NotImplementedError(self.get_parent_ids)

    def has_filename(self, filename):
        """True if the tree has given filename."""
        raise NotImplementedError(self.has_filename)

    def has_id(self, file_id):
        raise NotImplementedError(self.has_id)

    @deprecated_method(deprecated_in((2, 4, 0)))
    def __contains__(self, file_id):
        return self.has_id(file_id)

    def has_or_had_id(self, file_id):
        raise NotImplementedError(self.has_or_had_id)

    def is_ignored(self, filename):
        """Check whether the filename is ignored by this tree.

        :param filename: The relative filename within the tree.
        :return: True if the filename is ignored.
        """
        return False

    def all_file_ids(self):
        """Iterate through all file ids, including ids for missing files."""
        raise NotImplementedError(self.all_file_ids)

    def id2path(self, file_id):
        """Return the path for a file id.
        """
        raise NotImplementedError(self.id2path)

    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the
        grandchildren.

        For example, in the tree::

           a/
             b/
               c
             d/
               e
           f/
             g

        The yield order (ignoring root) would be::

          a, f, a/b, a/d, a/b/c, a/d/e, f/g

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        """
        raise NotImplementedError(self.iter_entries_by_dir)

    def iter_child_entries(self, file_id, path=None):
        """Iterate over the children of a directory or tree reference.

        :param file_id: File id of the directory/tree-reference
        :param path: Optional path of the directory
        :raise NoSuchId: When the file_id does not exist
        :return: Iterator over entries in the directory
        """
        raise NotImplementedError(self.iter_child_entries)

    def list_files(self, include_root=False, from_dir=None, recursive=True):
        """List all files in this tree.

        :param include_root: Whether to include the entry for the tree root
        :param from_dir: Directory under which to list files
        :param recursive: Whether to list files recursively
        :return: iterator over tuples of (path, versioned, kind, file_id,
            inventory entry)
        """
        raise NotImplementedError(self.list_files)

    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path, entry.file_id

    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
            % self.__class__.__name__)

    def stored_kind(self, file_id):
        """File kind stored for this file_id.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        """
        return self.kind(file_id)

    def path_content_summary(self, path):
        """Get a summary of the information about path.

        All the attributes returned are for the canonical form, not the
        convenient form (if content filters are in use.)

        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file and the size of the
                canonical form can be cheaply determined, None otherwise.
            exec is None unless kind is file and the platform supports the 'x'
                bit.
            sha1-or-link is the link target if kind is symlink, or the sha1 if
                it can be obtained without reading the file.
        """
        raise NotImplementedError(self.path_content_summary)

    def get_reference_revision(self, file_id, path=None):
        raise NotImplementedError("Tree subclass %s must implement "
                                  "get_reference_revision"
            % self.__class__.__name__)

    def _comparison_data(self, entry, path):
        """Return a tuple of kind, executable, stat_value for a file.

        entry may be None if there is no inventory entry for the file, but
        path must always be supplied.

        kind is None if there is no file present (even if an inventory id is
        present).  executable is False for non-file entries.
        """
        raise NotImplementedError(self._comparison_data)

    def _file_size(self, entry, stat_value):
        raise NotImplementedError(self._file_size)

    def get_file(self, file_id, path=None):
        """Return a file object for the file file_id in the tree.

        If both file_id and path are defined, it is implementation defined as
        to which one is used.
        """
        raise NotImplementedError(self.get_file)

    def get_file_with_stat(self, file_id, path=None):
        """Get a file handle and stat object for file_id.

        The default implementation returns (self.get_file, None) for backwards
        compatibility.

        :param file_id: The file id to read.
        :param path: The path of the file, if it is known.
        :return: A tuple (file_handle, stat_value_or_None). If the tree has
            no stat facility, or need for a stat cache feedback during commit,
            it may return None for the second element of the tuple.
        """
        return (self.get_file(file_id, path), None)

    def get_file_text(self, file_id, path=None):
        """Return the byte content of a file.

        :param file_id: The file_id of the file.
        :param path: The path of the file.

        If both file_id and path are supplied, an implementation may use
        either one.

        :returns: A single byte string for the whole file.
        """
        my_file = self.get_file(file_id, path)
        try:
            return my_file.read()
        finally:
            my_file.close()

    def get_file_lines(self, file_id, path=None):
        """Return the content of a file, as lines.

        :param file_id: The file_id of the file.
        :param path: The path of the file.

        If both file_id and path are supplied, an implementation may use
        either one.
        """
        return osutils.split_lines(self.get_file_text(file_id, path))

    def get_file_verifier(self, file_id, path=None, stat_value=None):
        """Return a verifier for a file.

        The default implementation returns a sha1.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        :param stat_value: Optional stat value for the object
        :return: Tuple with verifier name and verifier data
        """
        return ("SHA1", self.get_file_sha1(file_id, path=path,
            stat_value=stat_value))

    def get_file_sha1(self, file_id, path=None, stat_value=None):
        """Return the SHA1 file for a file.

        :note: callers should use get_file_verifier instead
            where possible, as the underlying repository implementation may
            have quicker access to a non-sha1 verifier.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        :param stat_value: Optional stat value for the object
        """
        raise NotImplementedError(self.get_file_sha1)

    def get_file_mtime(self, file_id, path=None):
        """Return the modification time for a file.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        """
        raise NotImplementedError(self.get_file_mtime)

    def get_file_size(self, file_id):
        """Return the size of a file in bytes.

        This applies only to regular files.  If invoked on directories or
        symlinks, it will return None.

        :param file_id: The file-id of the file
        """
        raise NotImplementedError(self.get_file_size)

    def is_executable(self, file_id, path=None):
        """Check if a file is executable.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        """
        raise NotImplementedError(self.is_executable)

    def iter_files_bytes(self, desired_files):
        """Iterate through file contents.

        Files will not necessarily be returned in the order they occur in
        desired_files.  No specific order is guaranteed.

        Yields pairs of identifier, bytes_iterator.  identifier is an opaque
        value supplied by the caller as part of desired_files.  It should
        uniquely identify the file version in the caller's context.  (Examples:
        an index number or a TreeTransform trans_id.)

        bytes_iterator is an iterable of bytestrings for the file.  The
        kind of iterable and length of the bytestrings are unspecified, but for
        this implementation, it is a tuple containing a single bytestring with
        the complete text of the file.

        :param desired_files: a list of (file_id, identifier) pairs
        """
        for file_id, identifier in desired_files:
            # We wrap the string in a tuple so that we can return an iterable
            # of bytestrings.  (Technically, a bytestring is also an iterable
            # of bytestrings, but iterating through each character is not
            # performant.)
            cur_file = (self.get_file_text(file_id),)
            yield identifier, cur_file
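
    # A minimal usage sketch for iter_files_bytes (the identifiers 'a' and 'b'
    # are arbitrary example values chosen by the caller, not part of the API):
    #
    #   wanted = [(file_id_a, 'a'), (file_id_b, 'b')]
    #   for identifier, byte_chunks in tree.iter_files_bytes(wanted):
    #       text = ''.join(byte_chunks)   # reassemble the file content
    #       # results may arrive in any order, so dispatch on identifier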

    def get_symlink_target(self, file_id, path=None):
        """Get the target for a given file_id.

        It is assumed that the caller already knows that file_id is referencing
        a symlink.

        :param file_id: Handle for the symlink entry.
        :param path: The path of the file.
        If both file_id and path are supplied, an implementation may use
        either one.
        :return: The path the symlink points to.
        """
        raise NotImplementedError(self.get_symlink_target)

    def get_root_id(self):
        """Return the file_id for the root of this tree."""
        raise NotImplementedError(self.get_root_id)

    def annotate_iter(self, file_id,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.

        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.

        :param file_id: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of
            this value.
        """
        raise NotImplementedError(self.annotate_iter)
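
    # Usage sketch for annotate_iter on a concrete subclass (illustrative
    # only; `tree` would be a RevisionTree or WorkingTree obtained elsewhere):
    #
    #   for revision_id, line in tree.annotate_iter(file_id):
    #       print '%s: %s' % (revision_id, line.rstrip('\n'))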

    def _get_plan_merge_data(self, file_id, other, base):
        from bzrlib import versionedfile
        vf = versionedfile._PlanMergeVersionedFile(file_id)
        last_revision_a = self._get_file_revision(file_id, vf, 'this:')
        last_revision_b = other._get_file_revision(file_id, vf, 'other:')
        if base is None:
            last_revision_base = None
        else:
            last_revision_base = base._get_file_revision(file_id, vf, 'base:')
        return vf, last_revision_a, last_revision_b, last_revision_base

    def plan_file_merge(self, file_id, other, base=None):
        """Generate a merge plan based on annotations.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision.  If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,
                             last_revision_base)

    def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision.  If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 last_revision_base)

    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)

    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""

        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                         self.get_file_lines(file_id))
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self.get_file_revision(file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision

    def _check_retrieved(self, ie, f):
        fp = osutils.fingerprint_file(f)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise errors.BzrError(
                        "mismatched size for file %r in %r" %
                        (ie.file_id, self._store),
                        ["inventory expects %d bytes" % ie.text_size,
                         "file is actually %d bytes" % fp['size'],
                         "store is probably damaged/corrupt"])

        if ie.text_sha1 != fp['sha1']:
            raise errors.BzrError("wrong SHA-1 for file %r in %r" %
                    (ie.file_id, self._store),
                    ["inventory expects %s" % ie.text_sha1,
                     "file is actually %s" % fp['sha1'],
                     "store is probably damaged/corrupt"])

    def path2id(self, path):
        """Return the id for path in this tree."""
        raise NotImplementedError(self.path2id)

    def paths2ids(self, paths, trees=[], require_versioned=True):
        """Return all the ids that can be reached by walking from paths.

        Each path is looked up in this tree and any extras provided in
        trees, and this is repeated recursively: the children in an extra tree
        of a directory that has been renamed under a provided path in this tree
        are all returned, even if none exist under a provided path in this
        tree, and vice versa.

        :param paths: An iterable of paths to start converting to ids from.
            Alternatively, if paths is None, no ids should be calculated and None
            will be returned. This is offered to make calling the api unconditional
            for code that *might* take a list of files.
        :param trees: Additional trees to consider.
        :param require_versioned: If False, do not raise NotVersionedError if
            an element of paths is not versioned in this tree and all of trees.
        """
        return find_ids_across_trees(paths, [self] + list(trees), require_versioned)

    def iter_children(self, file_id):
        """Iterate over the file ids of the children of an entry.

        :param file_id: File id of the entry
        :return: Iterator over child file ids.
        """
        raise NotImplementedError(self.iter_children)

    def lock_read(self):
        """Lock this tree for multiple read only operations.

        :return: A bzrlib.lock.LogicalLockResult.
        """
        pass

    def revision_tree(self, revision_id):
        """Obtain a revision tree for the revision revision_id.

        The intention of this method is to allow access to possibly cached
        tree data. Implementors of this method should raise NoSuchRevision if
        the tree is not locally available, even if they could obtain the
        tree via a repository or some other means. Callers are responsible
        for finding the ultimate source for a revision tree.

        :param revision_id: The revision_id of the requested tree.
        :raises: NoSuchRevision if the tree cannot be obtained.
        """
        raise errors.NoSuchRevisionInTree(self, revision_id)

    def unknowns(self):
        """What files are present in this tree and unknown.

        :return: an iterator over the unknown files.
        """
        return iter([])

    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.

        :return: set of paths.
        """
        raise NotImplementedError(self.filter_unversioned_files)

    def walkdirs(self, prefix=""):
        """Walk the contents of this tree from path down.

        This yields all the data about the contents of a directory at a time.
        After each directory has been yielded, if the caller has mutated the
        list to exclude some directories, they are then not descended into.

        The data yielded is of the form:
        ((directory-relpath, directory-path-from-root, directory-fileid),
        [(relpath, basename, kind, lstat, path_from_tree_root, file_id,
          versioned_kind), ...]),
         - directory-relpath is the containing dirs relpath from prefix
         - directory-path-from-root is the containing dirs path from /
         - directory-fileid is the id of the directory if it is versioned.
         - relpath is the relative path within the subtree being walked.
         - basename is the basename
         - kind is the kind of the file now. If unknown then the file is not
           present within the tree - but it may be recorded as versioned. See
           versioned_kind.
         - lstat is the stat data *if* the file was statted.
         - path_from_tree_root is the path from the root of the tree.
         - file_id is the file_id if the entry is versioned.
         - versioned_kind is the kind of the file as last recorded in the
           versioning system. If 'unknown' the file is not versioned.
        One of 'kind' and 'versioned_kind' must not be 'unknown'.

        :param prefix: Start walking from prefix within the tree rather than
            at the root. This allows one to walk a subtree but get paths that are
            relative to a tree rooted higher up.
        :return: an iterator over the directory data.
        """
        raise NotImplementedError(self.walkdirs)

    def supports_content_filtering(self):
        return False

    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
            or None if unknown
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        """
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            trace.note(
                gettext("*** {0} content-filter: {1} => {2!r}").format(
                    path, prefs, stk))
        return stk

    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        """
        if self.supports_content_filtering():
            return lambda path, file_id: \
                    self._content_filter_stack(path, file_id)
        else:
            return None

    def iter_search_rules(self, path_names, pref_names=None,
                          _default_searcher=None):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
          Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
          See _RulesSearcher.get_items for details on the tuple sequence.
        """
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)

    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher


class InventoryTree(Tree):
    """A tree that relies on an inventory for its metadata.

    Trees contain an `Inventory` object, and also know how to retrieve
    file texts mentioned in the inventory, either from a working
    directory or from a store.

    It is possible for trees to contain files that are not described
    in their inventory or vice versa; for this use `filenames()`.

    Subclasses should set the _inventory attribute, which is considered
    private to external API users.
    """

    def get_canonical_inventory_paths(self, paths):
        """Like get_canonical_inventory_path() but works on multiple items.

        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case
            insensitively.
        """
        return list(self._yield_canonical_inventory_paths(paths))

    def get_canonical_inventory_path(self, path):
        """Returns the first inventory item that case-insensitively matches path.

        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches case-insensitively, it is implementation
        defined which is returned.

        If no path matches case-insensitively, the input path is returned, but
        with as many path entries that do exist changed to their canonical
        form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        """
        return self._yield_canonical_inventory_paths([path]).next()
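
    # Usage sketch (illustrative only; it assumes the tree versions a file
    # named README and a directory named src):
    #
    #   tree.get_canonical_inventory_path('readme')   # -> 'README'
    #   tree.get_canonical_inventory_paths(['readme', 'SRC/main.py'])
    #   # -> ['README', 'src/main.py'] if those are the versioned spellings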

    def _yield_canonical_inventory_paths(self, paths):
        for path in paths:
            # First, if the path as specified exists exactly, just use it.
            if self.path2id(path) is not None:
                yield path
                continue
            cur_id = self.get_root_id()
            cur_path = ''
            bit_iter = iter(path.split("/"))
            for elt in bit_iter:
                lelt = elt.lower()
                new_path = None
                for child in self.iter_children(cur_id):
                    try:
                        # XXX: it seem like if the child is known to be in the
                        # tree, we shouldn't need to go from its id back to
                        # its path -- mbp 2010-02-11
                        #
                        # XXX: it seems like we could be more efficient
                        # by just directly looking up the original name and
                        # only then searching all children; also by not
                        # chopping paths so much. -- mbp 2010-02-11
                        child_base = os.path.basename(self.id2path(child))
                        if child_base == elt:
                            # if we found an exact match, we can stop now; if
                            # we found an approximate match we need to keep
                            # searching because there might be an exact match
                            # later.
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                            break
                        elif child_base.lower() == lelt:
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                    except errors.NoSuchId:
                        # before a change is committed we can see this error...
                        continue
                if new_path:
                    cur_path = new_path
                else:
                    # got to the end of this directory and no entries matched.
                    # Return what matched so far, plus the rest as specified.
                    cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
                    break
            yield cur_path

    @deprecated_method(deprecated_in((2, 5, 0)))
    def _get_inventory(self):
        return self._inventory

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def _get_root_inventory(self):
        return self._inventory

    root_inventory = property(_get_root_inventory,
                              doc="Root inventory of this tree")

    def _unpack_file_id(self, file_id):
        """Find the inventory and inventory file id for a tree file id.

        :param file_id: The tree file id, as bytestring or tuple
        :return: Inventory and inventory file id
        """
        if isinstance(file_id, tuple):
            if len(file_id) != 1:
                raise ValueError("nested trees not yet supported: %r" % file_id)
            file_id = file_id[0]
        return self.root_inventory, file_id

    def path2id(self, path):
        """Return the id for path in this tree."""
        return self._path2inv_file_id(path)[1]

    def _path2inv_file_id(self, path):
        """Lookup an inventory and inventory file id by path.

        :param path: Path to look up
        :return: tuple with inventory and inventory file id
        """
        # FIXME: Support nested trees
        return self.root_inventory, self.root_inventory.path2id(path)

    def id2path(self, file_id):
        """Return the path for a file id.
        """
        inventory, file_id = self._unpack_file_id(file_id)
        return inventory.id2path(file_id)

    def has_id(self, file_id):
        inventory, file_id = self._unpack_file_id(file_id)
        return inventory.has_id(file_id)

    def has_or_had_id(self, file_id):
        inventory, file_id = self._unpack_file_id(file_id)
        return inventory.has_id(file_id)

    def all_file_ids(self):
        return set(
            [entry.file_id for path, entry in self.iter_entries_by_dir()])

    @deprecated_method(deprecated_in((2, 4, 0)))
    def __iter__(self):
        return iter(self.all_file_ids())

    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.

        :return: set of paths.
        """
        # NB: we specifically *don't* call self.has_filename, because for
        # WorkingTrees that can indicate files that exist on disk but that
        # are not versioned.
        return set((p for p in paths if self.path2id(p) is None))

    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        See Tree.iter_entries_by_dir for details.

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        """
        if specific_file_ids is None:
            inventory_file_ids = None
        else:
            inventory_file_ids = []
            for tree_file_id in specific_file_ids:
                inventory, inv_file_id = self._unpack_file_id(tree_file_id)
                if inventory is not self.root_inventory: # for now
                    raise AssertionError("%r != %r" % (
                        inventory, self.root_inventory))
                inventory_file_ids.append(inv_file_id)
        # FIXME: Handle nested trees
        return self.root_inventory.iter_entries_by_dir(
            specific_file_ids=inventory_file_ids, yield_parents=yield_parents)

    def iter_child_entries(self, file_id, path=None):
        inv, inv_file_id = self._unpack_file_id(file_id)
        return inv[inv_file_id].children.itervalues()

    @deprecated_method(deprecated_in((2, 5, 0)))
    def get_file_by_path(self, path):
        return self.get_file(self.path2id(path), path)

    def iter_children(self, file_id, path=None):
        """See Tree.iter_children."""
        entry = self.iter_entries_by_dir([file_id]).next()[1]
        for child in getattr(entry, 'children', {}).itervalues():
            yield child.file_id


def find_ids_across_trees(filenames, trees, require_versioned=True):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, and all children of matched
    directories will be used.

    :param filenames: The filenames to find file_ids for (if None, returns
        None)
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames and their children.
    """
    if not filenames:
        return None
    specified_path_ids = _find_ids_across_trees(filenames, trees,
        require_versioned)
    return _find_children_across_trees(specified_path_ids, trees)
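
# Usage sketch (illustrative only; `wt` and `basis` would be obtained
# elsewhere, e.g. a WorkingTree and its basis tree):
#
#   ids = find_ids_across_trees(['src', 'README'], [wt, basis])
#   # `ids` now holds the file ids for both paths plus every child of src
#   # in either tree, suitable for passing to delta/merge code.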


def _find_ids_across_trees(filenames, trees, require_versioned):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, but subdirectories are not scanned.

    :param filenames: The filenames to find file_ids for
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames
    """
    not_versioned = []
    interesting_ids = set()
    for tree_path in filenames:
        not_found = True
        for tree in trees:
            file_id = tree.path2id(tree_path)
            if file_id is not None:
                interesting_ids.add(file_id)
                not_found = False
        if not_found:
            not_versioned.append(tree_path)
    if len(not_versioned) > 0 and require_versioned:
        raise errors.PathsNotVersionedError(not_versioned)
    return interesting_ids


def _find_children_across_trees(specified_ids, trees):
    """Return a set including specified ids and their children.

    All matches in all trees will be used.

    :param trees: The trees to find file_ids within
    :return: a set containing all specified ids and their children
    """
    interesting_ids = set(specified_ids)
    pending = interesting_ids
    # now handle children of interesting ids
    # we loop so that we handle all children of each id in both trees
    while len(pending) > 0:
        new_pending = set()
        for file_id in pending:
            for tree in trees:
                if not tree.has_or_had_id(file_id):
                    continue
                for child_id in tree.iter_children(file_id):
                    if child_id not in interesting_ids:
                        new_pending.add(child_id)
        interesting_ids.update(new_pending)
        pending = new_pending
    return interesting_ids


class InterTree(InterObject):
    """This class represents operations taking place between two Trees.

    Its instances have methods like 'compare' and contain references to the
    source and target trees these operations are to be carried out on.

    Clients of bzrlib should not need to use InterTree directly, rather they
    should use the convenience methods on Tree such as 'Tree.compare()' which
    will pass through to InterTree as appropriate.
    """

    # Formats that will be used to test this InterTree. If both are
    # None, this InterTree will not be tested (e.g. because a complex
    # setup is required.)
    _matching_from_tree_format = None
    _matching_to_tree_format = None

    _optimisers = []

    @classmethod
    def is_compatible(kls, source, target):
        # The default implementation is naive and uses the public API, so
        # it works for all trees.
        return True

    def _changes_from_entries(self, source_entry, target_entry,
                              source_path=None, target_path=None):
        """Generate a iter_changes tuple between source_entry and target_entry.

        :param source_entry: An inventory entry from self.source, or None.
        :param target_entry: An inventory entry from self.target, or None.
        :param source_path: The path of source_entry, if known. If not known
            it will be looked up.
        :param target_path: The path of target_entry, if known. If not known
            it will be looked up.
        :return: A tuple, item 0 of which is an iter_changes result tuple, and
            item 1 is True if there are any changes in the result tuple.
        """
        if source_entry is None:
            if target_entry is None:
                return None
            file_id = target_entry.file_id
        else:
            file_id = source_entry.file_id
        if source_entry is not None:
            source_versioned = True
            source_name = source_entry.name
            source_parent = source_entry.parent_id
            if source_path is None:
                source_path = self.source.id2path(file_id)
            source_kind, source_executable, source_stat = \
                self.source._comparison_data(source_entry, source_path)
        else:
            source_versioned = False
            source_name = None
            source_parent = None
            source_kind = None
            source_executable = None
        if target_entry is not None:
            target_versioned = True
            target_name = target_entry.name
            target_parent = target_entry.parent_id
            if target_path is None:
                target_path = self.target.id2path(file_id)
            target_kind, target_executable, target_stat = \
                self.target._comparison_data(target_entry, target_path)
        else:
            target_versioned = False
            target_name = None
            target_parent = None
            target_kind = None
            target_executable = None
        versioned = (source_versioned, target_versioned)
        kind = (source_kind, target_kind)
        changed_content = False
        if source_kind != target_kind:
            changed_content = True
        elif source_kind == 'file':
            if not self.file_content_matches(file_id, file_id, source_path,
                    target_path, source_stat, target_stat):
                changed_content = True
        elif source_kind == 'symlink':
            if (self.source.get_symlink_target(file_id) !=
                self.target.get_symlink_target(file_id)):
                changed_content = True
        elif source_kind == 'tree-reference':
            if (self.source.get_reference_revision(file_id, source_path)
                != self.target.get_reference_revision(file_id, target_path)):
                changed_content = True
        parent = (source_parent, target_parent)
        name = (source_name, target_name)
        executable = (source_executable, target_executable)
        if (changed_content is not False or versioned[0] != versioned[1]
            or parent[0] != parent[1] or name[0] != name[1] or
            executable[0] != executable[1]):
            changes = True
        else:
            changes = False
        return (file_id, (source_path, target_path), changed_content,
                versioned, parent, name, kind, executable), changes

    @needs_read_lock
    def compare(self, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):
        """Return the changes from source to target.

        :return: A TreeDelta.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children of
            matched directories are included.
        :param want_unchanged: An optional boolean requesting the inclusion of
            unchanged entries in the result.
        :param extra_trees: An optional list of additional trees to use when
            mapping the contents of specific_files (paths) to file_ids.
        :param require_versioned: An optional boolean (defaults to False). When
            supplied and True all the 'specific_files' must be versioned, or
            a PathsNotVersionedError will be thrown.
        :param want_unversioned: Scan for unversioned paths.
        """
        trees = (self.source,)
        if extra_trees is not None:
            trees = trees + tuple(extra_trees)
        # target is usually the newer tree:
        specific_file_ids = self.target.paths2ids(specific_files, trees,
            require_versioned=require_versioned)
        if specific_files and not specific_file_ids:
            # All files are unversioned, so just return an empty delta
            # _compare_trees would think we want a complete delta
            result = delta.TreeDelta()
            fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
            result.unversioned = [(path, None,
                self.target._comparison_data(fake_entry, path)[0]) for path in
                specific_files]
            return result
        return delta._compare_trees(self.source, self.target, want_unchanged,
            specific_files, include_root, extra_trees=extra_trees,
            require_versioned=require_versioned,
            want_unversioned=want_unversioned)

    @needs_read_lock
    def iter_changes(self, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=[],
                     require_versioned=True, want_unversioned=False):
        """Generate an iterator of changes between trees.

        A tuple is returned:
        (file_id, (path_in_source, path_in_target),
         changed_content, versioned, parent, name, kind,
         executable)

        Changed_content is True if the file's content has changed.  This
        includes changes to its kind, and to a symlink's target.

        versioned, parent, name, kind, executable are tuples of (from, to).
        If a file is missing in a tree, its kind is None.

        Iteration is done in parent-to-child order, relative to the target
        tree.

        There is no guarantee that all paths are in sorted order: the
        requirement to expand the search due to renames may result in children
        that should be found early being found late in the search, after
        lexically later results have been returned.
        :param require_versioned: Raise errors.PathsNotVersionedError if a
            path in the specific_files list is not versioned in one of
            source, target or extra_trees.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children
            of matched directories are included. The parents in the target tree
            of the specific files up to and including the root of the tree are
            always evaluated for changes too.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        """
        lookup_trees = [self.source]
        if extra_trees:
            lookup_trees.extend(extra_trees)
        # The ids of items we need to examine to insure delta consistency.
        precise_file_ids = set()
        changed_file_ids = []
        if specific_files == []:
            specific_file_ids = []
        else:
            specific_file_ids = self.target.paths2ids(specific_files,
                lookup_trees, require_versioned=require_versioned)
        if specific_files is not None:
            # reparented or added entries must have their parents included
            # so that valid deltas can be created. The seen_parents set
            # tracks the parents that we need to have.
            # The seen_dirs set tracks directory entries we've yielded.
            # After outputting version object in to_entries we set difference
            # the two seen sets and start checking parents.
            seen_parents = set()
            seen_dirs = set()
        if want_unversioned:
            all_unversioned = sorted([(p.split('/'), p) for p in
                                     self.target.extras()
                if specific_files is None or
                    osutils.is_inside_any(specific_files, p)])
            all_unversioned = collections.deque(all_unversioned)
        else:
            all_unversioned = collections.deque()
        to_paths = {}
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        entry_count = 0
        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable it values when execute is not supported.
        fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
        for target_path, target_entry in to_entries_by_dir:
            while (all_unversioned and
                all_unversioned[0][0] < target_path.split('/')):
                unversioned_path = all_unversioned.popleft()
                target_kind, target_executable, target_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, None),
                    (None, unversioned_path[0][-1]),
                    (None, target_kind),
                    (None, target_executable))
            source_path, source_entry = from_data.get(target_entry.file_id,
                (None, None))
            result, changes = self._changes_from_entries(source_entry,
                target_entry, source_path=source_path, target_path=target_path)
            to_paths[result[0]] = result[1][1]
            entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            if changes or include_unchanged:
                if specific_file_ids is not None:
                    new_parent_id = result[4][1]
                    precise_file_ids.add(new_parent_id)
                    changed_file_ids.append(result[0])
                yield result
            # Ensure correct behaviour for reparented/added specific files.
            if specific_files is not None:
                # Record output dirs
                if result[6][1] == 'directory':
                    seen_dirs.add(result[0])
                # Record parents of reparented/added entries.
                versioned = result[3]
                parents = result[4]
                if not versioned[0] or parents[0] != parents[1]:
                    seen_parents.add(parents[1])
        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(fake_entry, unversioned_path[1])
            yield (None, (None, unversioned_path[1]), True, (False, False),
                (None, None),
                (None, unversioned_path[0][-1]),
                (None, to_kind),
                (None, to_executable))
        # Yield all remaining source paths
        for path, from_entry in from_entries_by_dir:
            file_id = from_entry.file_id
            if file_id in to_paths:
                # already returned
                continue
            if not self.target.has_id(file_id):
                # common case - paths we have not emitted are not present in
                # target.
                to_path = None
            else:
                to_path = self.target.id2path(file_id)
            entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            versioned = (True, False)
            parent = (from_entry.parent_id, None)
            name = (from_entry.name, None)
            from_kind, from_executable, stat_value = \
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            changed_file_ids.append(file_id)
            yield(file_id, (path, to_path), changed_content, versioned, parent,
                  name, kind, executable)
        changed_file_ids = set(changed_file_ids)
        if specific_file_ids is not None:
            for result in self._handle_precise_ids(precise_file_ids,
                changed_file_ids):
                yield result

    def _get_entry(self, tree, file_id):
        """Get an inventory entry from a tree, with missing entries as None.

        If the tree raises NotImplementedError on accessing .inventory, then
        this is worked around using iter_entries_by_dir on just the file id
        given.

        :param tree: The tree to lookup the entry in.
        :param file_id: The file_id to lookup.
        """
        try:
            inventory = tree.root_inventory
        except NotImplementedError:
            # No inventory available.
            try:
                iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
                return iterator.next()[1]
            except StopIteration:
                return None
        else:
            try:
                return inventory[file_id]
            except errors.NoSuchId:
                return None

    def _handle_precise_ids(self, precise_file_ids, changed_file_ids,
        discarded_changes=None):
        """Fill out a partial iter_changes to be consistent.

        :param precise_file_ids: The file ids of parents that were seen during
            the iter_changes.
        :param changed_file_ids: The file ids of already emitted items.
        :param discarded_changes: An optional dict of precalculated
            iter_changes items which the partial iter_changes had not output
            but had calculated.
        :return: A generator of iter_changes items to output.
        """
        # process parents of things that had changed under the users
        # requested paths to prevent incorrect paths or parent ids which
        # aren't in the tree.
        while precise_file_ids:
            precise_file_ids.discard(None)
            # Don't emit file_ids twice
            precise_file_ids.difference_update(changed_file_ids)
            if not precise_file_ids:
                return
            # If the there was something at a given output path in source, we
            # have to include the entry from source in the delta, or we would
            # be putting this entry into a used path.
            paths = []
            for parent_id in precise_file_ids:
                try:
                    paths.append(self.target.id2path(parent_id))
                except errors.NoSuchId:
                    # This id has been dragged in from the source by delta
                    # expansion and isn't present in target at all: we don't
                    # need to check for path collisions on it.
                    pass
            for path in paths:
                old_id = self.source.path2id(path)
                precise_file_ids.add(old_id)
            precise_file_ids.discard(None)
            current_ids = precise_file_ids
            precise_file_ids = set()
            # We have to emit all of precise_file_ids that have been altered.
            # We may have to output the children of some of those ids if any
            # directories have stopped being directories.
            for file_id in current_ids:
                if discarded_changes:
                    result = discarded_changes.get(file_id)
                    old_entry = None
                else:
                    result = None
                if result is None:
                    old_entry = self._get_entry(self.source, file_id)
                    new_entry = self._get_entry(self.target, file_id)
                    result, changes = self._changes_from_entries(
                        old_entry, new_entry)
                else:
                    changes = True
                # Get this parent's parent to examine.
                new_parent_id = result[4][1]
                precise_file_ids.add(new_parent_id)
                if changes:
                    if (result[6][0] == 'directory' and
                        result[6][1] != 'directory'):
                        # This stopped being a directory, the old children have
                        # to be included.
                        if old_entry is None:
                            # Reusing a discarded change.
                            old_entry = self._get_entry(self.source, file_id)
                        precise_file_ids.update(
                            self.source.iter_children(file_id))
                    changed_file_ids.add(result[0])
                    yield result

    def file_content_matches(self, source_file_id, target_file_id,
            source_path=None, target_path=None, source_stat=None, target_stat=None):
        """Check if two files are the same in the source and target trees.

        This only checks that the contents of the files are the same,
        it does not touch anything else.

        :param source_file_id: File id of the file in the source tree
        :param target_file_id: File id of the file in the target tree
        :param source_path: Path of the file in the source tree
        :param target_path: Path of the file in the target tree
        :param source_stat: Optional stat value of the file in the source tree
        :param target_stat: Optional stat value of the file in the target tree
        :return: Boolean indicating whether the files have the same contents
        """
        source_verifier_kind, source_verifier_data = self.source.get_file_verifier(
            source_file_id, source_path, source_stat)
        target_verifier_kind, target_verifier_data = self.target.get_file_verifier(
            target_file_id, target_path, target_stat)
        if source_verifier_kind == target_verifier_kind:
            return (source_verifier_data == target_verifier_data)
        # Fall back to SHA1 for now
        if source_verifier_kind != "SHA1":
            source_sha1 = self.source.get_file_sha1(source_file_id,
                    source_path, source_stat)
        else:
            source_sha1 = source_verifier_data
        if target_verifier_kind != "SHA1":
            target_sha1 = self.target.get_file_sha1(target_file_id,
                    target_path, target_stat)
        else:
            target_sha1 = target_verifier_data
        return (source_sha1 == target_sha1)
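
    # Illustrative sketch (not part of the original source): given two locked
    # trees, a caller could use the InterTree optimiser to cheaply test
    # whether one versioned file's content differs between them.
    # ``source_tree``, ``target_tree`` and ``file_id`` below are placeholders:
    #
    #     inter = InterTree.get(source_tree, target_tree)
    #     if inter.file_content_matches(file_id, file_id):
    #         # identical contents: either the verifier data (e.g. SHA1)
    #         # matched directly, or the SHA1 fallback produced equal digests
    #         pass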


InterTree.register_optimiser(InterTree)


class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()
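
    # Illustrative sketch (not part of the original source): walking a tree
    # against two older trees.  ``iter_all()`` (defined below) yields
    # ``(path, file_id, master_ie, other_values)`` where ``other_values``
    # holds one ``(path, inventory_entry)`` pair per other tree, with
    # ``(None, None)`` when that tree has no such file_id; ``master_ie`` is
    # None for nodes found only in the other trees:
    #
    #     walker = MultiWalker(new_tree, [old_tree, older_tree])
    #     for path, file_id, master_ie, other_values in walker.iter_all():
    #         (old_path, old_ie), (older_path, older_ie) = other_values
    #         # process the combined row here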

    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie
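
    # For example (not part of the original source), stepping an exhausted
    # iterator reports "no more" rather than raising:
    #
    #     MultiWalker._step_one(iter([]))           # -> (False, None, None)
    #     MultiWalker._step_one(iter([(u'', ie)]))  # -> (True, u'', ie)
    #
    # where ``ie`` stands in for any object with a '.file_id' attribute.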

    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
                   MultiWalker._path_to_key(path2))
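
    # Worked example (not part of the original source): in this ordering all
    # direct children of a directory sort before any of its grandchildren, so
    # u'a/z' comes before u'a/b/c' even though a plain string sort would put
    # u'a/b/c' first:
    #
    #     MultiWalker._cmp_path_by_dirblock(u'a/z', u'a/b/c')  # negative
    #     MultiWalker._cmp_path_by_dirblock(u'a/b/c', u'a/z')  # positive
    #     MultiWalker._cmp_path_by_dirblock(u'a/z', u'a/z')    # 0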

    @staticmethod
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)
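
    # For example (not part of the original source):
    #
    #     MultiWalker._path_to_key(u'a/b/c')  # -> ([u'a', u'b'], u'c')
    #     MultiWalker._path_to_key(u'file')   # -> ([u''], u'file')
    #
    # Tuples of (directory parts, basename) sort in the dirblock order that
    # _cmp_path_by_dirblock above relies on.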

    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.root_inventory[file_id]
            return (cur_path, cur_ie)
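
    # Illustrative sketch (not part of the original source): if a file kept
    # its file_id but moved to a different path in ``other_tree``, the
    # lock-step walk misses it and falls back to this lookup; the names below
    # are placeholders:
    #
    #     path, ie = self._lookup_by_file_id(extras, other_tree, file_id)
    #     # -> (path, inventory entry) when other_tree has file_id,
    #     #    (None, None) when it does not.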

    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result

    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra
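
    # Worked example (not part of the original source): if the master tree
    # yields u'a', u'b', u'c' and one other tree yields u'a', u'c', u'z', then
    # u'a' and u'c' match on the fast path; for u'b' the other walker is not
    # stepped (u'c' sorts after u'b') and the _lookup_by_file_id fallback
    # records (None, None); u'z' is later collected by _finish_others() and
    # yielded by _walk_others() with master_ie=None.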

    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries

    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values