# Copyright (C) 2005-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Tree classes, representing directory at point in time.
20
from __future__ import absolute_import
24
from bzrlib.lazy_import import lazy_import
25
lazy_import(globals(), """
29
conflicts as _mod_conflicts,
36
revision as _mod_revision,
40
from bzrlib.i18n import gettext
43
from bzrlib.decorators import needs_read_lock
44
from bzrlib.inter import InterObject
45
from bzrlib.symbol_versioning import (


class Tree(object):
    """Abstract file tree.

    There are several subclasses:

    * `WorkingTree` exists as files on disk editable by the user.

    * `RevisionTree` is a tree as recorded at some point in the past.

    Trees contain an `Inventory` object, and also know how to retrieve
    file texts mentioned in the inventory, either from a working
    directory or from a store.

    It is possible for trees to contain files that are not described
    in their inventory or vice versa; for this use `filenames()`.

    Trees can be compared, etc, regardless of whether they are working
    trees or versioned trees.
    """
    def has_versioned_directories(self):
        """Whether this tree can contain explicitly versioned directories.

        This defaults to True, but some implementations may want to override
        it.
        """
        return True
def changes_from(self, other, want_unchanged=False, specific_files=None,
73
extra_trees=None, require_versioned=False, include_root=False,
74
want_unversioned=False):
75
"""Return a TreeDelta of the changes from other to this tree.
77
:param other: A tree to compare with.
78
:param specific_files: An optional list of file paths to restrict the
79
comparison to. When mapping filenames to ids, all matches in all
80
trees (including optional extra_trees) are used, and all children of
81
matched directories are included.
82
:param want_unchanged: An optional boolean requesting the inclusion of
83
unchanged entries in the result.
84
:param extra_trees: An optional list of additional trees to use when
85
mapping the contents of specific_files (paths) to file_ids.
86
:param require_versioned: An optional boolean (defaults to False). When
87
supplied and True all the 'specific_files' must be versioned, or
88
a PathsNotVersionedError will be thrown.
89
:param want_unversioned: Scan for unversioned paths.
91
        The comparison will be performed by an InterTree object looked up on
        self and other.
        """
# Martin observes that Tree.changes_from returns a TreeDelta and this
95
# may confuse people, because the class name of the returned object is
96
# a synonym of the object referenced in the method name.
97
return InterTree.get(other, self).compare(
98
want_unchanged=want_unchanged,
99
specific_files=specific_files,
100
extra_trees=extra_trees,
101
require_versioned=require_versioned,
102
include_root=include_root,
103
            want_unversioned=want_unversioned,
            )
def iter_changes(self, from_tree, include_unchanged=False,
107
specific_files=None, pb=None, extra_trees=None,
108
require_versioned=True, want_unversioned=False):
109
"""See InterTree.iter_changes"""
110
intertree = InterTree.get(from_tree, self)
111
return intertree.iter_changes(include_unchanged, specific_files, pb,
112
extra_trees, require_versioned, want_unversioned=want_unversioned)
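    # Illustrative usage sketch (not part of bzrlib itself): comparing a
    # working tree against its basis tree with the two APIs above.  The names
    # `wt` and `basis` are assumptions for the example; the tuple layout
    # matches the iter_changes documentation.
    #
    #   wt = WorkingTree.open('.')        # from bzrlib.workingtree
    #   basis = wt.basis_tree()
    #   delta = wt.changes_from(basis, want_unchanged=False)
    #   for (file_id, paths, changed_content, versioned, parent,
    #        name, kind, executable) in wt.iter_changes(basis):
    #       if changed_content:
    #           print 'content changed:', paths[1]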
    def conflicts(self):
        """Get a list of the conflicts in the tree.

        Each conflict is an instance of bzrlib.conflicts.Conflict.
        """
        return _mod_conflicts.ConflictList()

    def extras(self):
        """For trees that can have unversioned files, return all such paths."""
        return []
def get_parent_ids(self):
126
"""Get the parent ids for this tree.
128
:return: a list of parent ids. [] is returned to indicate
129
a tree with no parents.
130
:raises: BzrError if the parents are not known.
132
raise NotImplementedError(self.get_parent_ids)
134
57
def has_filename(self, filename):
135
58
"""True if the tree has given filename."""
136
raise NotImplementedError(self.has_filename)
138
61
def has_id(self, file_id):
139
raise NotImplementedError(self.has_id)
141
@deprecated_method(deprecated_in((2, 4, 0)))
142
def __contains__(self, file_id):
143
return self.has_id(file_id)
145
def has_or_had_id(self, file_id):
146
raise NotImplementedError(self.has_or_had_id)
148
def is_ignored(self, filename):
149
"""Check whether the filename is ignored by this tree.
151
:param filename: The relative filename within the tree.
152
:return: True if the filename is ignored.
156
def all_file_ids(self):
157
"""Iterate through all file ids, including ids for missing files."""
158
raise NotImplementedError(self.all_file_ids)
160
68
def id2path(self, file_id):
161
"""Return the path for a file id.
165
raise NotImplementedError(self.id2path)
167
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
168
"""Walk the tree in 'by_dir' order.
170
This will yield each entry in the tree as a (path, entry) tuple.
171
The order that they are yielded is:
173
        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the
        grandchildren.

        For example, in the tree::

           a/
             b/
               c
             d/
               e
           f/
             g

        The yield order (ignoring root) would be::
190
a, f, a/b, a/d, a/b/c, a/d/e, f/g
192
:param yield_parents: If True, yield the parents from the root leading
193
down to specific_file_ids that have been requested. This has no
194
impact if specific_file_ids is None.
196
raise NotImplementedError(self.iter_entries_by_dir)
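    # Illustrative sketch: listing every versioned entry in the order
    # described above (`tree` is assumed to be any concrete Tree, already
    # read-locked by the caller).
    #
    #   for path, entry in tree.iter_entries_by_dir():
    #       print entry.kind, path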
198
def list_files(self, include_root=False, from_dir=None, recursive=True):
199
"""List all files in this tree.
201
:param include_root: Whether to include the entry for the tree root
202
:param from_dir: Directory under which to list files
203
:param recursive: Whether to list files recursively
204
:return: iterator over tuples of (path, versioned, kind, file_id,
207
raise NotImplementedError(self.list_files)
209
def iter_references(self):
210
if self.supports_tree_reference():
211
for path, entry in self.iter_entries_by_dir():
212
if entry.kind == 'tree-reference':
213
yield path, entry.file_id
215
def kind(self, file_id):
216
raise NotImplementedError("Tree subclass %s must implement kind"
217
% self.__class__.__name__)
219
def stored_kind(self, file_id):
220
"""File kind stored for this file_id.
222
May not match kind on disk for working trees. Always available
223
for versioned files, even when the file itself is missing.
225
return self.kind(file_id)
227
def path_content_summary(self, path):
228
"""Get a summary of the information about path.
230
All the attributes returned are for the canonical form, not the
231
convenient form (if content filters are in use.)
233
:param path: A relative path within the tree.
234
:return: A tuple containing kind, size, exec, sha1-or-link.
235
Kind is always present (see tree.kind()).
236
size is present if kind is file and the size of the
237
canonical form can be cheaply determined, None otherwise.
238
exec is None unless kind is file and the platform supports the 'x'
240
sha1-or-link is the link target if kind is symlink, or the sha1 if
241
it can be obtained without reading the file.
243
raise NotImplementedError(self.path_content_summary)
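    # Illustrative sketch: a caller unpacking the summary tuple documented
    # above on a concrete tree; the path used here is an assumption.
    #
    #   kind, size, executable, sha1_or_link = tree.path_content_summary('README')
    #   if kind == 'file' and size is not None:
    #       print 'README is %d bytes' % size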
245
def get_reference_revision(self, file_id, path=None):
246
raise NotImplementedError("Tree subclass %s must implement "
247
"get_reference_revision"
248
% self.__class__.__name__)
250
def _comparison_data(self, entry, path):
251
"""Return a tuple of kind, executable, stat_value for a file.
253
entry may be None if there is no inventory entry for the file, but
254
path must always be supplied.
256
kind is None if there is no file present (even if an inventory id is
257
present). executable is False for non-file entries.
259
raise NotImplementedError(self._comparison_data)
261
def _file_size(self, entry, stat_value):
262
raise NotImplementedError(self._file_size)
264
def get_file(self, file_id, path=None):
265
"""Return a file object for the file file_id in the tree.
267
If both file_id and path are defined, it is implementation defined as
268
to which one is used.
270
raise NotImplementedError(self.get_file)
272
def get_file_with_stat(self, file_id, path=None):
273
"""Get a file handle and stat object for file_id.
275
The default implementation returns (self.get_file, None) for backwards
278
:param file_id: The file id to read.
279
:param path: The path of the file, if it is known.
280
:return: A tuple (file_handle, stat_value_or_None). If the tree has
281
no stat facility, or need for a stat cache feedback during commit,
282
it may return None for the second element of the tuple.
284
return (self.get_file(file_id, path), None)
286
def get_file_text(self, file_id, path=None):
287
"""Return the byte content of a file.
289
:param file_id: The file_id of the file.
290
:param path: The path of the file.
292
If both file_id and path are supplied, an implementation may use
295
:returns: A single byte string for the whole file.
297
        my_file = self.get_file(file_id, path)
        try:
            return my_file.read()
        finally:
            my_file.close()
303
def get_file_lines(self, file_id, path=None):
304
"""Return the content of a file, as lines.
306
:param file_id: The file_id of the file.
307
:param path: The path of the file.
309
If both file_id and path are supplied, an implementation may use
312
return osutils.split_lines(self.get_file_text(file_id, path))
314
def get_file_verifier(self, file_id, path=None, stat_value=None):
315
"""Return a verifier for a file.
317
The default implementation returns a sha1.
319
:param file_id: The handle for this file.
320
:param path: The path that this file can be found at.
321
These must point to the same object.
322
:param stat_value: Optional stat value for the object
323
:return: Tuple with verifier name and verifier data
325
return ("SHA1", self.get_file_sha1(file_id, path=path,
326
stat_value=stat_value))
328
def get_file_sha1(self, file_id, path=None, stat_value=None):
329
"""Return the SHA1 file for a file.
331
:note: callers should use get_file_verifier instead
332
where possible, as the underlying repository implementation may
333
have quicker access to a non-sha1 verifier.
335
:param file_id: The handle for this file.
336
:param path: The path that this file can be found at.
337
These must point to the same object.
338
:param stat_value: Optional stat value for the object
340
raise NotImplementedError(self.get_file_sha1)
342
def get_file_mtime(self, file_id, path=None):
343
"""Return the modification time for a file.
345
:param file_id: The handle for this file.
346
:param path: The path that this file can be found at.
347
These must point to the same object.
349
raise NotImplementedError(self.get_file_mtime)
351
def get_file_size(self, file_id):
352
"""Return the size of a file in bytes.
354
This applies only to regular files. If invoked on directories or
355
symlinks, it will return None.
356
:param file_id: The file-id of the file
358
raise NotImplementedError(self.get_file_size)
360
def is_executable(self, file_id, path=None):
361
"""Check if a file is executable.
363
:param file_id: The handle for this file.
364
:param path: The path that this file can be found at.
365
These must point to the same object.
367
raise NotImplementedError(self.is_executable)
369
def iter_files_bytes(self, desired_files):
370
"""Iterate through file contents.
372
Files will not necessarily be returned in the order they occur in
373
desired_files. No specific order is guaranteed.
375
Yields pairs of identifier, bytes_iterator. identifier is an opaque
376
value supplied by the caller as part of desired_files. It should
377
uniquely identify the file version in the caller's context. (Examples:
378
an index number or a TreeTransform trans_id.)
380
bytes_iterator is an iterable of bytestrings for the file. The
381
kind of iterable and length of the bytestrings are unspecified, but for
382
this implementation, it is a tuple containing a single bytestring with
383
the complete text of the file.
385
:param desired_files: a list of (file_id, identifier) pairs
387
for file_id, identifier in desired_files:
388
# We wrap the string in a tuple so that we can return an iterable
389
# of bytestrings. (Technically, a bytestring is also an iterable
390
# of bytestrings, but iterating through each character is not
392
cur_file = (self.get_file_text(file_id),)
393
yield identifier, cur_file
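    # Illustrative sketch: reassembling whole texts from iter_files_bytes().
    # The file ids are assumptions for the example; the identifiers are
    # simply the names we want back.
    #
    #   wanted = [('file-id-a', 'a.txt'), ('file-id-b', 'b.txt')]
    #   for name, chunks in tree.iter_files_bytes(wanted):
    #       text = ''.join(chunks)
    #       print '%s is %d bytes long' % (name, len(text))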
395
def get_symlink_target(self, file_id, path=None):
396
"""Get the target for a given file_id.
398
It is assumed that the caller already knows that file_id is referencing
400
:param file_id: Handle for the symlink entry.
401
:param path: The path of the file.
402
If both file_id and path are supplied, an implementation may use
404
:return: The path the symlink points to.
406
raise NotImplementedError(self.get_symlink_target)
408
def get_root_id(self):
409
"""Return the file_id for the root of this tree."""
410
raise NotImplementedError(self.get_root_id)
412
def annotate_iter(self, file_id,
413
default_revision=_mod_revision.CURRENT_REVISION):
414
"""Return an iterator of revision_id, line tuples.
416
For working trees (and mutable trees in general), the special
417
revision_id 'current:' will be used for lines that are new in this
418
tree, e.g. uncommitted changes.
419
:param file_id: The file to produce an annotated version from
420
:param default_revision: For lines that don't match a basis, mark them
421
with this revision id. Not all implementations will make use of
424
raise NotImplementedError(self.annotate_iter)
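    # Illustrative sketch: printing an annotated view of a versioned file
    # (file_id is an assumption for the example).
    #
    #   for revision_id, line in tree.annotate_iter(file_id):
    #       print revision_id[:8], line,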
426
def _get_plan_merge_data(self, file_id, other, base):
427
from bzrlib import versionedfile
428
vf = versionedfile._PlanMergeVersionedFile(file_id)
429
last_revision_a = self._get_file_revision(file_id, vf, 'this:')
430
last_revision_b = other._get_file_revision(file_id, vf, 'other:')
432
        if base is None:
            last_revision_base = None
        else:
            last_revision_base = base._get_file_revision(file_id, vf, 'base:')
435
return vf, last_revision_a, last_revision_b, last_revision_base
437
def plan_file_merge(self, file_id, other, base=None):
438
"""Generate a merge plan based on annotations.
440
If the file contains uncommitted changes in this tree, they will be
441
attributed to the 'current:' pseudo-revision. If the file contains
442
uncommitted changes in the other tree, they will be assigned to the
443
'other:' pseudo-revision.
445
data = self._get_plan_merge_data(file_id, other, base)
446
vf, last_revision_a, last_revision_b, last_revision_base = data
447
        return vf.plan_merge(last_revision_a, last_revision_b,
                             base=last_revision_base)
def plan_file_lca_merge(self, file_id, other, base=None):
451
"""Generate a merge plan based lca-newness.
453
If the file contains uncommitted changes in this tree, they will be
454
attributed to the 'current:' pseudo-revision. If the file contains
455
uncommitted changes in the other tree, they will be assigned to the
456
'other:' pseudo-revision.
458
data = self._get_plan_merge_data(file_id, other, base)
459
vf, last_revision_a, last_revision_b, last_revision_base = data
460
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 base=last_revision_base)
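    # Illustrative sketch: consuming a merge plan from plan_file_merge().
    # Each element is assumed to be a (state, line) pair as produced by
    # versionedfile._PlanMergeVersionedFile; the trees and file_id are
    # assumptions for the example.
    #
    #   for state, line in this_tree.plan_file_merge(file_id, other_tree):
    #       if state != 'unchanged':
    #           print state, line,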
def _iter_parent_trees(self):
464
"""Iterate through parent trees, defaulting to Tree.revision_tree."""
465
for revision_id in self.get_parent_ids():
467
            try:
                yield self.revision_tree(revision_id)
468
except errors.NoSuchRevisionInTree:
469
yield self.repository.revision_tree(revision_id)
471
def _get_file_revision(self, file_id, vf, tree_revision):
472
"""Ensure that file_id, tree_revision is in vf to plan the merge."""
474
if getattr(self, '_repository', None) is None:
475
last_revision = tree_revision
476
parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
477
self._iter_parent_trees()]
478
vf.add_lines((file_id, last_revision), parent_keys,
479
self.get_file_lines(file_id))
480
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self.get_file_revision(file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision
489
77
    def _check_retrieved(self, ie, f):
        fp = osutils.fingerprint_file(f)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise errors.BzrError(
                        "mismatched size for file %r in %r" %
                        (ie.file_id, self._store),
                        ["inventory expects %d bytes" % ie.text_size,
                         "file is actually %d bytes" % fp['size'],
                         "store is probably damaged/corrupt"])

        if ie.text_sha1 != fp['sha1']:
            raise errors.BzrError("wrong SHA-1 for file %r in %r" %
                    (ie.file_id, self._store),
                    ["inventory expects %s" % ie.text_sha1,
                     "file is actually %s" % fp['sha1'],
                     "store is probably damaged/corrupt"])
511
def path2id(self, path):
512
"""Return the id for path in this tree."""
513
raise NotImplementedError(self.path2id)
515
def paths2ids(self, paths, trees=[], require_versioned=True):
516
"""Return all the ids that can be reached by walking from paths.
518
Each path is looked up in this tree and any extras provided in
519
trees, and this is repeated recursively: the children in an extra tree
520
of a directory that has been renamed under a provided path in this tree
521
are all returned, even if none exist under a provided path in this
522
tree, and vice versa.
524
:param paths: An iterable of paths to start converting to ids from.
525
Alternatively, if paths is None, no ids should be calculated and None
526
will be returned. This is offered to make calling the api unconditional
527
for code that *might* take a list of files.
528
:param trees: Additional trees to consider.
529
:param require_versioned: If False, do not raise NotVersionedError if
530
an element of paths is not versioned in this tree and all of trees.
532
return find_ids_across_trees(paths, [self] + list(trees), require_versioned)
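    # Illustrative sketch: mapping user-supplied paths to the set of file ids
    # they cover, consulting the basis tree as well (names are assumptions
    # for the example).
    #
    #   ids = work_tree.paths2ids(['doc/index.txt'], trees=[basis_tree])
    #   if ids is not None:
    #       print len(ids), 'file ids selected'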
534
def iter_children(self, file_id):
535
entry = self.iter_entries_by_dir([file_id]).next()[1]
536
for child in getattr(entry, 'children', {}).itervalues():
540
"""Lock this tree for multiple read only operations.
542
:return: A bzrlib.lock.LogicalLockResult.
546
def revision_tree(self, revision_id):
547
"""Obtain a revision tree for the revision revision_id.
549
The intention of this method is to allow access to possibly cached
550
tree data. Implementors of this method should raise NoSuchRevision if
551
the tree is not locally available, even if they could obtain the
552
tree via a repository or some other means. Callers are responsible
553
for finding the ultimate source for a revision tree.
555
:param revision_id: The revision_id of the requested tree.
557
:raises: NoSuchRevision if the tree cannot be obtained.
559
raise errors.NoSuchRevisionInTree(self, revision_id)
95
def print_file(self, fileid):
96
"""Print file with id `fileid` to stdout."""
98
pumpfile(self.get_file(fileid), sys.stdout)
101
def export(self, dest):
102
"""Export this tree to a new directory.
104
`dest` should not exist, and will be created holding the
105
contents of this tree.
107
TODO: To handle subdirectories we need to create the
110
:note: If the export fails, the destination directory will be
111
left in a half-assed state.
114
mutter('export version %r' % self)
116
for dp, ie in inv.iter_entries():
118
fullpath = appendpath(dest, dp)
119
if kind == 'directory':
122
pumpfile(self.get_file(ie.file_id), file(fullpath, 'wb'))
124
bailout("don't know how to export {%s} of kind %r" % (fid, kind))
125
mutter(" export {%s} kind %s to %s" % (ie.file_id, kind, fullpath))
129
class WorkingTree(Tree):
130
"""Working copy tree.
132
The inventory is held in the `Branch` working-inventory, and the
133
files are in a directory on disk.
135
It is possible for a `WorkingTree` to have a filename which is
136
not listed in the Inventory and vice versa.
138
def __init__(self, basedir, inv):
139
self._inventory = inv
140
self.basedir = basedir
141
self.path2id = inv.path2id
144
return "<%s of %s>" % (self.__class__.__name__,
147
def abspath(self, filename):
148
return os.path.join(self.basedir, filename)
150
def has_filename(self, filename):
151
return os.path.exists(self.abspath(filename))
153
def get_file(self, file_id):
154
return self.get_file_byname(self.id2path(file_id))
156
def get_file_byname(self, filename):
157
return file(self.abspath(filename), 'rb')
159
def _get_store_filename(self, file_id):
160
## XXX: badly named; this isn't in the store at all
161
return self.abspath(self.id2path(file_id))
163
def has_id(self, file_id):
164
# files that have been deleted are excluded
165
if not self.inventory.has_id(file_id):
167
return os.access(self.abspath(self.inventory.id2path(file_id)), os.F_OK)
169
def get_file_size(self, file_id):
170
return os.stat(self._get_store_filename(file_id))[ST_SIZE]
172
def get_file_sha1(self, file_id):
173
f = self.get_file(file_id)
177
def file_class(self, filename):
178
if self.path2id(filename):
180
elif self.is_ignored(filename):
186
def list_files(self):
187
"""Recursively list all files as (path, class, kind, id).
189
Lists, but does not descend into unversioned directories.
191
This does not include files that have been deleted in this
194
Skips the control directory.
198
def descend(from_dir_relpath, from_dir_id, dp):
202
## TODO: If we find a subdirectory with its own .bzr
203
## directory, then that is a separate tree and we
204
## should exclude it.
205
if bzrlib.BZRDIR == f:
209
fp = appendpath(from_dir_relpath, f)
212
fap = appendpath(dp, f)
214
f_ie = inv.get_child(from_dir_id, f)
217
elif self.is_ignored(fp):
226
bailout("file %r entered as kind %r id %r, now of kind %r"
227
% (fap, f_ie.kind, f_ie.file_id, fk))
229
yield fp, c, fk, (f_ie and f_ie.file_id)
231
if fk != 'directory':
235
# don't descend unversioned directories
238
for ff in descend(fp, f_ie.file_id, fap):
241
for f in descend('', inv.root.file_id, self.basedir):
561
246
def unknowns(self):
562
"""What files are present in this tree and unknown.
564
        :return: an iterator over the unknown files.
        """
        return iter([])
def filter_unversioned_files(self, paths):
572
"""Filter out paths that are versioned.
574
:return: set of paths.
576
raise NotImplementedError(self.filter_unversioned_files)
578
def walkdirs(self, prefix=""):
579
"""Walk the contents of this tree from path down.
581
This yields all the data about the contents of a directory at a time.
582
After each directory has been yielded, if the caller has mutated the
583
list to exclude some directories, they are then not descended into.
585
The data yielded is of the form:
586
((directory-relpath, directory-path-from-root, directory-fileid),
587
[(relpath, basename, kind, lstat, path_from_tree_root, file_id,
588
versioned_kind), ...]),
589
- directory-relpath is the containing dirs relpath from prefix
590
- directory-path-from-root is the containing dirs path from /
591
- directory-fileid is the id of the directory if it is versioned.
592
- relpath is the relative path within the subtree being walked.
593
- basename is the basename
594
        - kind is the kind of the file now. If unknown then the file is not
          present within the tree - but it may be recorded as versioned. See
          versioned_kind.
597
- lstat is the stat data *if* the file was statted.
598
- path_from_tree_root is the path from the root of the tree.
599
- file_id is the file_id if the entry is versioned.
600
- versioned_kind is the kind of the file as last recorded in the
601
versioning system. If 'unknown' the file is not versioned.
602
One of 'kind' and 'versioned_kind' must not be 'unknown'.
604
:param prefix: Start walking from prefix within the tree rather than
605
at the root. This allows one to walk a subtree but get paths that are
606
relative to a tree rooted higher up.
607
:return: an iterator over the directory data.
609
raise NotImplementedError(self.walkdirs)
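    # Illustrative sketch: walking a tree and pruning any directory named
    # 'build' by mutating the yielded list in place, as described above.
    #
    #   for dir_info, entries in tree.walkdirs():
    #       entries[:] = [e for e in entries if e[1] != 'build']
    #       for relpath, basename, kind, lstat, path_from_root, file_id, vkind in entries:
    #           print kind, relpath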
611
    def supports_content_filtering(self):
        return False

def _content_filter_stack(self, path=None, file_id=None):
615
"""The stack of content filters for a path if filtering is supported.
617
Readers will be applied in first-to-last order.
618
Writers will be applied in last-to-first order.
619
Either the path or the file-id needs to be provided.
621
:param path: path relative to the root of the tree
623
:param file_id: file_id or None if unknown
624
:return: the list of filters - [] if there are none
626
filter_pref_names = filters._get_registered_names()
627
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            trace.note(gettext("*** {0} content-filter: {1} => {2!r}").format(path,prefs,stk))
        return stk
def _content_filter_stack_provider(self):
638
"""A function that returns a stack of ContentFilters.
640
The function takes a path (relative to the top of the tree) and a
641
file-id as parameters.
643
:return: None if content filtering is not supported by this tree.
645
if self.supports_content_filtering():
646
return lambda path, file_id: \
647
                self._content_filter_stack(path, file_id)
        else:
            return None

def iter_search_rules(self, path_names, pref_names=None,
652
_default_searcher=None):
653
"""Find the preferences for filenames in a tree.
655
:param path_names: an iterable of paths to find attributes for.
656
Paths are given relative to the root of the tree.
657
:param pref_names: the list of preferences to lookup - None for all
658
:param _default_searcher: private parameter to assist testing - don't use
659
:return: an iterator of tuple sequences, one per path-name.
660
See _RulesSearcher.get_items for details on the tuple sequence.
662
if _default_searcher is None:
663
_default_searcher = rules._per_user_searcher
664
searcher = self._get_rules_searcher(_default_searcher)
665
if searcher is not None:
666
if pref_names is not None:
667
for path in path_names:
668
yield searcher.get_selected_items(path, pref_names)
670
            else:
                for path in path_names:
671
yield searcher.get_items(path)
673
def _get_rules_searcher(self, default_searcher):
674
"""Get the RulesSearcher for this tree given the default one."""
675
        searcher = default_searcher
        return searcher
679
class InventoryTree(Tree):
680
"""A tree that relies on an inventory for its metadata.
682
Trees contain an `Inventory` object, and also know how to retrieve
683
file texts mentioned in the inventory, either from a working
684
directory or from a store.
686
It is possible for trees to contain files that are not described
687
in their inventory or vice versa; for this use `filenames()`.
689
Subclasses should set the _inventory attribute, which is considered
690
private to external API users.
693
def get_canonical_inventory_paths(self, paths):
694
"""Like get_canonical_inventory_path() but works on multiple items.
696
:param paths: A sequence of paths relative to the root of the tree.
697
:return: A list of paths, with each item the corresponding input path
698
adjusted to account for existing elements that match case
701
return list(self._yield_canonical_inventory_paths(paths))
703
def get_canonical_inventory_path(self, path):
704
"""Returns the first inventory item that case-insensitively matches path.
706
If a path matches exactly, it is returned. If no path matches exactly
707
but more than one path matches case-insensitively, it is implementation
708
defined which is returned.
710
If no path matches case-insensitively, the input path is returned, but
711
with as many path entries that do exist changed to their canonical
714
If you need to resolve many names from the same tree, you should
715
use get_canonical_inventory_paths() to avoid O(N) behaviour.
717
        :param path: A path relative to the root of the tree.
718
:return: The input path adjusted to account for existing elements
719
that match case insensitively.
721
return self._yield_canonical_inventory_paths([path]).next()
723
def _yield_canonical_inventory_paths(self, paths):
725
# First, if the path as specified exists exactly, just use it.
726
if self.path2id(path) is not None:
247
for subp in self.extras():
248
if not self.is_ignored(subp):
253
"""Yield all unknown files in this WorkingTree.
255
If there are any unknown directories then only the directory is
256
returned, not all its children. But if there are unknown files
257
under a versioned subdirectory, they are returned.
259
Currently returned depth-first, sorted by name within directories.
261
## TODO: Work from given directory downwards
263
for path, dir_entry in self.inventory.directories():
264
mutter("search for unknowns in %r" % path)
265
dirabs = self.abspath(path)
266
if not isdir(dirabs):
267
# e.g. directory deleted
730
cur_id = self.get_root_id()
732
bit_iter = iter(path.split("/"))
736
for child in self.iter_children(cur_id):
738
                        # XXX: it seems like if the child is known to be in the
739
# tree, we shouldn't need to go from its id back to
740
# its path -- mbp 2010-02-11
742
# XXX: it seems like we could be more efficient
743
# by just directly looking up the original name and
744
# only then searching all children; also by not
745
# chopping paths so much. -- mbp 2010-02-11
746
child_base = os.path.basename(self.id2path(child))
747
if (child_base == elt):
748
# if we found an exact match, we can stop now; if
749
# we found an approximate match we need to keep
750
# searching because there might be an exact match
753
new_path = osutils.pathjoin(cur_path, child_base)
755
elif child_base.lower() == lelt:
757
new_path = osutils.pathjoin(cur_path, child_base)
758
except errors.NoSuchId:
759
# before a change is committed we can see this error...
271
for subf in os.listdir(dirabs):
273
and (subf not in dir_entry.children)):
278
subp = appendpath(path, subf)
282
def ignored_files(self):
283
"""Yield list of PATH, IGNORE_PATTERN"""
284
for subp in self.extras():
285
pat = self.is_ignored(subp)
290
def get_ignore_list(self):
291
"""Return list of ignore patterns.
293
Cached in the Tree object after the first call.
295
if hasattr(self, '_ignorelist'):
296
return self._ignorelist
298
l = bzrlib.DEFAULT_IGNORE[:]
299
if self.has_filename(bzrlib.IGNORE_FILENAME):
300
f = self.get_file_byname(bzrlib.IGNORE_FILENAME)
301
l.extend([line.rstrip("\n\r") for line in f.readlines()])
306
def is_ignored(self, filename):
307
r"""Check whether the filename matches an ignore pattern.
309
Patterns containing '/' or '\' need to match the whole path;
310
others match against only the last component.
312
If the file is ignored, returns the pattern which caused it to
313
be ignored, otherwise None. So this can simply be used as a
314
boolean if desired."""
316
# TODO: Use '**' to match directories, and other extended
317
# globbing stuff from cvs/rsync.
319
# XXX: fnmatch is actually not quite what we want: it's only
320
# approximately the same as real Unix fnmatch, and doesn't
321
# treat dotfiles correctly and allows * to match /.
322
# Eventually it should be replaced with something more
325
for pat in self.get_ignore_list():
326
if '/' in pat or '\\' in pat:
328
# as a special case, you can put ./ at the start of a
329
# pattern; this is good to match in the top-level
332
if (pat[:2] == './') or (pat[:2] == '.\\'):
764
# got to the end of this directory and no entries matched.
765
# Return what matched so far, plus the rest as specified.
766
cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
771
def _get_inventory(self):
772
return self._inventory
774
inventory = property(_get_inventory,
775
doc="Inventory of this Tree")
777
def _unpack_file_id(self, file_id):
778
"""Find the inventory and inventory file id for a tree file id.
780
:param file_id: The tree file id, as bytestring or tuple
781
:return: Inventory and inventory file id
783
if isinstance(file_id, tuple):
784
if len(file_id) != 1:
785
raise ValueError("nested trees not yet supported: %r" % file_id)
787
            file_id = file_id[0]
        return self.inventory, file_id
790
def path2id(self, path):
791
"""Return the id for path in this tree."""
792
return self._path2inv_file_id(path)[1]
794
def _path2inv_file_id(self, path):
795
"""Lookup a inventory and inventory file id by path.
797
:param path: Path to look up
798
:return: tuple with inventory and inventory file id
800
return self.inventory, self.inventory.path2id(path)
802
def id2path(self, file_id):
803
"""Return the path for a file id.
807
inventory, file_id = self._unpack_file_id(file_id)
808
return inventory.id2path(file_id)
810
def has_id(self, file_id):
811
inventory, file_id = self._unpack_file_id(file_id)
812
return inventory.has_id(file_id)
814
def has_or_had_id(self, file_id):
815
inventory, file_id = self._unpack_file_id(file_id)
816
return inventory.has_id(file_id)
818
def all_file_ids(self):
820
        return set(
            [entry.file_id for path, entry in self.iter_entries_by_dir()])
822
@deprecated_method(deprecated_in((2, 4, 0)))
824
    def __iter__(self):
        return iter(self.all_file_ids())
826
def filter_unversioned_files(self, paths):
827
"""Filter out paths that are versioned.
829
:return: set of paths.
831
# NB: we specifically *don't* call self.has_filename, because for
832
# WorkingTrees that can indicate files that exist on disk but that
834
return set((p for p in paths if self.path2id(p) is None))
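    # Illustrative sketch: reporting which of a set of candidate paths are
    # not versioned in this tree (the paths are assumptions for the example).
    #
    #   for p in sorted(tree.filter_unversioned_files(['README', 'scratch.txt'])):
    #       print 'not versioned:', p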
837
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
838
"""Walk the tree in 'by_dir' order.
840
This will yield each entry in the tree as a (path, entry) tuple.
841
The order that they are yielded is:
843
See Tree.iter_entries_by_dir for details.
845
:param yield_parents: If True, yield the parents from the root leading
846
down to specific_file_ids that have been requested. This has no
847
impact if specific_file_ids is None.
849
if specific_file_ids is None:
850
inventory_file_ids = None
852
inventory_file_ids = []
853
for tree_file_id in specific_file_ids:
854
inventory, inv_file_id = self._unpack_file_id(tree_file_id)
855
                if inventory is not self.inventory: # for now
856
raise AssertionError("%r != %r" % (
857
inventory, self.inventory))
858
inventory_file_ids.append(inv_file_id)
859
return self.inventory.iter_entries_by_dir(
860
specific_file_ids=inventory_file_ids, yield_parents=yield_parents)
862
@deprecated_method(deprecated_in((2, 5, 0)))
863
def get_file_by_path(self, path):
864
return self.get_file(self.path2id(path), path)
867
def find_ids_across_trees(filenames, trees, require_versioned=True):
868
"""Find the ids corresponding to specified filenames.
870
All matches in all trees will be used, and all children of matched
871
directories will be used.
873
    :param filenames: The filenames to find file_ids for (if None, returns
        None)
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames and their children.
    """
    if not filenames:
        return None
    specified_path_ids = _find_ids_across_trees(filenames, trees,
        require_versioned)
    return _find_children_across_trees(specified_path_ids, trees)
887
def _find_ids_across_trees(filenames, trees, require_versioned):
888
"""Find the ids corresponding to specified filenames.
890
All matches in all trees will be used, but subdirectories are not scanned.
892
:param filenames: The filenames to find file_ids for
893
:param trees: The trees to find file_ids within
894
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames
    """
    not_versioned = []
    interesting_ids = set()
    for tree_path in filenames:
        not_found = True
        for tree in trees:
            file_id = tree.path2id(tree_path)
            if file_id is not None:
                interesting_ids.add(file_id)
                not_found = False
        if not_found:
            not_versioned.append(tree_path)
    if len(not_versioned) > 0 and require_versioned:
        raise errors.PathsNotVersionedError(not_versioned)
    return interesting_ids
914
def _find_children_across_trees(specified_ids, trees):
915
"""Return a set including specified ids and their children.
917
All matches in all trees will be used.
919
:param trees: The trees to find file_ids within
920
:return: a set containing all specified ids and their children
922
interesting_ids = set(specified_ids)
923
pending = interesting_ids
924
# now handle children of interesting ids
925
# we loop so that we handle all children of each id in both trees
926
    while len(pending) > 0:
        new_pending = set()
        for file_id in pending:
            for tree in trees:
                if not tree.has_or_had_id(file_id):
                    continue
                for child_id in tree.iter_children(file_id):
                    if child_id not in interesting_ids:
                        new_pending.add(child_id)
        interesting_ids.update(new_pending)
        pending = new_pending
    return interesting_ids
940
class InterTree(InterObject):
941
"""This class represents operations taking place between two Trees.
943
Its instances have methods like 'compare' and contain references to the
944
source and target trees these operations are to be carried out on.
946
Clients of bzrlib should not need to use InterTree directly, rather they
947
should use the convenience methods on Tree such as 'Tree.compare()' which
948
will pass through to InterTree as appropriate.
951
# Formats that will be used to test this InterTree. If both are
952
# None, this InterTree will not be tested (e.g. because a complex
954
_matching_from_tree_format = None
955
_matching_to_tree_format = None
960
    @classmethod
    def is_compatible(kls, source, target):
961
# The default implementation is naive and uses the public API, so
962
        # it works for all trees.
        return True
965
def _changes_from_entries(self, source_entry, target_entry,
966
source_path=None, target_path=None):
967
"""Generate a iter_changes tuple between source_entry and target_entry.
969
:param source_entry: An inventory entry from self.source, or None.
970
:param target_entry: An inventory entry from self.target, or None.
971
:param source_path: The path of source_entry, if known. If not known
972
it will be looked up.
973
:param target_path: The path of target_entry, if known. If not known
974
it will be looked up.
975
:return: A tuple, item 0 of which is an iter_changes result tuple, and
976
item 1 is True if there are any changes in the result tuple.
978
if source_entry is None:
979
            if target_entry is None:
                return None
            file_id = target_entry.file_id
        else:
            file_id = source_entry.file_id
984
if source_entry is not None:
985
source_versioned = True
986
source_name = source_entry.name
987
source_parent = source_entry.parent_id
988
if source_path is None:
989
source_path = self.source.id2path(file_id)
990
source_kind, source_executable, source_stat = \
991
self.source._comparison_data(source_entry, source_path)
993
        else:
            source_versioned = False
            source_name = None
            source_parent = None
            source_kind = None
            source_executable = None
998
if target_entry is not None:
999
target_versioned = True
1000
target_name = target_entry.name
1001
target_parent = target_entry.parent_id
1002
if target_path is None:
1003
target_path = self.target.id2path(file_id)
1004
target_kind, target_executable, target_stat = \
1005
self.target._comparison_data(target_entry, target_path)
1007
        else:
            target_versioned = False
            target_name = None
            target_parent = None
            target_kind = None
            target_executable = None
1012
versioned = (source_versioned, target_versioned)
1013
kind = (source_kind, target_kind)
1014
changed_content = False
1015
if source_kind != target_kind:
1016
changed_content = True
1017
elif source_kind == 'file':
1018
if not self.file_content_matches(file_id, file_id, source_path,
1019
target_path, source_stat, target_stat):
1020
changed_content = True
1021
elif source_kind == 'symlink':
1022
if (self.source.get_symlink_target(file_id) !=
1023
self.target.get_symlink_target(file_id)):
1024
changed_content = True
1025
# XXX: Yes, the indentation below is wrong. But fixing it broke
1026
# test_merge.TestMergerEntriesLCAOnDisk.
1027
# test_nested_tree_subtree_renamed_and_modified. We'll wait for
1028
# the fix from bzr.dev -- vila 2009026
1029
elif source_kind == 'tree-reference':
1030
if (self.source.get_reference_revision(file_id, source_path)
1031
!= self.target.get_reference_revision(file_id, target_path)):
1032
changed_content = True
1033
parent = (source_parent, target_parent)
1034
name = (source_name, target_name)
1035
executable = (source_executable, target_executable)
1036
if (changed_content is not False or versioned[0] != versioned[1]
1037
or parent[0] != parent[1] or name[0] != name[1] or
1038
            executable[0] != executable[1]):
            changes = True
        else:
            changes = False
return (file_id, (source_path, target_path), changed_content,
1043
versioned, parent, name, kind, executable), changes
1046
def compare(self, want_unchanged=False, specific_files=None,
1047
extra_trees=None, require_versioned=False, include_root=False,
1048
want_unversioned=False):
1049
"""Return the changes from source to target.
1051
:return: A TreeDelta.
1052
:param specific_files: An optional list of file paths to restrict the
1053
comparison to. When mapping filenames to ids, all matches in all
1054
trees (including optional extra_trees) are used, and all children of
1055
matched directories are included.
1056
:param want_unchanged: An optional boolean requesting the inclusion of
1057
unchanged entries in the result.
1058
:param extra_trees: An optional list of additional trees to use when
1059
mapping the contents of specific_files (paths) to file_ids.
1060
:param require_versioned: An optional boolean (defaults to False). When
1061
supplied and True all the 'specific_files' must be versioned, or
1062
a PathsNotVersionedError will be thrown.
1063
:param want_unversioned: Scan for unversioned paths.
1065
trees = (self.source,)
1066
if extra_trees is not None:
1067
trees = trees + tuple(extra_trees)
1068
# target is usually the newer tree:
1069
specific_file_ids = self.target.paths2ids(specific_files, trees,
1070
require_versioned=require_versioned)
1071
if specific_files and not specific_file_ids:
1072
# All files are unversioned, so just return an empty delta
1073
# _compare_trees would think we want a complete delta
1074
result = delta.TreeDelta()
1075
fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
1076
result.unversioned = [(path, None,
1077
                self.target._comparison_data(fake_entry, path)[0]) for path in
                specific_files]
            return result
return delta._compare_trees(self.source, self.target, want_unchanged,
1081
specific_files, include_root, extra_trees=extra_trees,
1082
require_versioned=require_versioned,
1083
want_unversioned=want_unversioned)
1085
def iter_changes(self, include_unchanged=False,
1086
specific_files=None, pb=None, extra_trees=[],
1087
require_versioned=True, want_unversioned=False):
1088
"""Generate an iterator of changes between trees.
1090
A tuple is returned:
1091
(file_id, (path_in_source, path_in_target),
1092
changed_content, versioned, parent, name, kind,
1095
Changed_content is True if the file's content has changed. This
1096
includes changes to its kind, and to a symlink's target.
1098
versioned, parent, name, kind, executable are tuples of (from, to).
1099
If a file is missing in a tree, its kind is None.
1101
Iteration is done in parent-to-child order, relative to the target
1104
There is no guarantee that all paths are in sorted order: the
1105
requirement to expand the search due to renames may result in children
1106
that should be found early being found late in the search, after
1107
lexically later results have been returned.
1108
:param require_versioned: Raise errors.PathsNotVersionedError if a
1109
path in the specific_files list is not versioned in one of
1110
source, target or extra_trees.
1111
:param specific_files: An optional list of file paths to restrict the
1112
comparison to. When mapping filenames to ids, all matches in all
1113
trees (including optional extra_trees) are used, and all children
1114
of matched directories are included. The parents in the target tree
1115
of the specific files up to and including the root of the tree are
1116
always evaluated for changes too.
1117
:param want_unversioned: Should unversioned files be returned in the
1118
output. An unversioned file is defined as one with (False, False)
1119
for the versioned pair.
1121
lookup_trees = [self.source]
1123
lookup_trees.extend(extra_trees)
1124
# The ids of items we need to examine to insure delta consistency.
1125
precise_file_ids = set()
1126
changed_file_ids = []
1127
if specific_files == []:
1128
specific_file_ids = []
1130
        else:
            specific_file_ids = self.target.paths2ids(specific_files,
1131
lookup_trees, require_versioned=require_versioned)
1132
if specific_files is not None:
1133
# reparented or added entries must have their parents included
1134
# so that valid deltas can be created. The seen_parents set
1135
# tracks the parents that we need to have.
1136
# The seen_dirs set tracks directory entries we've yielded.
1137
# After outputting version object in to_entries we set difference
1138
# the two seen sets and start checking parents.
1139
            seen_parents = set()
            seen_dirs = set()
1141
if want_unversioned:
1142
all_unversioned = sorted([(p.split('/'), p) for p in
1143
self.target.extras()
1144
if specific_files is None or
1145
osutils.is_inside_any(specific_files, p)])
1146
all_unversioned = collections.deque(all_unversioned)
1148
        else:
            all_unversioned = collections.deque()
1150
        to_paths = {}
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
1151
specific_file_ids=specific_file_ids))
1152
from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
1153
to_entries_by_dir = list(self.target.iter_entries_by_dir(
1154
specific_file_ids=specific_file_ids))
1155
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        entry_count = 0
1157
# the unversioned path lookup only occurs on real trees - where there
1158
# can be extras. So the fake_entry is solely used to look up
1159
        # executable values when execute is not supported.
1160
fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
1161
for target_path, target_entry in to_entries_by_dir:
1162
while (all_unversioned and
1163
all_unversioned[0][0] < target_path.split('/')):
1164
unversioned_path = all_unversioned.popleft()
1165
target_kind, target_executable, target_stat = \
1166
self.target._comparison_data(fake_entry, unversioned_path[1])
1167
yield (None, (None, unversioned_path[1]), True, (False, False),
1169
                    (None, None),
                    (None, unversioned_path[0][-1]),
1170
(None, target_kind),
1171
(None, target_executable))
1172
            source_path, source_entry = from_data.get(target_entry.file_id,
                                                       (None, None))
result, changes = self._changes_from_entries(source_entry,
1175
target_entry, source_path=source_path, target_path=target_path)
1176
to_paths[result[0]] = result[1][1]
1181
            entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
1182
if changes or include_unchanged:
1183
if specific_file_ids is not None:
1184
new_parent_id = result[4][1]
1185
precise_file_ids.add(new_parent_id)
1186
changed_file_ids.append(result[0])
1188
# Ensure correct behaviour for reparented/added specific files.
1189
if specific_files is not None:
1190
# Record output dirs
1191
if result[6][1] == 'directory':
1192
seen_dirs.add(result[0])
1193
# Record parents of reparented/added entries.
1194
versioned = result[3]
1196
                parents = result[4]
                if not versioned[0] or parents[0] != parents[1]:
1197
seen_parents.add(parents[1])
1198
while all_unversioned:
1199
# yield any trailing unversioned paths
1200
unversioned_path = all_unversioned.popleft()
1201
to_kind, to_executable, to_stat = \
1202
self.target._comparison_data(fake_entry, unversioned_path[1])
1203
yield (None, (None, unversioned_path[1]), True, (False, False),
1205
                (None, None),
                (None, unversioned_path[0][-1]),
                (None, to_kind),
1207
(None, to_executable))
1208
# Yield all remaining source paths
1209
for path, from_entry in from_entries_by_dir:
1210
file_id = from_entry.file_id
1211
            if file_id in to_paths:
                # already returned
                continue
            if not self.target.has_id(file_id):
                # common case - paths we have not emitted are not present in
                # the target.
                to_path = None
            else:
                to_path = self.target.id2path(file_id)
1222
            entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
1223
versioned = (True, False)
1224
parent = (from_entry.parent_id, None)
1225
name = (from_entry.name, None)
1226
from_kind, from_executable, stat_value = \
1227
self.source._comparison_data(from_entry, path)
1228
kind = (from_kind, None)
1229
executable = (from_executable, None)
1230
changed_content = from_kind is not None
1231
# the parent's path is necessarily known at this point.
1232
changed_file_ids.append(file_id)
1233
yield(file_id, (path, to_path), changed_content, versioned, parent,
1234
name, kind, executable)
1235
changed_file_ids = set(changed_file_ids)
1236
if specific_file_ids is not None:
1237
            for result in self._handle_precise_ids(precise_file_ids,
                changed_file_ids):
                yield result

    def _get_entry(self, tree, file_id):
        """Get an inventory entry from a tree, with missing entries as None.

        If the tree raises NotImplementedError on accessing .inventory, then
        this is worked around using iter_entries_by_dir on just the file id
        itself.

        :param tree: The tree to lookup the entry in.
        :param file_id: The file_id to lookup.
        """
        try:
            inventory = tree.inventory
        except NotImplementedError:
            # No inventory available.
            try:
                iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
                return iterator.next()[1]
            except StopIteration:
                return None
        else:
            try:
                return inventory[file_id]
            except errors.NoSuchId:
                return None
def _handle_precise_ids(self, precise_file_ids, changed_file_ids,
1267
discarded_changes=None):
1268
"""Fill out a partial iter_changes to be consistent.
1270
:param precise_file_ids: The file ids of parents that were seen during
1272
:param changed_file_ids: The file ids of already emitted items.
1273
:param discarded_changes: An optional dict of precalculated
1274
iter_changes items which the partial iter_changes had not output
1276
:return: A generator of iter_changes items to output.
1278
# process parents of things that had changed under the users
1279
# requested paths to prevent incorrect paths or parent ids which
1280
# aren't in the tree.
1281
while precise_file_ids:
1282
precise_file_ids.discard(None)
1283
# Don't emit file_ids twice
1284
precise_file_ids.difference_update(changed_file_ids)
1285
if not precise_file_ids:
1287
            # If there was something at a given output path in source, we
1288
# have to include the entry from source in the delta, or we would
1289
# be putting this entry into a used path.
1291
for parent_id in precise_file_ids:
1293
paths.append(self.target.id2path(parent_id))
1294
except errors.NoSuchId:
1295
# This id has been dragged in from the source by delta
1296
# expansion and isn't present in target at all: we don't
1297
# need to check for path collisions on it.
1300
old_id = self.source.path2id(path)
1301
precise_file_ids.add(old_id)
1302
precise_file_ids.discard(None)
1303
current_ids = precise_file_ids
1304
precise_file_ids = set()
1305
# We have to emit all of precise_file_ids that have been altered.
1306
# We may have to output the children of some of those ids if any
1307
# directories have stopped being directories.
1308
for file_id in current_ids:
1310
if discarded_changes:
1311
result = discarded_changes.get(file_id)
1316
old_entry = self._get_entry(self.source, file_id)
1317
new_entry = self._get_entry(self.target, file_id)
1318
result, changes = self._changes_from_entries(
1319
old_entry, new_entry)
1322
                # Get this parent's parent to examine.
1323
new_parent_id = result[4][1]
1324
precise_file_ids.add(new_parent_id)
1326
if (result[6][0] == 'directory' and
1327
result[6][1] != 'directory'):
1328
# This stopped being a directory, the old children have
1330
if old_entry is None:
1331
# Reusing a discarded change.
1332
old_entry = self._get_entry(self.source, file_id)
1333
for child in old_entry.children.values():
1334
precise_file_ids.add(child.file_id)
1335
changed_file_ids.add(result[0])
1339
def file_content_matches(self, source_file_id, target_file_id,
1340
source_path=None, target_path=None, source_stat=None, target_stat=None):
1341
"""Check if two files are the same in the source and target trees.
1343
This only checks that the contents of the files are the same,
1344
it does not touch anything else.
1346
:param source_file_id: File id of the file in the source tree
1347
:param target_file_id: File id of the file in the target tree
1348
:param source_path: Path of the file in the source tree
1349
:param target_path: Path of the file in the target tree
1350
:param source_stat: Optional stat value of the file in the source tree
1351
:param target_stat: Optional stat value of the file in the target tree
1352
:return: Boolean indicating whether the files have the same contents
1354
source_verifier_kind, source_verifier_data = self.source.get_file_verifier(
1355
source_file_id, source_path, source_stat)
1356
target_verifier_kind, target_verifier_data = self.target.get_file_verifier(
1357
target_file_id, target_path, target_stat)
1358
if source_verifier_kind == target_verifier_kind:
1359
return (source_verifier_data == target_verifier_data)
1360
# Fall back to SHA1 for now
1361
if source_verifier_kind != "SHA1":
1362
source_sha1 = self.source.get_file_sha1(source_file_id,
1363
source_path, source_stat)
1365
source_sha1 = source_verifier_data
1366
if target_verifier_kind != "SHA1":
1367
target_sha1 = self.target.get_file_sha1(target_file_id,
1368
target_path, target_stat)
1370
target_sha1 = target_verifier_data
1371
return (source_sha1 == target_sha1)
1373
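# Illustrative sketch (not part of the module): using InterTree to test
# whether one file's content differs between two trees.  The trees and
# file_id are assumptions for the example.
#
#   inter = InterTree.get(basis_tree, work_tree)
#   if not inter.file_content_matches(file_id, file_id):
#       print 'file content has changed'
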
InterTree.register_optimiser(InterTree)
1376
class MultiWalker(object):
1377
"""Walk multiple trees simultaneously, getting combined results."""
1379
# Note: This could be written to not assume you can do out-of-order
1380
# lookups. Instead any nodes that don't match in all trees could be
1381
# marked as 'deferred', and then returned in the final cleanup loop.
1382
# For now, I think it is "nicer" to return things as close to the
1383
# "master_tree" order as we can.
1385
def __init__(self, master_tree, other_trees):
1386
"""Create a new MultiWalker.
1388
All trees being walked must implement "iter_entries_by_dir()", such
1389
that they yield (path, object) tuples, where that object will have a
1390
'.file_id' member, that can be used to check equality.
1392
:param master_tree: All trees will be 'slaved' to the master_tree such
1393
that nodes in master_tree will be used as 'first-pass' sync points.
1394
Any nodes that aren't in master_tree will be merged in a second
1396
:param other_trees: A list of other trees to walk simultaneously.
1398
self._master_tree = master_tree
1399
self._other_trees = other_trees
1401
# Keep track of any nodes that were properly processed just out of
1402
# order, that way we don't return them at the end, we don't have to
1403
# track *all* processed file_ids, just the out-of-order ones
1404
self._out_of_order_processed = set()
1407
    @staticmethod
    def _step_one(iterator):
1408
"""Step an iter_entries_by_dir iterator.
1410
:return: (has_more, path, ie)
1411
If has_more is False, path and ie will be None.
1414
path, ie = iterator.next()
1415
except StopIteration:
1416
return False, None, None
1418
return True, path, ie
1421
    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
1422
"""Compare two paths based on what directory they are in.
1424
This generates a sort order, such that all children of a directory are
1425
sorted together, and grandchildren are in the same order as the
1426
children appear. But all grandchildren come after all children.
1428
:param path1: first path
1429
:param path2: the second path
1430
:return: negative number if ``path1`` comes first,
1431
0 if paths are equal
1432
and a positive number if ``path2`` sorts first
1434
# Shortcut this special case
1437
# This is stolen from _dirstate_helpers_py.py, only switching it to
1438
# Unicode objects. Consider using encode_utf8() and then using the
1439
# optimized versions, or maybe writing optimized unicode versions.
1440
if not isinstance(path1, unicode):
1441
raise TypeError("'path1' must be a unicode string, not %s: %r"
1442
% (type(path1), path1))
1443
if not isinstance(path2, unicode):
1444
raise TypeError("'path2' must be a unicode string, not %s: %r"
1445
% (type(path2), path2))
1446
return cmp(MultiWalker._path_to_key(path1),
1447
MultiWalker._path_to_key(path2))
1450
def _path_to_key(path):
1451
dirname, basename = osutils.split(path)
1452
return (dirname.split(u'/'), basename)
1454
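
    # A hypothetical illustration of the ordering produced by
    # _cmp_path_by_dirblock() / _path_to_key(): all top-level entries sort
    # before any subdirectory contents, so
    #   [u'a', u'b', u'a/c', u'a/d', u'b/e']
    # is already in sorted order (a plain string sort would instead place
    # u'a/c' before u'b').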

    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed.

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)
"""Match up the values in the different trees."""
1488
for result in self._walk_master_tree():
1490
self._finish_others()
1491
for result in self._walk_others():
1494
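
    # Illustrative usage sketch (hypothetical names, not from the original
    # source): walking a working tree against its basis tree.  Both trees are
    # assumed to be read-locked and to support iter_entries_by_dir().
    #
    #   walker = MultiWalker(work_tree, [basis_tree])
    #   for path, file_id, master_ie, other_values in walker.iter_all():
    #       basis_path, basis_ie = other_values[0]
    #       # master_ie is None for entries present only in the other trees;
    #       # basis_path/basis_ie are None when the basis lacks the file_id.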

    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra

    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries

    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values


class RevisionTree(Tree):
    """Tree viewing a previous revision.

    File text can be retrieved from the text store.

    TODO: Some kind of `__repr__` method, but a good one
          probably means knowing the branch and revision number,
          or at least passing a description to the constructor.
    """

    def __init__(self, store, inv):
        self._store = store
        self._inventory = inv

    def get_file(self, file_id):
        ie = self._inventory[file_id]
        f = self._store[ie.text_id]
        mutter(" get fileid{%s} from %r" % (file_id, self))
        self._check_retrieved(ie, f)
        return f

    def get_file_size(self, file_id):
        return self._inventory[file_id].text_size

    def get_file_sha1(self, file_id):
        ie = self._inventory[file_id]
        return ie.text_sha1

    def has_filename(self, filename):
        return bool(self.inventory.path2id(filename))

    def list_files(self):
        # The only files returned by this are those from the version
        for path, entry in self.inventory.iter_entries():
            yield path, 'V', entry.kind, entry.file_id
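
    # Illustrative sketch (hypothetical names): reading a stored file text
    # through a RevisionTree, assuming 'store' and 'inv' came from a recorded
    # revision and get_file() returns a file-like object:
    #
    #   rev_tree = RevisionTree(store, inv)
    #   text = rev_tree.get_file(file_id).read()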


class EmptyTree(Tree):
    def __init__(self):
        self._inventory = Inventory()

    def has_filename(self, filename):
        return False

    def list_files(self):
        if False:  # just to make it a generator
            yield None


######################################################################

# TODO: Merge these two functions into a single one that can operate
# on either a whole tree or a set of files.

# TODO: Return the diff in order by filename, not by category or in
# random order. Can probably be done by lock-stepping through the
# filenames from both trees.


def file_status(filename, old_tree, new_tree):
    """Return single-letter status, old and new names for a file.

    The complexity here is in deciding how to represent renames;
    many complex cases are possible.
    """
    old_inv = old_tree.inventory
    new_inv = new_tree.inventory
    new_id = new_inv.path2id(filename)
    old_id = old_inv.path2id(filename)

    if not new_id and not old_id:
        # easy: doesn't exist in either; not versioned at all
        if new_tree.is_ignored(filename):
            return 'I', None, None
        else:
            return '?', None, None
    elif new_id:
        # There is now a file of this name, great.
        pass
    else:
        # There is no longer a file of this name, but we can describe
        # what happened to the file that used to have
        # this name. There are two possibilities: either it was
        # deleted entirely, or renamed.
        if new_inv.has_id(old_id):
            return 'X', old_inv.id2path(old_id), new_inv.id2path(old_id)
        else:
            return 'D', old_inv.id2path(old_id), None

    # if the file_id is new in this revision, it is added
    if new_id and not old_inv.has_id(new_id):
        return 'A', None, new_inv.id2path(new_id)

    # if there used to be a file of this name, but that ID has now
    # disappeared, it is deleted
    if old_id and not new_inv.has_id(old_id):
        return 'D', old_inv.id2path(old_id), None

    # otherwise the file exists in both trees under this name
    return 'M', old_inv.id2path(old_id), new_inv.id2path(new_id)
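
# Illustrative call (hypothetical names): comparing a single path between a
# basis tree and a working tree.
#
#   status, old_name, new_name = file_status('README', basis_tree, work_tree)
#   # 'status' is one of 'I', '?', 'X', 'D', 'A' or 'M' as decided above.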


def find_renames(old_inv, new_inv):
    for file_id in old_inv:
        if file_id not in new_inv:
            continue
        old_name = old_inv.id2path(file_id)
        new_name = new_inv.id2path(file_id)
        if old_name != new_name:
            yield (old_name, new_name)
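
# Illustrative use (hypothetical names): listing renames between two
# inventories that share file ids.
#
#   for old_name, new_name in find_renames(old_tree.inventory,
#                                          new_tree.inventory):
#       print '%s => %s' % (old_name, new_name)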