    Trees can be compared, etc., regardless of whether they are working
    trees or versioned trees.
    """

    def changes_from(self, other, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):
        """Return a TreeDelta of the changes from other to this tree.

        :param other: A tree to compare with.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children
            of matched directories are included.
        :param want_unchanged: An optional boolean requesting the inclusion of
            unchanged entries in the result.
        :param extra_trees: An optional list of additional trees to use when
            mapping the contents of specific_files (paths) to file_ids.
        :param require_versioned: An optional boolean (defaults to False). When
            supplied and True all the 'specific_files' must be versioned, or
            a PathsNotVersionedError will be thrown.
        :param want_unversioned: Scan for unversioned paths.

        The comparison will be performed by an InterTree object looked up on
        self and other.
        """
        # Martin observes that Tree.changes_from returns a TreeDelta and this
        # may confuse people, because the class name of the returned object is
        # a synonym of the object referenced in the method name.
        return InterTree.get(other, self).compare(
            want_unchanged=want_unchanged,
            specific_files=specific_files,
            extra_trees=extra_trees,
            require_versioned=require_versioned,
            include_root=include_root,
            want_unversioned=want_unversioned,
            )
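
    # Illustrative usage sketch (not part of bzrlib itself): given a working
    # tree ``wt``, a caller typically compares it against its basis tree::
    #
    #   basis = wt.basis_tree()
    #   delta = wt.changes_from(basis)
    #   for path, file_id, kind in delta.added:
    #       print 'added %s (%s)' % (path, kind)
    #
    # ``delta`` is a bzrlib.delta.TreeDelta; the (path, file_id, kind) layout
    # of ``delta.added`` shown here is an assumption for illustration, so
    # treat this as a sketch rather than a tested example.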

    def iter_changes(self, from_tree, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=None,
                     require_versioned=True, want_unversioned=False):
        intertree = InterTree.get(from_tree, self)
        return intertree.iter_changes(include_unchanged, specific_files, pb,
            extra_trees, require_versioned, want_unversioned=want_unversioned)
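
    # Consumption sketch (assumed names, untested): each item is the 8-tuple
    # documented on InterTree.iter_changes below::
    #
    #   for (file_id, paths, changed, versioned, parent, name, kind,
    #        executable) in tree.iter_changes(basis_tree):
    #       if changed:
    #           print 'content changed:', paths[1] or paths[0]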

    def conflicts(self):
        """Get a list of the conflicts in the tree.

        Each conflict is an instance of bzrlib.conflicts.Conflict.
        """
        return _mod_conflicts.ConflictList()

    def extras(self):
        """For trees that can have unversioned files, return all such paths."""
        return []

    def get_parent_ids(self):
        """Get the parent ids for this tree.

        :return: a list of parent ids. [] is returned to indicate
            a tree with no parents.
        :raises: BzrError if the parents are not known.
        """
        raise NotImplementedError(self.get_parent_ids)

    def has_filename(self, filename):
        """True if the tree has given filename."""
        raise NotImplementedError(self.has_filename)

    def has_id(self, file_id):
        return self.inventory.has_id(file_id)

    def __contains__(self, file_id):
        return self.has_id(file_id)

    def has_or_had_id(self, file_id):
        if file_id == self.inventory.root.file_id:
            return True
        return self.inventory.has_id(file_id)

    def is_ignored(self, filename):
        """Check whether the filename is ignored by this tree.

        :param filename: The relative filename within the tree.
        :return: True if the filename is ignored.
        """
        return False

    def __iter__(self):
        return iter(self.inventory)

    def all_file_ids(self):
        """Iterate through all file ids, including ids for missing files."""
        return set(self.inventory)

    def id2path(self, file_id):
        """Return the path for a file id."""
        return self.inventory.id2path(file_id)

    def is_control_filename(self, filename):
        """True if filename is the name of a control file in this tree.

        :param filename: A filename within the tree. This is a relative path
            from the root of this tree.

        This is true IF and ONLY IF the filename is part of the meta data
        that bzr controls in this tree. I.E. a random .bzr directory placed
        on disk will not be a control file for this tree.
        """
        return self.bzrdir.is_control_filename(filename)

    def iter_entries_by_dir(self, specific_file_ids=None):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the
        children.

        For example, in the tree::

           a/
             b/
               c
             d/
               e
           f/
             g

        The yield order (ignoring root) would be::

          a, f, a/b, a/d, a/b/c, a/d/e, f/g
        """
        return self.inventory.iter_entries_by_dir(
            specific_file_ids=specific_file_ids)
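
    # Hedged usage sketch (names assumed, not a tested bzrlib example):
    # ``iter_entries_by_dir`` is the usual way to enumerate a whole tree::
    #
    #   for path, entry in tree.iter_entries_by_dir():
    #       print '%-10s %s %s' % (entry.kind, entry.file_id, path)
    #
    # ``entry`` is an InventoryEntry, so ``entry.kind``, ``entry.name`` and
    # ``entry.parent_id`` are available as described in bzrlib.inventory.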

    def iter_references(self):
        for path, entry in self.iter_entries_by_dir():
            if entry.kind == 'tree-reference':
                yield path, entry.file_id

    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
            % self.__class__.__name__)

    def stored_kind(self, file_id):
        """File kind stored for this file_id.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        """
        return self.kind(file_id)

    def path_content_summary(self, path):
        """Get a summary of the information about path.

        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file, None otherwise.
            exec is None unless kind is file and the platform supports the 'x'
            bit.
            sha1-or-link is the link target if kind is symlink, or the sha1 if
            it can be obtained without reading the file.
        """
        raise NotImplementedError(self.path_content_summary)
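
    # Sketch of how the summary tuple is typically unpacked (assumed variable
    # names; exact behaviour depends on the concrete Tree subclass)::
    #
    #   kind, size, executable, fingerprint = tree.path_content_summary(path)
    #   if kind == 'file' and fingerprint is not None:
    #       pass  # fingerprint is the sha1, size is the length in bytes
    #   elif kind == 'symlink':
    #       pass  # fingerprint is the link target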

    def get_reference_revision(self, file_id, path=None):
        raise NotImplementedError("Tree subclass %s must implement "
                                  "get_reference_revision"
            % self.__class__.__name__)

    def _comparison_data(self, entry, path):
        """Return a tuple of kind, executable, stat_value for a file.

        entry may be None if there is no inventory entry for the file, but
        path must always be supplied.

        kind is None if there is no file present (even if an inventory id is
        present). executable is False for non-file entries.
        """
        raise NotImplementedError(self._comparison_data)

    def _file_size(self, entry, stat_value):
        raise NotImplementedError(self._file_size)

    def _get_inventory(self):
        return self._inventory

    def get_file(self, file_id, path=None):
        """Return a file object for the file file_id in the tree.

        If both file_id and path are defined, it is implementation defined as
        to which one is used.
        """
        raise NotImplementedError(self.get_file)

    def get_file_text(self, file_id, path=None):
        """Return the byte content of a file.

        :param file_id: The file_id of the file.
        :param path: The path of the file.

        If both file_id and path are supplied, an implementation may use
        either one.
        """
        my_file = self.get_file(file_id, path)
        try:
            return my_file.read()
        finally:
            my_file.close()

    def get_file_lines(self, file_id, path=None):
        """Return the content of a file, as lines.

        :param file_id: The file_id of the file.
        :param path: The path of the file.

        If both file_id and path are supplied, an implementation may use
        either one.
        """
        return osutils.split_lines(self.get_file_text(file_id, path))

    def get_file_mtime(self, file_id, path=None):
        """Return the modification time for a file.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        """
        raise NotImplementedError(self.get_file_mtime)

    def get_file_size(self, file_id):
        """Return the size of a file in bytes.

        This applies only to regular files. If invoked on directories or
        symlinks, it will return None.

        :param file_id: The file-id of the file
        """
        raise NotImplementedError(self.get_file_size)

    def get_file_by_path(self, path):
        return self.get_file(self._inventory.path2id(path), path)

    def iter_files_bytes(self, desired_files):
        """Iterate through file contents.

        Files will not necessarily be returned in the order they occur in
        desired_files. No specific order is guaranteed.

        Yields pairs of identifier, bytes_iterator. identifier is an opaque
        value supplied by the caller as part of desired_files. It should
        uniquely identify the file version in the caller's context. (Examples:
        an index number or a TreeTransform trans_id.)

        bytes_iterator is an iterable of bytestrings for the file. The
        kind of iterable and length of the bytestrings are unspecified, but for
        this implementation, it is a tuple containing a single bytestring with
        the complete text of the file.

        :param desired_files: a list of (file_id, identifier) pairs
        """
        for file_id, identifier in desired_files:
            # We wrap the string in a tuple so that we can return an iterable
            # of bytestrings. (Technically, a bytestring is also an iterable
            # of bytestrings, but iterating through each character is not
            # performant.)
            cur_file = (self.get_file_text(file_id),)
            yield identifier, cur_file
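
    # Usage sketch (assumed names, untested): callers pair each file_id with
    # an opaque identifier and stream the contents back, e.g.::
    #
    #   wanted = [('file-id-1', 'a.txt'), ('file-id-2', 'b.txt')]
    #   for identifier, chunks in tree.iter_files_bytes(wanted):
    #       text = ''.join(chunks)
    #       print identifier, len(text)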

    def get_symlink_target(self, file_id):
        """Get the target for a given file_id.

        It is assumed that the caller already knows that file_id is
        referencing a symlink.

        :param file_id: Handle for the symlink entry.
        :return: The path the symlink points to.
        """
        raise NotImplementedError(self.get_symlink_target)

    def get_canonical_inventory_paths(self, paths):
        """Like get_canonical_inventory_path() but works on multiple items.

        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case
            insensitively.
        """
        return list(self._yield_canonical_inventory_paths(paths))

    def get_canonical_inventory_path(self, path):
        """Returns the first inventory item that case-insensitively matches path.

        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches case-insensitively, it is
        implementation defined which is returned.

        If no path matches case-insensitively, the input path is returned, but
        with as many path entries that do exist changed to their canonical
        form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        """
        return self._yield_canonical_inventory_paths([path]).next()
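
    # Sketch (assumed file names, untested): on a tree that versions
    # 'README.txt', a case-insensitive lookup would behave roughly like::
    #
    #   tree.get_canonical_inventory_path('readme.TXT')   # -> 'README.txt'
    #   tree.get_canonical_inventory_path('no/such/File')  # -> input echoed
    #       # back, with any existing leading directories canonicalised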

    def _yield_canonical_inventory_paths(self, paths):
        for path in paths:
            # First, if the path as specified exists exactly, just use it.
            if self.path2id(path) is not None:
                yield path
                continue
            # Otherwise walk down from the root, matching one element at a
            # time without regard to case.
            cur_id = self.get_root_id()
            cur_path = ''
            bit_iter = iter(path.split("/"))
            for elt in bit_iter:
                lelt = elt.lower()
                for child in self.iter_children(cur_id):
                    try:
                        child_base = os.path.basename(self.id2path(child))
                        if child_base.lower() == lelt:
                            cur_id = child
                            cur_path = osutils.pathjoin(cur_path, child_base)
                            break
                    except errors.NoSuchId:
                        # before a change is committed we can see this error...
                        continue
                else:
                    # got to the end of this directory and no entries matched.
                    # Return what matched so far, plus the rest as specified.
                    cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
                    break
            yield cur_path

    def get_root_id(self):
        """Return the file_id for the root of this tree."""
        raise NotImplementedError(self.get_root_id)

    def annotate_iter(self, file_id,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.

        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.
        :param file_id: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of
            it.
        """
        raise NotImplementedError(self.annotate_iter)

    def _get_plan_merge_data(self, file_id, other, base):
        from bzrlib import merge, versionedfile
        vf = versionedfile._PlanMergeVersionedFile(file_id)
        last_revision_a = self._get_file_revision(file_id, vf, 'this:')
        last_revision_b = other._get_file_revision(file_id, vf, 'other:')
        if base is None:
            last_revision_base = None
        else:
            last_revision_base = base._get_file_revision(file_id, vf, 'base:')
        return vf, last_revision_a, last_revision_b, last_revision_base

    def plan_file_merge(self, file_id, other, base=None):
        """Generate a merge plan based on annotations.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,
                             last_revision_base)
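
    # Sketch of consuming a merge plan (state names as used by
    # _PlanMergeVersionedFile.plan_merge; treat as illustrative, not tested)::
    #
    #   for state, line in this_tree.plan_file_merge(file_id, other_tree):
    #       if state == 'unchanged':
    #           pass  # line is common to both sides
    #       elif state in ('new-a', 'new-b'):
    #           pass  # line added on one side only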

    def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 last_revision_base)

    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)

    @staticmethod
    def _file_revision(revision_tree, file_id):
        """Determine the revision associated with a file in a given tree."""
        revision_tree.lock_read()
        try:
            return revision_tree.inventory[file_id].revision
        finally:
            revision_tree.unlock()

    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                         self.get_file(file_id).readlines())
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self._file_revision(self, file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def _check_retrieved(self, ie, f):
        fp = fingerprint_file(f)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise BzrError("mismatched size for file %r in %r" % (ie.file_id, self._store),
                        ["inventory expects %d bytes" % ie.text_size,
                         "file is actually %d bytes" % fp['size'],
                         "store is probably damaged/corrupt"])

        if ie.text_sha1 != fp['sha1']:
            raise BzrError("wrong SHA-1 for file %r in %r" % (ie.file_id, self._store),
                    ["inventory expects %s" % ie.text_sha1,
                     "file is actually %s" % fp['sha1'],
                     "store is probably damaged/corrupt"])

    def path2id(self, path):
        """Return the id for path in this tree."""
        return self._inventory.path2id(path)

    def paths2ids(self, paths, trees=[], require_versioned=True):
        """Return all the ids that can be reached by walking from paths.

        Each path is looked up in this tree and any extras provided in
        trees, and this is repeated recursively: the children in an extra tree
        of a directory that has been renamed under a provided path in this
        tree are all returned, even if none exist under a provided path in
        this tree, and vice versa.

        :param paths: An iterable of paths to start converting to ids from.
            Alternatively, if paths is None, no ids should be calculated and
            None will be returned. This is offered to make calling the api
            unconditional for code that *might* take a list of files.
        :param trees: Additional trees to consider.
        :param require_versioned: If False, do not raise NotVersionedError if
            an element of paths is not versioned in this tree and all of trees.
        """
        return find_ids_across_trees(paths, [self] + list(trees), require_versioned)

    def iter_children(self, file_id):
        entry = self.iter_entries_by_dir([file_id]).next()[1]
        for child in getattr(entry, 'children', {}).itervalues():
            yield child.file_id

    def revision_tree(self, revision_id):
        """Obtain a revision tree for the revision revision_id.

        The intention of this method is to allow access to possibly cached
        tree data. Implementors of this method should raise NoSuchRevision if
        the tree is not locally available, even if they could obtain the
        tree via a repository or some other means. Callers are responsible
        for finding the ultimate source for a revision tree.

        :param revision_id: The revision_id of the requested tree.
        :raises: NoSuchRevision if the tree cannot be obtained.
        """
        raise errors.NoSuchRevisionInTree(self, revision_id)

    def unknowns(self):
        """What files are present in this tree and unknown.

        :return: an iterator over the unknown files.
        """
        return iter([])

    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.

        :return: set of paths.
        """
        # NB: we specifically *don't* call self.has_filename, because for
        # WorkingTrees that can indicate files that exist on disk but that
        # are not versioned.
        pred = self.inventory.has_filename
        return set((p for p in paths if not pred(p)))

    def walkdirs(self, prefix=""):
        """Walk the contents of this tree from path down.

        This yields all the data about the contents of a directory at a time.
        After each directory has been yielded, if the caller has mutated the
        list to exclude some directories, they are then not descended into.

        The data yielded is of the form:
        ((directory-relpath, directory-path-from-root, directory-fileid),
        [(relpath, basename, kind, lstat, path_from_tree_root, file_id,
          versioned_kind), ...]),
         - directory-relpath is the containing dir's relpath from prefix
         - directory-path-from-root is the containing dir's path from /
         - directory-fileid is the id of the directory if it is versioned.
         - relpath is the relative path within the subtree being walked.
         - basename is the basename of relpath
         - kind is the kind of the file now. If unknown then the file is not
           present within the tree - but it may be recorded as versioned. See
           versioned_kind.
         - lstat is the stat data *if* the file was statted.
         - path_from_tree_root is the path from the root of the tree.
         - file_id is the file_id if the entry is versioned.
         - versioned_kind is the kind of the file as last recorded in the
           versioning system. If 'unknown' the file is not versioned.
        One of 'kind' and 'versioned_kind' must not be 'unknown'.

        :param prefix: Start walking from prefix within the tree rather than
            at the root. This allows one to walk a subtree but get paths that
            are relative to a tree rooted higher up.
        :return: an iterator over the directory data.
        """
        raise NotImplementedError(self.walkdirs)
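
    # Consumption sketch (assumed names; the pruning idiom mirrors os.walk,
    # mutating the yielded list in place to skip subdirectories)::
    #
    #   for (dir_relpath, dir_path, dir_file_id), entries in tree.walkdirs():
    #       entries[:] = [e for e in entries if e[1] != 'build']
    #       for relpath, basename, kind, lstat, path, file_id, vkind in entries:
    #           print kind, relpath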

    def supports_content_filtering(self):
        return False

    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
            or None if unknown
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        """
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            note("*** %s content-filter: %s => %r" % (path, prefs, stk))
        return stk

    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        """
        if self.supports_content_filtering():
            return lambda path, file_id: \
                    self._content_filter_stack(path, file_id)
        else:
            return None

    def iter_search_rules(self, path_names, pref_names=None,
                          _default_searcher=rules._per_user_searcher):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
            Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't
            use
        :return: an iterator of tuple sequences, one per path-name.
            See _RulesSearcher.get_items for details on the tuple sequence.
        """
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)

    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher


######################################################################

def find_renames(old_inv, new_inv):
    for file_id in old_inv:
        if file_id not in new_inv:
            continue
        old_name = old_inv.id2path(file_id)
        new_name = new_inv.id2path(file_id)
        if old_name != new_name:
            yield (old_name, new_name)

def find_ids_across_trees(filenames, trees, require_versioned=True):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, and all children of matched
    directories will be used.

    :param filenames: The filenames to find file_ids for (if None, returns
        None)
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames and their children.
    """
    if not filenames:
        return None
    specified_path_ids = _find_ids_across_trees(filenames, trees,
        require_versioned)
    return _find_children_across_trees(specified_path_ids, trees)
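
# Usage sketch (assumed names, untested): expanding a command-line file list
# into the ids to operate on, across a working tree and its basis::
#
#   ids = find_ids_across_trees(['doc/', 'README'], [work_tree, basis_tree])
#   for file_id in ids or []:
#       pass  # every matched file, plus everything under matched directories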

def _find_ids_across_trees(filenames, trees, require_versioned):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, but subdirectories are not scanned.

    :param filenames: The filenames to find file_ids for
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames
    """
    not_versioned = []
    interesting_ids = set()
    for tree_path in filenames:
        not_found = True
        for tree in trees:
            file_id = tree.path2id(tree_path)
            if file_id is not None:
                interesting_ids.add(file_id)
                not_found = False
        if not_found:
            not_versioned.append(tree_path)
    if len(not_versioned) > 0 and require_versioned:
        raise errors.PathsNotVersionedError(not_versioned)
    return interesting_ids

def _find_children_across_trees(specified_ids, trees):
    """Return a set including specified ids and their children.

    All matches in all trees will be used.

    :param trees: The trees to find file_ids within
    :return: a set containing all specified ids and their children
    """
    interesting_ids = set(specified_ids)
    pending = interesting_ids
    # now handle children of interesting ids
    # we loop so that we handle all children of each id in both trees
    while len(pending) > 0:
        new_pending = set()
        for file_id in pending:
            for tree in trees:
                if not tree.has_id(file_id):
                    continue
                for child_id in tree.iter_children(file_id):
                    if child_id not in interesting_ids:
                        new_pending.add(child_id)
        interesting_ids.update(new_pending)
        pending = new_pending
    return interesting_ids

class InterTree(InterObject):
    """This class represents operations taking place between two Trees.

    Its instances have methods like 'compare' and contain references to the
    source and target trees these operations are to be carried out on.

    Clients of bzrlib should not need to use InterTree directly, rather they
    should use the convenience methods on Tree such as 'Tree.compare()' which
    will pass through to InterTree as appropriate.
    """

    def compare(self, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):
        """Return the changes from source to target.

        :return: A TreeDelta.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children
            of matched directories are included.
        :param want_unchanged: An optional boolean requesting the inclusion of
            unchanged entries in the result.
        :param extra_trees: An optional list of additional trees to use when
            mapping the contents of specific_files (paths) to file_ids.
        :param require_versioned: An optional boolean (defaults to False). When
            supplied and True all the 'specific_files' must be versioned, or
            a PathsNotVersionedError will be thrown.
        :param want_unversioned: Scan for unversioned paths.
        """
        # NB: show_status depends on being able to pass in non-versioned files
        # and report them as unknown
        trees = (self.source,)
        if extra_trees is not None:
            trees = trees + tuple(extra_trees)
        # target is usually the newer tree:
        specific_file_ids = self.target.paths2ids(specific_files, trees,
            require_versioned=require_versioned)
        if specific_files and not specific_file_ids:
            # All files are unversioned, so just return an empty delta
            # _compare_trees would think we want a complete delta
            result = delta.TreeDelta()
            fake_entry = InventoryFile('unused', 'unused', 'unused')
            result.unversioned = [(path, None,
                self.target._comparison_data(fake_entry, path)[0]) for path in
                specific_files]
            return result
        return delta._compare_trees(self.source, self.target, want_unchanged,
            specific_files, include_root, extra_trees=extra_trees,
            require_versioned=require_versioned,
            want_unversioned=want_unversioned)

    def iter_changes(self, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=[],
                     require_versioned=True, want_unversioned=False):
        """Generate an iterator of changes between trees.

        A tuple is returned:
        (file_id, (path_in_source, path_in_target),
         changed_content, versioned, parent, name, kind,
         executable)

        Changed_content is True if the file's content has changed. This
        includes changes to its kind, and to a symlink's target.

        versioned, parent, name, kind, executable are tuples of (from, to).
        If a file is missing in a tree, its kind is None.

        Iteration is done in parent-to-child order, relative to the target
        tree.

        There is no guarantee that all paths are in sorted order: the
        requirement to expand the search due to renames may result in children
        that should be found early being found late in the search, after
        lexically later results have been returned.
        :param require_versioned: Raise errors.PathsNotVersionedError if a
            path in the specific_files list is not versioned in one of
            source, target or extra_trees.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        """
        lookup_trees = [self.source]
        if extra_trees:
            lookup_trees.extend(extra_trees)
        if specific_files == []:
            specific_file_ids = []
        else:
            specific_file_ids = self.target.paths2ids(specific_files,
                lookup_trees, require_versioned=require_versioned)
        if want_unversioned:
            all_unversioned = sorted([(p.split('/'), p) for p in
                                      self.target.extras()
                if specific_files is None or
                    osutils.is_inside_any(specific_files, p)])
            all_unversioned = deque(all_unversioned)
        else:
            all_unversioned = deque()
        to_paths = {}
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        entry_count = 0
        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable values when execute is not supported.
        fake_entry = InventoryFile('unused', 'unused', 'unused')
        for to_path, to_entry in to_entries_by_dir:
            while all_unversioned and all_unversioned[0][0] < to_path.split('/'):
                unversioned_path = all_unversioned.popleft()
                to_kind, to_executable, to_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, None),
                    (None, unversioned_path[0][-1]),
                    (None, to_kind),
                    (None, to_executable))
            file_id = to_entry.file_id
            to_paths[file_id] = to_path
            entry_count += 1
            changed_content = False
            from_path, from_entry = from_data.get(file_id, (None, None))
            from_versioned = (from_entry is not None)
            if from_entry is not None:
                from_versioned = True
                from_name = from_entry.name
                from_parent = from_entry.parent_id
                from_kind, from_executable, from_stat = \
                    self.source._comparison_data(from_entry, from_path)
                entry_count += 1
            else:
                from_versioned = False
                from_kind = None
                from_parent = None
                from_name = None
                from_executable = None
            versioned = (from_versioned, True)
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(to_entry, to_path)
            kind = (from_kind, to_kind)
            if kind[0] != kind[1]:
                changed_content = True
            elif from_kind == 'file':
                if (self.source.get_file_sha1(file_id, from_path, from_stat) !=
                    self.target.get_file_sha1(file_id, to_path, to_stat)):
                    changed_content = True
            elif from_kind == 'symlink':
                if (self.source.get_symlink_target(file_id) !=
                    self.target.get_symlink_target(file_id)):
                    changed_content = True
                # XXX: Yes, the indentation below is wrong. But fixing it broke
                # test_merge.TestMergerEntriesLCAOnDisk.
                # test_nested_tree_subtree_renamed_and_modified. We'll wait for
                # the fix from bzr.dev -- vila 2009026
            elif from_kind == 'tree-reference':
                if (self.source.get_reference_revision(file_id, from_path)
                    != self.target.get_reference_revision(file_id, to_path)):
                    changed_content = True
            parent = (from_parent, to_entry.parent_id)
            name = (from_name, to_entry.name)
            executable = (from_executable, to_executable)
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            if (changed_content is not False or versioned[0] != versioned[1]
                or parent[0] != parent[1] or name[0] != name[1] or
                executable[0] != executable[1] or include_unchanged):
                yield (file_id, (from_path, to_path), changed_content,
                       versioned, parent, name, kind, executable)

        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(fake_entry, unversioned_path[1])
            yield (None, (None, unversioned_path[1]), True, (False, False),
                (None, None),
                (None, unversioned_path[0][-1]),
                (None, to_kind),
                (None, to_executable))

        def get_to_path(to_entry):
            if to_entry.parent_id is None:
                to_path = ''  # the root
            else:
                if to_entry.parent_id not in to_paths:
                    # recurse up
                    return get_to_path(self.target.inventory[to_entry.parent_id])
                to_path = osutils.pathjoin(to_paths[to_entry.parent_id],
                                           to_entry.name)
            to_paths[to_entry.file_id] = to_path
            return to_path

        for path, from_entry in from_entries_by_dir:
            file_id = from_entry.file_id
            if file_id in to_paths:
                # already returned
                continue
            if not file_id in self.target.all_file_ids():
                # common case - paths we have not emitted are not present in
                # the target.
                to_path = None
            else:
                to_path = get_to_path(self.target.inventory[file_id])
            entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            versioned = (True, False)
            parent = (from_entry.parent_id, None)
            name = (from_entry.name, None)
            from_kind, from_executable, stat_value = \
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            yield (file_id, (path, to_path), changed_content, versioned, parent,
                   name, kind, executable)


class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()

    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie

    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal,
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
                   MultiWalker._path_to_key(path2))

    @staticmethod
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)
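
    # Ordering sketch (illustrative, not a doctest): because the key is
    # (dirname-split-on-'/', basename), all direct children of a directory
    # sort before any of its grandchildren, so under _path_to_key we get
    # u'a/z' < u'a/b/c' (children of 'a' first), whereas a plain string sort
    # would put u'a/b/c' before u'a/z'.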

    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the
            entry yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)

    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result

    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra

    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries

    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                                            alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values
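
# Usage sketch for MultiWalker (assumed names, untested): walking a master
# tree alongside two other trees and getting per-tree entries keyed off the
# master tree's order::
#
#   walker = MultiWalker(master_tree, [tree_a, tree_b])
#   for path, file_id, master_ie, other_values in walker.iter_all():
#       (path_a, ie_a), (path_b, ie_b) = other_values
#       # master_ie is None for entries that exist only in the other trees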