    Trees can be compared, etc, regardless of whether they are working
    trees or versioned trees.
    """

    def changes_from(self, other, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):
        """Return a TreeDelta of the changes from other to this tree.

        :param other: A tree to compare with.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children of
            matched directories are included.
        :param want_unchanged: An optional boolean requesting the inclusion of
            unchanged entries in the result.
        :param extra_trees: An optional list of additional trees to use when
            mapping the contents of specific_files (paths) to file_ids.
        :param require_versioned: An optional boolean (defaults to False). When
            supplied and True all the 'specific_files' must be versioned, or
            a PathsNotVersionedError will be thrown.
        :param want_unversioned: Scan for unversioned paths.

        The comparison will be performed by an InterTree object looked up on
        self and other.
        """
        # Martin observes that Tree.changes_from returns a TreeDelta and this
        # may confuse people, because the class name of the returned object is
        # a synonym of the object referenced in the method name.
        return InterTree.get(other, self).compare(
            want_unchanged=want_unchanged,
            specific_files=specific_files,
            extra_trees=extra_trees,
            require_versioned=require_versioned,
            include_root=include_root,
            want_unversioned=want_unversioned,
            )
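    # Illustrative usage sketch (not part of the API). Assumes `wt` is a
    # WorkingTree obtained elsewhere; TreeDelta typically exposes lists such
    # as `added` ((path, file_id, kind) tuples) and a `has_changed()` query:
    #
    #     delta = wt.changes_from(wt.basis_tree())
    #     if delta.has_changed():
    #         for path, file_id, kind in delta.added:
    #             print 'added %s (%s)' % (path, kind)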
    def iter_changes(self, from_tree, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=None,
                     require_versioned=True, want_unversioned=False):
        intertree = InterTree.get(from_tree, self)
        return intertree.iter_changes(include_unchanged, specific_files, pb,
            extra_trees, require_versioned, want_unversioned=want_unversioned)
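    # Illustrative sketch (not part of the API): unpacking the change tuples
    # documented on InterTree.iter_changes. `old_tree` and `new_tree` are
    # assumed to be two locked trees obtained elsewhere:
    #
    #     for (file_id, (old_path, new_path), changed_content, versioned,
    #          parent, name, kind, executable) in new_tree.iter_changes(old_tree):
    #         if changed_content:
    #             print 'content changed: %s' % (new_path or old_path)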
    def conflicts(self):
        """Get a list of the conflicts in the tree.

        Each conflict is an instance of bzrlib.conflicts.Conflict.
        """
        return _mod_conflicts.ConflictList()

    def extras(self):
        """For trees that can have unversioned files, return all such paths."""
        return []

    def get_parent_ids(self):
        """Get the parent ids for this tree.

        :return: a list of parent ids. [] is returned to indicate
            a tree with no parents.
        :raises: BzrError if the parents are not known.
        """
        raise NotImplementedError(self.get_parent_ids)
    def has_filename(self, filename):
        """True if the tree has given filename."""
        raise NotImplementedError(self.has_filename)

    def has_id(self, file_id):
        return self.inventory.has_id(file_id)

    def __contains__(self, file_id):
        return self.has_id(file_id)

    def has_or_had_id(self, file_id):
        if file_id == self.inventory.root.file_id:
            return True
        return self.inventory.has_id(file_id)

    def is_ignored(self, filename):
        """Check whether the filename is ignored by this tree.

        :param filename: The relative filename within the tree.
        :return: True if the filename is ignored.
        """
        return False

    def __iter__(self):
        return iter(self.inventory)

    def all_file_ids(self):
        """Iterate through all file ids, including ids for missing files."""
        return set(self.inventory)

    def id2path(self, file_id):
        """Return the path for a file id."""
        return self.inventory.id2path(file_id)

    def is_control_filename(self, filename):
        """True if filename is the name of a control file in this tree.

        :param filename: A filename within the tree. This is a relative path
            from the root of this tree.

        This is true IF and ONLY IF the filename is part of the meta data
        that bzr controls in this tree. I.E. a random .bzr directory placed
        on disk will not be a control file for this tree.
        """
        return self.bzrdir.is_control_filename(filename)
    def iter_entries_by_dir(self, specific_file_ids=None):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the
        grandchildren.

        For example, in the tree::

           a/
             b/
               c
             d/
               e
           f/
             g

        The yield order (ignoring root) would be::

          a, f, a/b, a/d, a/b/c, a/d/e, f/g
        """
        return self.inventory.iter_entries_by_dir(
            specific_file_ids=specific_file_ids)
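    # Illustrative sketch (not part of the API): the (path, entry) pairs can
    # be used to build a path-to-file_id map in by_dir order:
    #
    #     by_path = dict((path, entry.file_id)
    #                    for path, entry in tree.iter_entries_by_dir())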
    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path, entry.file_id

    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
            % self.__class__.__name__)

    def stored_kind(self, file_id):
        """File kind stored for this file_id.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        """
        return self.kind(file_id)

    def path_content_summary(self, path):
        """Get a summary of the information about path.

        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file, None otherwise.
            exec is None unless kind is file and the platform supports the 'x'
            bit.
            sha1-or-link is the link target if kind is symlink, or the sha1 if
            it can be obtained without reading the file.
        """
        raise NotImplementedError(self.path_content_summary)

    def get_reference_revision(self, file_id, path=None):
        raise NotImplementedError("Tree subclass %s must implement "
                                  "get_reference_revision"
            % self.__class__.__name__)

    def _comparison_data(self, entry, path):
        """Return a tuple of kind, executable, stat_value for a file.

        entry may be None if there is no inventory entry for the file, but
        path must always be supplied.

        kind is None if there is no file present (even if an inventory id is
        present). executable is False for non-file entries.
        """
        raise NotImplementedError(self._comparison_data)
    def _file_size(self, entry, stat_value):
        raise NotImplementedError(self._file_size)

    def _get_inventory(self):
        return self._inventory

    def get_file(self, file_id, path=None):
        """Return a file object for the file file_id in the tree.

        If both file_id and path are defined, it is implementation defined as
        to which one is used.
        """
        raise NotImplementedError(self.get_file)

    def get_file_with_stat(self, file_id, path=None):
        """Get a file handle and stat object for file_id.

        The default implementation returns (self.get_file, None) for backwards
        compatibility.

        :param file_id: The file id to read.
        :param path: The path of the file, if it is known.
        :return: A tuple (file_handle, stat_value_or_None). If the tree has
            no stat facility, or need for a stat cache feedback during commit,
            it may return None for the second element of the tuple.
        """
        return (self.get_file(file_id, path), None)
    def get_file_text(self, file_id, path=None):
        """Return the byte content of a file.

        :param file_id: The file_id of the file.
        :param path: The path of the file.
        If both file_id and path are supplied, an implementation may use
        either one.
        """
        my_file = self.get_file(file_id, path)
        return my_file.read()

    def get_file_lines(self, file_id, path=None):
        """Return the content of a file, as lines.

        :param file_id: The file_id of the file.
        :param path: The path of the file.
        If both file_id and path are supplied, an implementation may use
        either one.
        """
        return osutils.split_lines(self.get_file_text(file_id, path))

    def get_file_mtime(self, file_id, path=None):
        """Return the modification time for a file.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        """
        raise NotImplementedError(self.get_file_mtime)

    def get_file_size(self, file_id):
        """Return the size of a file in bytes.

        This applies only to regular files. If invoked on directories or
        symlinks, it will return None.

        :param file_id: The file-id of the file
        """
        raise NotImplementedError(self.get_file_size)

    def get_file_by_path(self, path):
        return self.get_file(self._inventory.path2id(path), path)
    def iter_files_bytes(self, desired_files):
        """Iterate through file contents.

        Files will not necessarily be returned in the order they occur in
        desired_files. No specific order is guaranteed.

        Yields pairs of identifier, bytes_iterator. identifier is an opaque
        value supplied by the caller as part of desired_files. It should
        uniquely identify the file version in the caller's context. (Examples:
        an index number or a TreeTransform trans_id.)

        bytes_iterator is an iterable of bytestrings for the file. The
        kind of iterable and length of the bytestrings are unspecified, but for
        this implementation, it is a tuple containing a single bytestring with
        the complete text of the file.

        :param desired_files: a list of (file_id, identifier) pairs
        """
        for file_id, identifier in desired_files:
            # We wrap the string in a tuple so that we can return an iterable
            # of bytestrings. (Technically, a bytestring is also an iterable
            # of bytestrings, but iterating through each character is not
            # performant.)
            cur_file = (self.get_file_text(file_id),)
            yield identifier, cur_file
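    # Illustrative sketch (not part of the API): fetching several texts and
    # joining the yielded byte chunks, using the paths themselves as the
    # opaque identifiers:
    #
    #     wanted = [(file_id, path) for path, file_id in interesting]
    #     for path, chunks in tree.iter_files_bytes(wanted):
    #         text = ''.join(chunks)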
    def get_symlink_target(self, file_id):
        """Get the target for a given file_id.

        It is assumed that the caller already knows that file_id is referencing
        a symlink.
        :param file_id: Handle for the symlink entry.
        :return: The path the symlink points to.
        """
        raise NotImplementedError(self.get_symlink_target)

    def get_canonical_inventory_paths(self, paths):
        """Like get_canonical_inventory_path() but works on multiple items.

        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case
            insensitively.
        """
        return list(self._yield_canonical_inventory_paths(paths))

    def get_canonical_inventory_path(self, path):
        """Returns the first inventory item that case-insensitively matches path.

        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches case-insensitively, it is implementation
        defined which is returned.

        If no path matches case-insensitively, the input path is returned, but
        with as many path entries that do exist changed to their canonical
        form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        """
        return self._yield_canonical_inventory_paths([path]).next()
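    # Illustrative sketch (not part of the API): resolving a user-typed path
    # on a case-insensitive filesystem to the spelling stored in the
    # inventory (assuming 'doc/README.txt' is the versioned spelling):
    #
    #     tree.get_canonical_inventory_path('DOC/readme.TXT')
    #     # -> 'doc/README.txt'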
    def _yield_canonical_inventory_paths(self, paths):
        for path in paths:
            # First, if the path as specified exists exactly, just use it.
            if self.path2id(path) is not None:
                yield path
                continue
            cur_id = self.get_root_id()
            cur_path = ''
            bit_iter = iter(path.split("/"))
            for elt in bit_iter:
                lelt = elt.lower()
                for child in self.iter_children(cur_id):
                    try:
                        child_base = os.path.basename(self.id2path(child))
                        if child_base.lower() == lelt:
                            cur_id = child
                            cur_path = osutils.pathjoin(cur_path, child_base)
                            break
                    except errors.NoSuchId:
                        # before a change is committed we can see this error...
                        continue
                else:
                    # got to the end of this directory and no entries matched.
                    # Return what matched so far, plus the rest as specified.
                    cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
                    break
            yield cur_path
    def get_root_id(self):
        """Return the file_id for the root of this tree."""
        raise NotImplementedError(self.get_root_id)

    def annotate_iter(self, file_id,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.

        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.

        :param file_id: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of
            it.
        """
        raise NotImplementedError(self.annotate_iter)
    def _get_plan_merge_data(self, file_id, other, base):
        from bzrlib import versionedfile
        vf = versionedfile._PlanMergeVersionedFile(file_id)
        last_revision_a = self._get_file_revision(file_id, vf, 'this:')
        last_revision_b = other._get_file_revision(file_id, vf, 'other:')
        if base is None:
            last_revision_base = None
        else:
            last_revision_base = base._get_file_revision(file_id, vf, 'base:')
        return vf, last_revision_a, last_revision_b, last_revision_base

    def plan_file_merge(self, file_id, other, base=None):
        """Generate a merge plan based on annotations.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,
                             base=last_revision_base)
    def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 base=last_revision_base)

    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)

    @staticmethod
    def _file_revision(revision_tree, file_id):
        """Determine the revision associated with a file in a given tree."""
        revision_tree.lock_read()
        try:
            return revision_tree.inventory[file_id].revision
        finally:
            revision_tree.unlock()
    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                         self.get_file(file_id).readlines())
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self._file_revision(self, file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")
    def _check_retrieved(self, ie, f):
        fp = fingerprint_file(f)
        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise BzrError("mismatched size for file %r in %r" % (ie.file_id, self._store),
                        ["inventory expects %d bytes" % ie.text_size,
                         "file is actually %d bytes" % fp['size'],
                         "store is probably damaged/corrupt"])
        if ie.text_sha1 != fp['sha1']:
            raise BzrError("wrong SHA-1 for file %r in %r" % (ie.file_id, self._store),
                    ["inventory expects %s" % ie.text_sha1,
                     "file is actually %s" % fp['sha1'],
                     "store is probably damaged/corrupt"])
    def path2id(self, path):
        """Return the id for path in this tree."""
        return self._inventory.path2id(path)

    def paths2ids(self, paths, trees=[], require_versioned=True):
        """Return all the ids that can be reached by walking from paths.

        Each path is looked up in this tree and any extras provided in
        trees, and this is repeated recursively: the children in an extra tree
        of a directory that has been renamed under a provided path in this tree
        are all returned, even if none exist under a provided path in this
        tree, and vice versa.

        :param paths: An iterable of paths to start converting to ids from.
            Alternatively, if paths is None, no ids should be calculated and None
            will be returned. This is offered to make calling the api unconditional
            for code that *might* take a list of files.
        :param trees: Additional trees to consider.
        :param require_versioned: If False, do not raise NotVersionedError if
            an element of paths is not versioned in this tree and all of trees.
        """
        return find_ids_across_trees(paths, [self] + list(trees), require_versioned)
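    # Illustrative sketch (not part of the API): mapping a handful of
    # user-supplied paths to file ids across this tree and a basis tree,
    # tolerating unversioned paths:
    #
    #     ids = tree.paths2ids(['doc', 'README'], trees=[basis_tree],
    #                          require_versioned=False)
    #     # `ids` is a set of file ids covering the paths and their children,
    #     # or None if the paths argument was None.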
    def iter_children(self, file_id):
        entry = self.iter_entries_by_dir([file_id]).next()[1]
        for child in getattr(entry, 'children', {}).itervalues():
            yield child.file_id

    def revision_tree(self, revision_id):
        """Obtain a revision tree for the revision revision_id.

        The intention of this method is to allow access to possibly cached
        tree data. Implementors of this method should raise NoSuchRevision if
        the tree is not locally available, even if they could obtain the
        tree via a repository or some other means. Callers are responsible
        for finding the ultimate source for a revision tree.

        :param revision_id: The revision_id of the requested tree.
        :raises: NoSuchRevision if the tree cannot be obtained.
        """
        raise errors.NoSuchRevisionInTree(self, revision_id)
    def unknowns(self):
        """What files are present in this tree and unknown.

        :return: an iterator over the unknown files.
        """
        return iter([])

    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.

        :return: set of paths.
        """
        # NB: we specifically *don't* call self.has_filename, because for
        # WorkingTrees that can indicate files that exist on disk but that
        # are not versioned.
        pred = self.inventory.has_filename
        return set((p for p in paths if not pred(p)))
    def walkdirs(self, prefix=""):
        """Walk the contents of this tree from path down.

        This yields all the data about the contents of a directory at a time.
        After each directory has been yielded, if the caller has mutated the
        list to exclude some directories, they are then not descended into.

        The data yielded is of the form:
        ((directory-relpath, directory-path-from-root, directory-fileid),
        [(relpath, basename, kind, lstat, path_from_tree_root, file_id,
          versioned_kind), ...]),
         - directory-relpath is the containing dirs relpath from prefix
         - directory-path-from-root is the containing dirs path from /
         - directory-fileid is the id of the directory if it is versioned.
         - relpath is the relative path within the subtree being walked.
         - basename is the basename
         - kind is the kind of the file now. If unknown then the file is not
           present within the tree - but it may be recorded as versioned. See
           versioned_kind.
         - lstat is the stat data *if* the file was statted.
         - path_from_tree_root is the path from the root of the tree.
         - file_id is the file_id if the entry is versioned.
         - versioned_kind is the kind of the file as last recorded in the
           versioning system. If 'unknown' the file is not versioned.
        One of 'kind' and 'versioned_kind' must not be 'unknown'.

        :param prefix: Start walking from prefix within the tree rather than
            at the root. This allows one to walk a subtree but get paths that are
            relative to a tree rooted higher up.
        :return: an iterator over the directory data.
        """
        raise NotImplementedError(self.walkdirs)
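    # Illustrative sketch (not part of the API): walking a subtree and
    # pruning unwanted directories by mutating the yielded entry list:
    #
    #     for (dir_relpath, dir_from_root, dir_id), entries in tree.walkdirs('doc'):
    #         # dropping a directory entry here stops the walk descending into it
    #         entries[:] = [e for e in entries if e[1] != 'build']
    #         for relpath, basename, kind, lstat, from_root, file_id, vkind in entries:
    #             print relpath, kind, vkind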
    def supports_content_filtering(self):
        return False

    def print_file(self, fileid):
        """Print file with id `fileid` to stdout."""
        import sys
        pumpfile(self.get_file(fileid), sys.stdout)

    def export(self, dest, format='dir', root=None):
        """Export this tree."""
        try:
            exporter = exporters[format]
        except KeyError:
            from bzrlib.errors import BzrCommandError
            raise BzrCommandError("export format %r not supported" % format)
        exporter(self, dest, root)
class RevisionTree(Tree):
    """Tree viewing a previous revision.

    File text can be retrieved from the text store.

    TODO: Some kind of `__repr__` method, but a good one
          probably means knowing the branch and revision number,
          or at least passing a description to the constructor.
    """

    def __init__(self, store, inv):
        self._store = store
        self._inventory = inv

    def get_file(self, file_id):
        ie = self._inventory[file_id]
        f = self._store[ie.text_id]
        mutter(" get fileid{%s} from %r" % (file_id, self))
        self._check_retrieved(ie, f)
        return f

    def get_file_size(self, file_id):
        return self._inventory[file_id].text_size

    def get_file_sha1(self, file_id):
        ie = self._inventory[file_id]
        if ie.kind == "file":
            return ie.text_sha1

    def has_filename(self, filename):
        return bool(self.inventory.path2id(filename))

    def list_files(self):
        # The only files returned by this are those from the version
        for path, entry in self.inventory.iter_entries():
            yield path, 'V', entry.kind, entry.file_id


class EmptyTree(Tree):
    def __init__(self):
        self._inventory = Inventory()

    def has_filename(self, filename):
        return False
    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        """
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            note("*** %s content-filter: %s => %r" % (path, prefs, stk))
        return stk

    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        """
        if self.supports_content_filtering():
            return lambda path, file_id: \
                    self._content_filter_stack(path, file_id)
        else:
            return None

    def iter_search_rules(self, path_names, pref_names=None,
                          _default_searcher=None):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
            Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
            See _RulesSearcher.get_items for details on the tuple sequence.
        """
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)

    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher
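    # Illustrative sketch (not part of the API): looking up per-file
    # preferences for a couple of paths; each yielded value is the tuple
    # sequence documented on _RulesSearcher.get_items:
    #
    #     for items in tree.iter_search_rules(['doc/index.txt', 'Makefile']):
    #         print items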
    def list_files(self):
        if False:  # just to make it a generator
            yield None

    def __contains__(self, file_id):
        return file_id in self._inventory

    def get_file_sha1(self, file_id):
        assert self._inventory[file_id].kind == "root_directory"


######################################################################
def find_renames(old_inv, new_inv):
    for file_id in old_inv:
        if file_id not in new_inv:
            continue
        old_name = old_inv.id2path(file_id)
        new_name = new_inv.id2path(file_id)
        if old_name != new_name:
            yield (old_name, new_name)
def find_ids_across_trees(filenames, trees, require_versioned=True):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, and all children of matched
    directories will be used.

    :param filenames: The filenames to find file_ids for (if None, returns
        None)
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames and their children.
    """
    if not filenames:
        return None
    specified_path_ids = _find_ids_across_trees(filenames, trees,
        require_versioned)
    return _find_children_across_trees(specified_path_ids, trees)
def _find_ids_across_trees(filenames, trees, require_versioned):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, but subdirectories are not scanned.

    :param filenames: The filenames to find file_ids for
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames
    """
    not_versioned = []
    interesting_ids = set()
    for tree_path in filenames:
        not_found = True
        for tree in trees:
            file_id = tree.path2id(tree_path)
            if file_id is not None:
                interesting_ids.add(file_id)
                not_found = False
        if not_found:
            not_versioned.append(tree_path)
    if len(not_versioned) > 0 and require_versioned:
        raise errors.PathsNotVersionedError(not_versioned)
    return interesting_ids
def _find_children_across_trees(specified_ids, trees):
    """Return a set including specified ids and their children.

    All matches in all trees will be used.

    :param trees: The trees to find file_ids within
    :return: a set containing all specified ids and their children
    """
    interesting_ids = set(specified_ids)
    pending = interesting_ids
    # now handle children of interesting ids
    # we loop so that we handle all children of each id in both trees
    while len(pending) > 0:
        new_pending = set()
        for file_id in pending:
            for tree in trees:
                if not tree.has_id(file_id):
                    continue
                for child_id in tree.iter_children(file_id):
                    if child_id not in interesting_ids:
                        new_pending.add(child_id)
        interesting_ids.update(new_pending)
        pending = new_pending
    return interesting_ids
class InterTree(InterObject):
    """This class represents operations taking place between two Trees.

    Its instances have methods like 'compare' and contain references to the
    source and target trees these operations are to be carried out on.

    Clients of bzrlib should not need to use InterTree directly, rather they
    should use the convenience methods on Tree such as 'Tree.compare()' which
    will pass through to InterTree as appropriate.
    """

    def compare(self, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):
        """Return the changes from source to target.

        :return: A TreeDelta.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children of
            matched directories are included.
        :param want_unchanged: An optional boolean requesting the inclusion of
            unchanged entries in the result.
        :param extra_trees: An optional list of additional trees to use when
            mapping the contents of specific_files (paths) to file_ids.
        :param require_versioned: An optional boolean (defaults to False). When
            supplied and True all the 'specific_files' must be versioned, or
            a PathsNotVersionedError will be thrown.
        :param want_unversioned: Scan for unversioned paths.
        """
        # NB: show_status depends on being able to pass in non-versioned files
        # and report them as unknown
        trees = (self.source,)
        if extra_trees is not None:
            trees = trees + tuple(extra_trees)
        # target is usually the newer tree:
        specific_file_ids = self.target.paths2ids(specific_files, trees,
            require_versioned=require_versioned)
        if specific_files and not specific_file_ids:
            # All files are unversioned, so just return an empty delta
            # _compare_trees would think we want a complete delta
            result = delta.TreeDelta()
            fake_entry = InventoryFile('unused', 'unused', 'unused')
            result.unversioned = [(path, None,
                self.target._comparison_data(fake_entry, path)[0]) for path in
                specific_files]
            return result
        return delta._compare_trees(self.source, self.target, want_unchanged,
            specific_files, include_root, extra_trees=extra_trees,
            require_versioned=require_versioned,
            want_unversioned=want_unversioned)
    def iter_changes(self, include_unchanged=False,
                      specific_files=None, pb=None, extra_trees=[],
                      require_versioned=True, want_unversioned=False):
        """Generate an iterator of changes between trees.

        The output is a sequence of tuples of the form:
        (file_id, (path_in_source, path_in_target),
         changed_content, versioned, parent, name, kind,
         executable)

        Changed_content is True if the file's content has changed. This
        includes changes to its kind, and to a symlink's target.

        versioned, parent, name, kind, executable are tuples of (from, to).
        If a file is missing in a tree, its kind is None.

        Iteration is done in parent-to-child order, relative to the target
        tree.

        There is no guarantee that all paths are in sorted order: the
        requirement to expand the search due to renames may result in children
        that should be found early being found late in the search, after
        lexically later results have been returned.
        :param require_versioned: Raise errors.PathsNotVersionedError if a
            path in the specific_files list is not versioned in one of
            source, target or extra_trees.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        """
        lookup_trees = [self.source]
        if extra_trees:
            lookup_trees.extend(extra_trees)
        if specific_files == []:
            specific_file_ids = []
        else:
            specific_file_ids = self.target.paths2ids(specific_files,
                lookup_trees, require_versioned=require_versioned)
        if want_unversioned:
            all_unversioned = sorted([(p.split('/'), p) for p in
                                     self.target.extras()
                if specific_files is None or
                    osutils.is_inside_any(specific_files, p)])
            all_unversioned = deque(all_unversioned)
        else:
            all_unversioned = deque()
        to_paths = {}
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        entry_count = 0
        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable bit values when execute is not supported.
        fake_entry = InventoryFile('unused', 'unused', 'unused')
        for to_path, to_entry in to_entries_by_dir:
            while all_unversioned and all_unversioned[0][0] < to_path.split('/'):
                unversioned_path = all_unversioned.popleft()
                to_kind, to_executable, to_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, None),
                    (None, unversioned_path[0][-1]),
                    (None, to_kind),
                    (None, to_executable))
            file_id = to_entry.file_id
            to_paths[file_id] = to_path
            entry_count += 1
            changed_content = False
            from_path, from_entry = from_data.get(file_id, (None, None))
            from_versioned = (from_entry is not None)
            if from_entry is not None:
                from_versioned = True
                from_name = from_entry.name
                from_parent = from_entry.parent_id
                from_kind, from_executable, from_stat = \
                    self.source._comparison_data(from_entry, from_path)
                entry_count += 1
            else:
                from_versioned = False
                from_kind = None
                from_parent = None
                from_name = None
                from_executable = None
            versioned = (from_versioned, True)
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(to_entry, to_path)
            kind = (from_kind, to_kind)
            if kind[0] != kind[1]:
                changed_content = True
            elif from_kind == 'file':
                if (self.source.get_file_sha1(file_id, from_path, from_stat) !=
                    self.target.get_file_sha1(file_id, to_path, to_stat)):
                    changed_content = True
            elif from_kind == 'symlink':
                if (self.source.get_symlink_target(file_id) !=
                    self.target.get_symlink_target(file_id)):
                    changed_content = True
                # XXX: Yes, the indentation below is wrong. But fixing it broke
                # test_merge.TestMergerEntriesLCAOnDisk.
                # test_nested_tree_subtree_renamed_and_modified. We'll wait for
                # the fix from bzr.dev -- vila 2009026
            elif from_kind == 'tree-reference':
                if (self.source.get_reference_revision(file_id, from_path)
                    != self.target.get_reference_revision(file_id, to_path)):
                    changed_content = True
            parent = (from_parent, to_entry.parent_id)
            name = (from_name, to_entry.name)
            executable = (from_executable, to_executable)
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            if (changed_content is not False or versioned[0] != versioned[1]
                or parent[0] != parent[1] or name[0] != name[1] or
                executable[0] != executable[1] or include_unchanged):
                yield (file_id, (from_path, to_path), changed_content,
                    versioned, parent, name, kind, executable)
        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(fake_entry, unversioned_path[1])
            yield (None, (None, unversioned_path[1]), True, (False, False),
                (None, None),
                (None, unversioned_path[0][-1]),
                (None, to_kind),
                (None, to_executable))

        def get_to_path(to_entry):
            if to_entry.parent_id is None:
                to_path = '' # the root
            else:
                if to_entry.parent_id not in to_paths:
                    return get_to_path(self.target.inventory[to_entry.parent_id])
                to_path = osutils.pathjoin(to_paths[to_entry.parent_id],
                                           to_entry.name)
            to_paths[to_entry.file_id] = to_path
            return to_path

        for path, from_entry in from_entries_by_dir:
            file_id = from_entry.file_id
            if file_id in to_paths:
                # already returned
                continue
            if not file_id in self.target.all_file_ids():
                # common case - paths we have not emitted are not present in
                # target.
                to_path = None
            else:
                to_path = get_to_path(self.target.inventory[file_id])
            entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            versioned = (True, False)
            parent = (from_entry.parent_id, None)
            name = (from_entry.name, None)
            from_kind, from_executable, stat_value = \
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            yield (file_id, (path, to_path), changed_content, versioned, parent,
                  name, kind, executable)
class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()
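    # Illustrative sketch (not part of the API): walking a tree alongside two
    # others; iter_all() yields (path, file_id, master_ie, other_values),
    # where other_values holds one (path, entry) pair per other tree, or
    # (None, None) when that tree has no matching entry:
    #
    #     walker = MultiWalker(master_tree, [basis_tree, other_tree])
    #     for path, file_id, master_ie, other_values in walker.iter_all():
    #         (basis_path, basis_ie), (other_path, other_ie) = other_values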
    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie

    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
                   MultiWalker._path_to_key(path2))

    @staticmethod
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)
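    # Illustrative sketch (not part of the API): the key groups a path by its
    # containing directory before its basename, which is what gives the
    # dirblock ordering used above:
    #
    #     MultiWalker._path_to_key(u'doc/index.txt')
    #     # -> ([u'doc'], u'index.txt')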
    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)

    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result
    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break
######################################################################

def dir_exporter(tree, dest, root):
    """Export this tree to a new directory.

    `dest` should not exist, and will be created holding the
    contents of this tree.

    TODO: To handle subdirectories we need to create the
          subdirectories first.

    :note: If the export fails, the destination directory will be
           left in a half-assed state.
    """
    os.mkdir(dest)
    mutter('export version %r' % tree)
    inv = tree.inventory
    for dp, ie in inv.iter_entries():
        kind = ie.kind
        fullpath = appendpath(dest, dp)
        if kind == 'directory':
            os.mkdir(fullpath)
        elif kind == 'file':
            pumpfile(tree.get_file(ie.file_id), file(fullpath, 'wb'))
        else:
            raise BzrError("don't know how to export {%s} of kind %r" % (ie.file_id, kind))
        mutter(" export {%s} kind %s to %s" % (ie.file_id, kind, fullpath))
exporters['dir'] = dir_exporter
def get_root_name(dest):
    """Get just the root name for a tarball.

    >>> get_root_name('mytar.tar')
    'mytar'
    >>> get_root_name('mytar.tar.bz2')
    'mytar'
    >>> get_root_name('tar.tar.tar.tgz')
    'tar.tar.tar'
    >>> get_root_name('bzr-0.0.5.tar.gz')
    'bzr-0.0.5'
    >>> get_root_name('a/long/path/mytar.tgz')
    'mytar'
    >>> get_root_name('../parent/../dir/other.tbz2')
    'other'
    """
    endings = ['.tar', '.tar.gz', '.tgz', '.tar.bz2', '.tbz2']
    dest = os.path.basename(dest)
    for end in endings:
        if dest.endswith(end):
            return dest[:-len(end)]
def tar_exporter(tree, dest, root, compression=None):
    """Export this tree to a new tar file.

    `dest` will be created holding the contents of this tree; if it
    already exists, it will be clobbered, like with "tar -c".
    """
    from time import time
    now = time()
    compression = str(compression or '')
    if root is None:
        root = get_root_name(dest)
    try:
        ball = tarfile.open(dest, 'w:' + compression)
    except tarfile.CompressionError, e:
        raise BzrError(str(e))
    mutter('export version %r' % tree)
    inv = tree.inventory
    for dp, ie in inv.iter_entries():
        mutter(" export {%s} kind %s to %s" % (ie.file_id, ie.kind, dest))
        item = tarfile.TarInfo(os.path.join(root, dp))
        # TODO: would be cool to actually set it to the timestamp of the
        # revision it was last changed
        item.mtime = now
        if ie.kind == 'directory':
            item.type = tarfile.DIRTYPE
            fileobj = None
        elif ie.kind == 'file':
            item.type = tarfile.REGTYPE
            fileobj = tree.get_file(ie.file_id)
            item.size = _find_file_size(fileobj)
        else:
            raise BzrError("don't know how to export {%s} of kind %r" %
                           (ie.file_id, ie.kind))
        ball.addfile(item, fileobj)
    ball.close()
exporters['tar'] = tar_exporter

def tgz_exporter(tree, dest, root):
    tar_exporter(tree, dest, root, compression='gz')
exporters['tgz'] = tgz_exporter

def tbz_exporter(tree, dest, root):
    tar_exporter(tree, dest, root, compression='bz2')
exporters['tbz2'] = tbz_exporter
def _find_file_size(fileobj):
    offset = fileobj.tell()
    try:
        fileobj.seek(0, 2)
        size = fileobj.tell()
    except TypeError:
        # gzip doesn't accept second argument to seek()
        fileobj.seek(0)
        nread = len(fileobj.read())
        size = nread
    fileobj.seek(offset)
    return size
            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again.
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra
    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries
    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values