class Tree(object):
    """Abstract file tree.

    Trees can be compared, etc, regardless of whether they are working
    trees or versioned trees.
    """

    def changes_from(self, other, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):
        """Return a TreeDelta of the changes from other to this tree.

        :param other: A tree to compare with.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children
            of matched directories are included.
        :param want_unchanged: An optional boolean requesting the inclusion of
            unchanged entries in the result.
        :param extra_trees: An optional list of additional trees to use when
            mapping the contents of specific_files (paths) to file_ids.
        :param require_versioned: An optional boolean (defaults to False). When
            supplied and True all the 'specific_files' must be versioned, or
            a PathsNotVersionedError will be thrown.
        :param want_unversioned: Scan for unversioned paths.

        The comparison will be performed by an InterTree object looked up on
        self and other.
        """
        # Martin observes that Tree.changes_from returns a TreeDelta and this
        # may confuse people, because the class name of the returned object is
        # a synonym of the object referenced in the method name.
        return InterTree.get(other, self).compare(
            want_unchanged=want_unchanged,
            specific_files=specific_files,
            extra_trees=extra_trees,
            require_versioned=require_versioned,
            include_root=include_root,
            want_unversioned=want_unversioned,
            )

    @symbol_versioning.deprecated_method(symbol_versioning.one_three)
    def _iter_changes(self, *args, **kwargs):
        return self.iter_changes(*args, **kwargs)

    def iter_changes(self, from_tree, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=None,
                     require_versioned=True, want_unversioned=False):
        intertree = InterTree.get(from_tree, self)
        return intertree.iter_changes(include_unchanged, specific_files, pb,
            extra_trees, require_versioned, want_unversioned=want_unversioned)
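
    # Illustrative sketch (not part of the original module): typical use of
    # the two comparison entry points above from a concrete tree.  ``wt`` is
    # assumed to be a WorkingTree and ``basis`` its basis tree; these names
    # are hypothetical.
    #
    #   basis = wt.basis_tree()
    #   delta = wt.changes_from(basis)      # TreeDelta with .added/.removed/...
    #   for path, file_id, kind in delta.added:
    #       print 'added %s (%s)' % (path, kind)
    #   for change in wt.iter_changes(basis):
    #       # change is the 8-tuple documented on InterTree.iter_changes
    #       print change[1]                  # (path_in_source, path_in_target)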

    def conflicts(self):
        """Get a list of the conflicts in the tree.

        Each conflict is an instance of bzrlib.conflicts.Conflict.
        """
        return _mod_conflicts.ConflictList()

    def extras(self):
        """For trees that can have unversioned files, return all such paths."""
        return []

    def get_parent_ids(self):
        """Get the parent ids for this tree.

        :return: a list of parent ids. [] is returned to indicate
            a tree with no parents.
        :raises: BzrError if the parents are not known.
        """
        raise NotImplementedError(self.get_parent_ids)

    def has_filename(self, filename):
        """True if the tree has given filename."""
        raise NotImplementedError(self.has_filename)

    def has_id(self, file_id):
        return self.inventory.has_id(file_id)

    def __contains__(self, file_id):
        return self.has_id(file_id)

    def has_or_had_id(self, file_id):
        if file_id == self.inventory.root.file_id:
            return True
        return self.inventory.has_id(file_id)

    def is_ignored(self, filename):
        """Check whether the filename is ignored by this tree.

        :param filename: The relative filename within the tree.
        :return: True if the filename is ignored.
        """
        return False

    def __iter__(self):
        return iter(self.inventory)

    def all_file_ids(self):
        """Iterate through all file ids, including ids for missing files."""
        return set(self.inventory)

    def id2path(self, file_id):
        """Return the path for a file id.

        :raises NoSuchId:
        """
        return self.inventory.id2path(file_id)

    def is_control_filename(self, filename):
        """True if filename is the name of a control file in this tree.

        :param filename: A filename within the tree. This is a relative path
            from the root of this tree.

        This is true IF and ONLY IF the filename is part of the meta data
        that bzr controls in this tree. I.E. a random .bzr directory placed
        on disk will not be a control file for this tree.
        """
        return self.bzrdir.is_control_filename(filename)

    def iter_entries_by_dir(self, specific_file_ids=None):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the
        grandchildren.

        For example, in the tree::

           a/
            b/
             c
            d/
             e
           f/
            g

        The yield order (ignoring root) would be::

          a, f, a/b, a/d, a/b/c, a/d/e, f/g
        """
        return self.inventory.iter_entries_by_dir(
            specific_file_ids=specific_file_ids)

    def iter_references(self):
        for path, entry in self.iter_entries_by_dir():
            if entry.kind == 'tree-reference':
                yield path, entry.file_id

    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
            % self.__class__.__name__)

    def stored_kind(self, file_id):
        """File kind stored for this file_id.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        """
        return self.kind(file_id)

    def path_content_summary(self, path):
        """Get a summary of the information about path.

        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file, None otherwise.
            exec is None unless kind is file and the platform supports the 'x'
                bit.
            sha1-or-link is the link target if kind is symlink, or the sha1 if
                it can be obtained without reading the file.
        """
        raise NotImplementedError(self.path_content_summary)

    def get_reference_revision(self, file_id, path=None):
        raise NotImplementedError("Tree subclass %s must implement "
                                  "get_reference_revision"
            % self.__class__.__name__)

    def _comparison_data(self, entry, path):
        """Return a tuple of kind, executable, stat_value for a file.

        entry may be None if there is no inventory entry for the file, but
        path must always be supplied.

        kind is None if there is no file present (even if an inventory id is
        present). executable is False for non-file entries.
        """
        raise NotImplementedError(self._comparison_data)

    def _file_size(self, entry, stat_value):
        raise NotImplementedError(self._file_size)

    def _get_inventory(self):
        return self._inventory

    def get_file(self, file_id, path=None):
        """Return a file object for the file file_id in the tree.

        If both file_id and path are defined, it is implementation defined as
        to which one is used.
        """
        raise NotImplementedError(self.get_file)

    def get_file_text(self, file_id, path=None):
        """Return the byte content of a file.

        :param file_id: The file_id of the file.
        :param path: The path of the file.
        If both file_id and path are supplied, an implementation may use
        either one.
        """
        my_file = self.get_file(file_id, path)
        try:
            return my_file.read()
        finally:
            my_file.close()

    def get_file_lines(self, file_id, path=None):
        """Return the content of a file, as lines.

        :param file_id: The file_id of the file.
        :param path: The path of the file.
        If both file_id and path are supplied, an implementation may use
        either one.
        """
        return osutils.split_lines(self.get_file_text(file_id, path))

    def get_file_mtime(self, file_id, path=None):
        """Return the modification time for a file.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        """
        raise NotImplementedError(self.get_file_mtime)

    def get_file_size(self, file_id):
        """Return the size of a file in bytes.

        This applies only to regular files. If invoked on directories or
        symlinks, it will return None.

        :param file_id: The file-id of the file
        """
        raise NotImplementedError(self.get_file_size)

    def get_file_by_path(self, path):
        return self.get_file(self._inventory.path2id(path), path)

    def iter_files_bytes(self, desired_files):
        """Iterate through file contents.

        Files will not necessarily be returned in the order they occur in
        desired_files. No specific order is guaranteed.

        Yields pairs of identifier, bytes_iterator. identifier is an opaque
        value supplied by the caller as part of desired_files. It should
        uniquely identify the file version in the caller's context. (Examples:
        an index number or a TreeTransform trans_id.)

        bytes_iterator is an iterable of bytestrings for the file. The
        kind of iterable and length of the bytestrings are unspecified, but for
        this implementation, it is a tuple containing a single bytestring with
        the complete text of the file.

        :param desired_files: a list of (file_id, identifier) pairs
        """
        for file_id, identifier in desired_files:
            # We wrap the string in a tuple so that we can return an iterable
            # of bytestrings. (Technically, a bytestring is also an iterable
            # of bytestrings, but iterating through each character is not
            # performant.)
            cur_file = (self.get_file_text(file_id),)
            yield identifier, cur_file
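
    # Illustrative sketch (not part of the original module): how a caller
    # might drain iter_files_bytes().  ``tree`` is assumed to be a lockable
    # concrete tree; the identifiers used here are simply list indexes.
    #
    #   desired = [(file_id, idx) for idx, file_id in enumerate(file_ids)]
    #   texts = {}
    #   tree.lock_read()
    #   try:
    #       for identifier, bytes_iterator in tree.iter_files_bytes(desired):
    #           texts[identifier] = ''.join(bytes_iterator)
    #   finally:
    #       tree.unlock()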

    def get_symlink_target(self, file_id):
        """Get the target for a given file_id.

        It is assumed that the caller already knows that file_id is referencing
        a symlink.
        :param file_id: Handle for the symlink entry.
        :return: The path the symlink points to.
        """
        raise NotImplementedError(self.get_symlink_target)

    def get_root_id(self):
        """Return the file_id for the root of this tree."""
        raise NotImplementedError(self.get_root_id)

    def annotate_iter(self, file_id,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.

        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.
        :param file_id: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of
            this value.
        """
        raise NotImplementedError(self.annotate_iter)

    def _get_plan_merge_data(self, file_id, other, base):
        from bzrlib import merge, versionedfile
        vf = versionedfile._PlanMergeVersionedFile(file_id)
        last_revision_a = self._get_file_revision(file_id, vf, 'this:')
        last_revision_b = other._get_file_revision(file_id, vf, 'other:')
        if base is None:
            last_revision_base = None
        else:
            last_revision_base = base._get_file_revision(file_id, vf, 'base:')
        return vf, last_revision_a, last_revision_b, last_revision_base

    def plan_file_merge(self, file_id, other, base=None):
        """Generate a merge plan based on annotations.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,
                             base=last_revision_base)

    def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 base=last_revision_base)

    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)

    @staticmethod
    def _file_revision(revision_tree, file_id):
        """Determine the revision associated with a file in a given tree."""
        revision_tree.lock_read()
        try:
            return revision_tree.inventory[file_id].revision
        finally:
            revision_tree.unlock()

    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                         self.get_file(file_id).readlines())
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self._file_revision(self, file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def _check_retrieved(self, ie, f):
        fp = fingerprint_file(f)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise BzrError("mismatched size for file %r in %r" % (ie.file_id, self._store),
                        ["inventory expects %d bytes" % ie.text_size,
                         "file is actually %d bytes" % fp['size'],
                         "store is probably damaged/corrupt"])

        if ie.text_sha1 != fp['sha1']:
            raise BzrError("wrong SHA-1 for file %r in %r" % (ie.file_id, self._store),
                    ["inventory expects %s" % ie.text_sha1,
                     "file is actually %s" % fp['sha1'],
                     "store is probably damaged/corrupt"])

    def path2id(self, path):
        """Return the id for path in this tree."""
        return self._inventory.path2id(path)

    def paths2ids(self, paths, trees=[], require_versioned=True):
        """Return all the ids that can be reached by walking from paths.

        Each path is looked up in this tree and any extras provided in
        trees, and this is repeated recursively: the children in an extra tree
        of a directory that has been renamed under a provided path in this tree
        are all returned, even if none exist under a provided path in this
        tree, and vice versa.

        :param paths: An iterable of paths to start converting to ids from.
            Alternatively, if paths is None, no ids should be calculated and
            None will be returned. This is offered to make calling the api
            unconditional for code that *might* take a list of files.
        :param trees: Additional trees to consider.
        :param require_versioned: If False, do not raise NotVersionedError if
            an element of paths is not versioned in this tree and all of trees.
        """
        return find_ids_across_trees(paths, [self] + list(trees), require_versioned)

    def iter_children(self, file_id):
        entry = self.iter_entries_by_dir([file_id]).next()[1]
        for child in getattr(entry, 'children', {}).itervalues():
            yield child.file_id

    @symbol_versioning.deprecated_method(symbol_versioning.one_six)
    def print_file(self, file_id):
        """Print file with id `file_id` to stdout."""
        import sys
        sys.stdout.write(self.get_file_text(file_id))

    def revision_tree(self, revision_id):
        """Obtain a revision tree for the revision revision_id.

        The intention of this method is to allow access to possibly cached
        tree data. Implementors of this method should raise NoSuchRevision if
        the tree is not locally available, even if they could obtain the
        tree via a repository or some other means. Callers are responsible
        for finding the ultimate source for a revision tree.

        :param revision_id: The revision_id of the requested tree.
        :raises: NoSuchRevision if the tree cannot be obtained.
        """
        raise errors.NoSuchRevisionInTree(self, revision_id)

    def unknowns(self):
        """What files are present in this tree and unknown.

        :return: an iterator over the unknown files.
        """
        return iter([])

    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.

        :return: set of paths.
        """
        # NB: we specifically *don't* call self.has_filename, because for
        # WorkingTrees that can indicate files that exist on disk but that
        # are not versioned.
        pred = self.inventory.has_filename
        return set((p for p in paths if not pred(p)))

    def walkdirs(self, prefix=""):
        """Walk the contents of this tree from path down.

        This yields all the data about the contents of a directory at a time.
        After each directory has been yielded, if the caller has mutated the
        list to exclude some directories, they are then not descended into.

        The data yielded is of the form:
        ((directory-relpath, directory-path-from-root, directory-fileid),
        [(relpath, basename, kind, lstat, path_from_tree_root, file_id,
          versioned_kind), ...]),
         - directory-relpath is the containing dirs relpath from prefix
         - directory-path-from-root is the containing dirs path from /
         - directory-fileid is the id of the directory if it is versioned.
         - relpath is the relative path within the subtree being walked.
         - basename is the basename
         - kind is the kind of the file now. If unknown then the file is not
           present within the tree - but it may be recorded as versioned. See
           versioned_kind.
         - lstat is the stat data *if* the file was statted.
         - path_from_tree_root is the path from the root of the tree.
         - file_id is the file_id if the entry is versioned.
         - versioned_kind is the kind of the file as last recorded in the
           versioning system. If 'unknown' the file is not versioned.
        One of 'kind' and 'versioned_kind' must not be 'unknown'.

        :param prefix: Start walking from prefix within the tree rather than
            at the root. This allows one to walk a subtree but get paths that
            are relative to a tree rooted higher up.
        :return: an iterator over the directory data.
        """
        raise NotImplementedError(self.walkdirs)
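
    # Illustrative sketch (not part of the original module): the shape of the
    # data yielded by walkdirs(), per the docstring above.  ``tree`` is
    # assumed to be a WorkingTree (the usual concrete implementation).
    #
    #   for (dir_relpath, dir_abspath, dir_file_id), entries in tree.walkdirs():
    #       for (relpath, basename, kind, lstat, path_from_tree_root, file_id,
    #               versioned_kind) in entries:
    #           if file_id is None and versioned_kind == 'unknown':
    #               print 'unversioned:', relpath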

    def iter_search_rules(self, path_names, pref_names=None,
        _default_searcher=rules._per_user_searcher):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
          Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
          See _RulesSearcher.get_items for details on the tuple sequence.
        """
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)

    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher

    def export(self, dest, format='dir', root=None):
        """Export this tree."""
        try:
            exporter = exporters[format]
        except KeyError:
            from bzrlib.errors import BzrCommandError
            raise BzrCommandError("export format %r not supported" % format)
        exporter(self, dest, root)


class RevisionTree(Tree):
    """Tree viewing a previous revision.

    File text can be retrieved from the text store.

    TODO: Some kind of `__repr__` method, but a good one
          probably means knowing the branch and revision number,
          or at least passing a description to the constructor.
    """

    def __init__(self, store, inv):
        self._store = store
        self._inventory = inv

    def get_file(self, file_id):
        ie = self._inventory[file_id]
        f = self._store[ie.text_id]
        mutter(" get fileid{%s} from %r" % (file_id, self))
        self._check_retrieved(ie, f)
        return f

    def get_file_size(self, file_id):
        return self._inventory[file_id].text_size

    def get_file_sha1(self, file_id):
        ie = self._inventory[file_id]
        if ie.kind == "file":
            return ie.text_sha1

    def has_filename(self, filename):
        return bool(self.inventory.path2id(filename))

    def list_files(self):
        # The only files returned by this are those from the version
        for path, entry in self.inventory.iter_entries():
            yield path, 'V', entry.kind, entry.file_id


class EmptyTree(Tree):

    def __init__(self):
        self._inventory = Inventory(root_id=None)
        symbol_versioning.warn('EmptyTree is deprecated as of bzr 0.9 please'
                               ' use repository.revision_tree instead.',
                               DeprecationWarning, stacklevel=2)

    def get_parent_ids(self):
        return []

    def get_symlink_target(self, file_id):
        return None

    def has_filename(self, filename):
        return False

    def kind(self, file_id):
        assert self._inventory[file_id].kind == "directory"
        return "directory"

    def list_files(self, include_root=False):
        return iter([])

    def __contains__(self, file_id):
        return (file_id in self._inventory)

    def get_file_sha1(self, file_id, path=None, stat_value=None):
        return None


######################################################################
# export

exporters = {}


def dir_exporter(tree, dest, root):
    """Export this tree to a new directory.

    `dest` should not exist, and will be created holding the
    contents of this tree.

    TODO: To handle subdirectories we need to create the
          directories too.

    :note: If the export fails, the destination directory will be
           left in a half-assed state.
    """
    os.mkdir(dest)
    mutter('export version %r' % tree)
    inv = tree.inventory
    for dp, ie in inv.iter_entries():
        kind = ie.kind
        fullpath = appendpath(dest, dp)
        if kind == 'directory':
            os.mkdir(fullpath)
        elif kind == 'file':
            pumpfile(tree.get_file(ie.file_id), file(fullpath, 'wb'))
        else:
            raise BzrError("don't know how to export {%s} of kind %r" % (ie.file_id, kind))
        mutter(" export {%s} kind %s to %s" % (ie.file_id, kind, fullpath))
exporters['dir'] = dir_exporter


def get_root_name(dest):
    """Get just the root name for a tarball.

    >>> get_root_name('mytar.tar')
    'mytar'
    >>> get_root_name('mytar.tar.bz2')
    'mytar'
    >>> get_root_name('tar.tar.tar.tgz')
    'tar.tar.tar'
    >>> get_root_name('bzr-0.0.5.tar.gz')
    'bzr-0.0.5'
    >>> get_root_name('a/long/path/mytar.tgz')
    'mytar'
    >>> get_root_name('../parent/../dir/other.tbz2')
    'other'
    """
    endings = ['.tar', '.tar.gz', '.tgz', '.tar.bz2', '.tbz2']
    dest = os.path.basename(dest)
    for end in endings:
        if dest.endswith(end):
            return dest[:-len(end)]


def tar_exporter(tree, dest, root, compression=None):
    """Export this tree to a new tar file.

    `dest` will be created holding the contents of this tree; if it
    already exists, it will be clobbered, like with "tar -c".
    """
    from time import time
    now = time()
    compression = str(compression or '')
    if root is None:
        root = get_root_name(dest)
    try:
        ball = tarfile.open(dest, 'w:' + compression)
    except tarfile.CompressionError, e:
        raise BzrError(str(e))
    mutter('export version %r' % tree)
    inv = tree.inventory
    for dp, ie in inv.iter_entries():
        mutter(" export {%s} kind %s to %s" % (ie.file_id, ie.kind, dest))
        item = tarfile.TarInfo(os.path.join(root, dp))
        # TODO: would be cool to actually set it to the timestamp of the
        # revision it was last changed
        item.mtime = now
        if ie.kind == 'directory':
            item.type = tarfile.DIRTYPE
            fileobj = None
        elif ie.kind == 'file':
            item.type = tarfile.REGTYPE
            fileobj = tree.get_file(ie.file_id)
            item.size = _find_file_size(fileobj)
        else:
            raise BzrError("don't know how to export {%s} of kind %r" %
                           (ie.file_id, ie.kind))
        ball.addfile(item, fileobj)
    ball.close()
exporters['tar'] = tar_exporter


def tgz_exporter(tree, dest, root):
    tar_exporter(tree, dest, root, compression='gz')
exporters['tgz'] = tgz_exporter


def tbz_exporter(tree, dest, root):
    tar_exporter(tree, dest, root, compression='bz2')
exporters['tbz2'] = tbz_exporter


def _find_file_size(fileobj):
    offset = fileobj.tell()
    try:
        fileobj.seek(0, 2)
        size = fileobj.tell()
    except TypeError:
        # gzip doesn't accept second argument to seek()
        fileobj.seek(0)
        size = 0
        while True:
            nread = len(fileobj.read())
            if nread == 0:
                break
            size += nread
    fileobj.seek(offset)
    return size


######################################################################

def find_renames(old_inv, new_inv):
    for file_id in old_inv:
        if file_id not in new_inv:
            continue
        old_name = old_inv.id2path(file_id)
        new_name = new_inv.id2path(file_id)
        if old_name != new_name:
            yield (old_name, new_name)


def find_ids_across_trees(filenames, trees, require_versioned=True):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, and all children of matched
    directories will be used.

    :param filenames: The filenames to find file_ids for (if None, returns
        None)
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames and their children.
    """
    if not filenames:
        return None
    specified_path_ids = _find_ids_across_trees(filenames, trees,
        require_versioned)
    return _find_children_across_trees(specified_path_ids, trees)


def _find_ids_across_trees(filenames, trees, require_versioned):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, but subdirectories are not scanned.

    :param filenames: The filenames to find file_ids for
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames
    """
    not_versioned = []
    interesting_ids = set()
    for tree_path in filenames:
        not_found = True
        for tree in trees:
            file_id = tree.path2id(tree_path)
            if file_id is not None:
                interesting_ids.add(file_id)
                not_found = False
        if not_found:
            not_versioned.append(tree_path)
    if len(not_versioned) > 0 and require_versioned:
        raise errors.PathsNotVersionedError(not_versioned)
    return interesting_ids


def _find_children_across_trees(specified_ids, trees):
    """Return a set including specified ids and their children.

    All matches in all trees will be used.

    :param trees: The trees to find file_ids within
    :return: a set containing all specified ids and their children
    """
    interesting_ids = set(specified_ids)
    pending = interesting_ids
    # now handle children of interesting ids
    # we loop so that we handle all children of each id in both trees
    while len(pending) > 0:
        new_pending = set()
        for file_id in pending:
            for tree in trees:
                if not tree.has_id(file_id):
                    continue
                for child_id in tree.iter_children(file_id):
                    if child_id not in interesting_ids:
                        new_pending.add(child_id)
        interesting_ids.update(new_pending)
        pending = new_pending
    return interesting_ids
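

# Illustrative sketch, not part of the original module: how the tree-walking
# helpers above are typically combined.  ``wt`` is assumed to be a
# WorkingTree; the paths given are examples only.
def _example_ids_for_paths(wt):
    basis = wt.basis_tree()
    # All matches in both trees are used, plus children of matched directories.
    return find_ids_across_trees(['doc', 'README'], [wt, basis],
                                 require_versioned=False)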


class InterTree(InterObject):
    """This class represents operations taking place between two Trees.

    Its instances have methods like 'compare' and contain references to the
    source and target trees these operations are to be carried out on.

    Clients of bzrlib should not need to use InterTree directly, rather they
    should use the convenience methods on Tree such as 'Tree.compare()' which
    will pass through to InterTree as appropriate.
    """

    _optimisers = []

    def compare(self, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):
        """Return the changes from source to target.

        :return: A TreeDelta.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children
            of matched directories are included.
        :param want_unchanged: An optional boolean requesting the inclusion of
            unchanged entries in the result.
        :param extra_trees: An optional list of additional trees to use when
            mapping the contents of specific_files (paths) to file_ids.
        :param require_versioned: An optional boolean (defaults to False). When
            supplied and True all the 'specific_files' must be versioned, or
            a PathsNotVersionedError will be thrown.
        :param want_unversioned: Scan for unversioned paths.
        """
        # NB: show_status depends on being able to pass in non-versioned files
        # and report them as unknown
        trees = (self.source,)
        if extra_trees is not None:
            trees = trees + tuple(extra_trees)
        # target is usually the newer tree:
        specific_file_ids = self.target.paths2ids(specific_files, trees,
            require_versioned=require_versioned)
        if specific_files and not specific_file_ids:
            # All files are unversioned, so just return an empty delta
            # _compare_trees would think we want a complete delta
            result = delta.TreeDelta()
            fake_entry = InventoryFile('unused', 'unused', 'unused')
            result.unversioned = [(path, None,
                self.target._comparison_data(fake_entry, path)[0]) for path in
                specific_files]
            return result
        return delta._compare_trees(self.source, self.target, want_unchanged,
            specific_files, include_root, extra_trees=extra_trees,
            require_versioned=require_versioned,
            want_unversioned=want_unversioned)

    def iter_changes(self, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=[],
                     require_versioned=True, want_unversioned=False):
        """Generate an iterator of changes between trees.

        The output is an iterator of tuples of the form:
        (file_id, (path_in_source, path_in_target),
         changed_content, versioned, parent, name, kind,
         executable)

        Changed_content is True if the file's content has changed. This
        includes changes to its kind, and to a symlink's target.

        versioned, parent, name, kind, executable are tuples of (from, to).
        If a file is missing in a tree, its kind is None.

        Iteration is done in parent-to-child order, relative to the target
        tree.

        There is no guarantee that all paths are in sorted order: the
        requirement to expand the search due to renames may result in children
        that should be found early being found late in the search, after
        lexically later results have been returned.
        :param require_versioned: Raise errors.PathsNotVersionedError if a
            path in the specific_files list is not versioned in one of
            source, target or extra_trees.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        """
        lookup_trees = [self.source]
        if extra_trees:
            lookup_trees.extend(extra_trees)
        if specific_files == []:
            specific_file_ids = []
        else:
            specific_file_ids = self.target.paths2ids(specific_files,
                lookup_trees, require_versioned=require_versioned)
        if want_unversioned:
            all_unversioned = sorted([(p.split('/'), p) for p in
                                      self.target.extras()
                if specific_files is None or
                    osutils.is_inside_any(specific_files, p)])
            all_unversioned = deque(all_unversioned)
        else:
            all_unversioned = deque()
        to_paths = {}
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        entry_count = 0
        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable values when execute is not supported.
        fake_entry = InventoryFile('unused', 'unused', 'unused')
        for to_path, to_entry in to_entries_by_dir:
            while all_unversioned and all_unversioned[0][0] < to_path.split('/'):
                unversioned_path = all_unversioned.popleft()
                to_kind, to_executable, to_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, None),
                    (None, unversioned_path[0][-1]),
                    (None, to_kind),
                    (None, to_executable))
            file_id = to_entry.file_id
            to_paths[file_id] = to_path
            entry_count += 1
            changed_content = False
            from_path, from_entry = from_data.get(file_id, (None, None))
            from_versioned = (from_entry is not None)
            if from_entry is not None:
                from_versioned = True
                from_name = from_entry.name
                from_parent = from_entry.parent_id
                from_kind, from_executable, from_stat = \
                    self.source._comparison_data(from_entry, from_path)
                entry_count += 1
            else:
                from_versioned = False
                from_kind = None
                from_parent = None
                from_name = None
                from_executable = None
            versioned = (from_versioned, True)
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(to_entry, to_path)
            kind = (from_kind, to_kind)
            if kind[0] != kind[1]:
                changed_content = True
            elif from_kind == 'file':
                from_size = self.source._file_size(from_entry, from_stat)
                to_size = self.target._file_size(to_entry, to_stat)
                if from_size != to_size:
                    changed_content = True
                elif (self.source.get_file_sha1(file_id, from_path, from_stat) !=
                    self.target.get_file_sha1(file_id, to_path, to_stat)):
                    changed_content = True
            elif from_kind == 'symlink':
                if (self.source.get_symlink_target(file_id) !=
                    self.target.get_symlink_target(file_id)):
                    changed_content = True
            elif from_kind == 'tree-reference':
                if (self.source.get_reference_revision(file_id, from_path)
                    != self.target.get_reference_revision(file_id, to_path)):
                    changed_content = True
            parent = (from_parent, to_entry.parent_id)
            name = (from_name, to_entry.name)
            executable = (from_executable, to_executable)
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            if (changed_content is not False or versioned[0] != versioned[1]
                or parent[0] != parent[1] or name[0] != name[1] or
                executable[0] != executable[1] or include_unchanged):
                yield (file_id, (from_path, to_path), changed_content,
                    versioned, parent, name, kind, executable)

        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(fake_entry, unversioned_path[1])
            yield (None, (None, unversioned_path[1]), True, (False, False),
                (None, None),
                (None, unversioned_path[0][-1]),
                (None, to_kind),
                (None, to_executable))

        def get_to_path(to_entry):
            if to_entry.parent_id is None:
                to_path = '' # the root
            else:
                if to_entry.parent_id not in to_paths:
                    # recurse up
                    return get_to_path(self.target.inventory[to_entry.parent_id])
                to_path = osutils.pathjoin(to_paths[to_entry.parent_id],
                                           to_entry.name)
            to_paths[to_entry.file_id] = to_path
            return to_path

        for path, from_entry in from_entries_by_dir:
            file_id = from_entry.file_id
            if file_id in to_paths:
                # already returned
                continue
            if not file_id in self.target.all_file_ids():
                # common case - paths we have not emitted are not present in
                # the target.
                to_path = None
            else:
                to_path = get_to_path(self.target.inventory[file_id])
            entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            versioned = (True, False)
            parent = (from_entry.parent_id, None)
            name = (from_entry.name, None)
            from_kind, from_executable, stat_value = \
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            yield (file_id, (path, to_path), changed_content, versioned, parent,
                   name, kind, executable)
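

# Illustrative sketch, not part of the original module: unpacking the change
# tuples yielded by InterTree.iter_changes().  ``source`` and ``target`` are
# assumed to be two comparable (read-locked) trees.
def _example_report_renames(source, target):
    renames = []
    for (file_id, (from_path, to_path), changed_content, versioned, parent,
         name, kind, executable) in InterTree.get(source, target).iter_changes():
        if from_path is not None and to_path is not None and from_path != to_path:
            renames.append((from_path, to_path))
    return renames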


class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()

    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie

    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
                   MultiWalker._path_to_key(path2))

    @staticmethod
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)

    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)

    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result

    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra

    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries

    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values
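

# Illustrative sketch, not part of the original module: driving a MultiWalker
# over a master tree and two other trees.  The caller is assumed to hold read
# locks on all three trees; iter_all() is the entry point reconstructed above.
def _example_walk_three_trees(master, other_a, other_b):
    walker = MultiWalker(master, [other_a, other_b])
    for path, file_id, master_ie, other_values in walker.iter_all():
        # other_values holds one (path, inventory_entry) pair per other tree;
        # (None, None) means the file_id is not present in that tree.
        print path, file_id, [p for (p, ie) in other_values]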