    def all_file_ids(self):
        """Iterate through all file ids, including ids for missing files."""
        raise NotImplementedError(self.all_file_ids)

    def id2path(self, file_id):
        """Return the path for a file id.

        :raises NoSuchId:
        """
        raise NotImplementedError(self.id2path)

    def is_control_filename(self, filename):
        """True if filename is the name of a control file in this tree.

        :param filename: A filename within the tree. This is a relative path
            from the root of this tree.

        This is true IF and ONLY IF the filename is part of the meta data
        that bzr controls in this tree. I.E. a random .bzr directory placed
        on disk will not be a control file for this tree.
        """
        return self.bzrdir.is_control_filename(filename)
    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the
        nodes for children of that directory.

        For example, in the tree::

           a/
             b/
               c
             d/
               e
           f/
             g

        The yield order (ignoring root) would be::

          a, f, a/b, a/d, a/b/c, a/d/e, f/g

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        """
        raise NotImplementedError(self.iter_entries_by_dir)
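
    # Usage sketch (illustrative only, not part of the Tree API): walking a
    # concrete Tree implementation in 'by_dir' order.  'tree' is assumed to be
    # any lockable Tree subclass instance supplied by the caller.
    #
    #   tree.lock_read()
    #   try:
    #       for path, entry in tree.iter_entries_by_dir():
    #           print "%-12s %s" % (entry.kind, path)
    #   finally:
    #       tree.unlock()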
    def list_files(self, include_root=False, from_dir=None, recursive=True):
        """List all files in this tree.

        :param include_root: Whether to include the entry for the tree root
        :param from_dir: Directory under which to list files
        :param recursive: Whether to list files recursively
        :return: iterator over tuples of (path, versioned, kind, file_id,
            inventory entry)
        """
        raise NotImplementedError(self.list_files)
    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path, entry.file_id
    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
            % self.__class__.__name__)

    def stored_kind(self, file_id):
        """File kind stored for this file_id.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        """
        return self.kind(file_id)

    def path_content_summary(self, path):
        """Get a summary of the information about path.

        All the attributes returned are for the canonical form, not the
        convenient form (if content filters are in use.)

        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file and the size of the
            canonical form can be cheaply determined, None otherwise.
            exec is None unless kind is file and the platform supports the 'x'
            bit.
            sha1-or-link is the link target if kind is symlink, or the sha1 if
            it can be obtained without reading the file.
        """
        raise NotImplementedError(self.path_content_summary)
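
    # Illustrative sketch (not part of bzrlib): unpacking the 4-tuple returned
    # by path_content_summary().  The helper name 'describe' is hypothetical.
    #
    #   def describe(tree, path):
    #       kind, size, executable, sha1_or_link = tree.path_content_summary(path)
    #       if kind == 'symlink':
    #           return '%s -> %s' % (path, sha1_or_link)
    #       return '%s (%s, %s bytes)' % (path, kind, size)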
    def get_reference_revision(self, file_id, path=None):
        raise NotImplementedError("Tree subclass %s must implement "
                                  "get_reference_revision"
                                  % self.__class__.__name__)

    def _file_size(self, entry, stat_value):
        raise NotImplementedError(self._file_size)
    def get_file(self, file_id, path=None):
        """Return a file object for the file file_id in the tree.

        If both file_id and path are defined, it is implementation defined as
        to which one is used.
        """
        raise NotImplementedError(self.get_file)

    def get_file_with_stat(self, file_id, path=None):
        """Get a file handle and stat object for file_id.

        The default implementation returns (self.get_file, None) for backwards
        compatibility.

        :param file_id: The file id to read.
        :param path: The path of the file, if it is known.
        :return: A tuple (file_handle, stat_value_or_None). If the tree has
            no stat facility, or need for a stat cache feedback during commit,
            it may return None for the second element of the tuple.
        """
        return (self.get_file(file_id, path), None)

    def get_file_text(self, file_id, path=None):
        """Return the byte content of a file.

        :param file_id: The file_id of the file.
        :param path: The path of the file.

        If both file_id and path are supplied, an implementation may use
        either one.

        :returns: A single byte string for the whole file.
        """
        my_file = self.get_file(file_id, path)
        try:
            return my_file.read()
        finally:
            my_file.close()

    def get_file_lines(self, file_id, path=None):
        """Return the content of a file, as lines.

        :param file_id: The file_id of the file.
        :param path: The path of the file.

        If both file_id and path are supplied, an implementation may use
        either one.
        """
        return osutils.split_lines(self.get_file_text(file_id, path))
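
    # Usage sketch (illustrative only): reading a versioned file's text
    # through the Tree API.  'tree' and 'file_id' are assumed to come from
    # the caller; split_lines() keeps line endings, so joining the lines
    # reproduces the text.
    #
    #   tree.lock_read()
    #   try:
    #       text = tree.get_file_text(file_id)
    #       lines = tree.get_file_lines(file_id)
    #       assert "".join(lines) == text
    #   finally:
    #       tree.unlock()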
    def get_file_verifier(self, file_id, path=None, stat_value=None):
        """Return a verifier for a file.

        The default implementation returns a sha1.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        :param stat_value: Optional stat value for the object
        :return: Tuple with verifier name and verifier data
        """
        return ("SHA1", self.get_file_sha1(file_id, path=path,
            stat_value=stat_value))

    def get_file_sha1(self, file_id, path=None, stat_value=None):
        """Return the SHA1 file for a file.

        :note: callers should use get_file_verifier instead
            where possible, as the underlying repository implementation may
            have quicker access to a non-sha1 verifier.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        :param stat_value: Optional stat value for the object
        """
        raise NotImplementedError(self.get_file_sha1)
    def get_file_mtime(self, file_id, path=None):
        """Return the modification time for a file.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        """
        raise NotImplementedError(self.get_file_mtime)

    def iter_files_bytes(self, desired_files):
        """Iterate through file contents.

        Yields pairs of identifier, bytes_iterator for each (file_id,
        identifier) pair supplied in desired_files.
        """
        for file_id, identifier in desired_files:
            # We wrap the string in a tuple so that we can return an iterable
            # of bytestrings.
            cur_file = (self.get_file_text(file_id),)
            yield identifier, cur_file
    def get_symlink_target(self, file_id, path=None):
        """Get the target for a given file_id.

        It is assumed that the caller already knows that file_id is referencing
        a symlink.
        :param file_id: Handle for the symlink entry.
        :param path: The path of the file.
        If both file_id and path are supplied, an implementation may use
        either one.
        :return: The path the symlink points to.
        """
        raise NotImplementedError(self.get_symlink_target)
    def get_root_id(self):
        """Return the file_id for the root of this tree."""
        raise NotImplementedError(self.get_root_id)

    def annotate_iter(self, file_id,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.

        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.
        :param file_id: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of
            this value.
        """
        raise NotImplementedError(self.annotate_iter)
    def _get_plan_merge_data(self, file_id, other, base):
        from bzrlib import versionedfile
        vf = versionedfile._PlanMergeVersionedFile(file_id)
        last_revision_a = self._get_file_revision(file_id, vf, 'this:')
        last_revision_b = other._get_file_revision(file_id, vf, 'other:')
        if base is None:
            last_revision_base = None
        else:
            last_revision_base = base._get_file_revision(file_id, vf, 'base:')
        return vf, last_revision_a, last_revision_b, last_revision_base

    def plan_file_merge(self, file_id, other, base=None):
        """Generate a merge plan based on annotations.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,
                             last_revision_base)

    def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 last_revision_base)
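
    # Illustrative sketch (not part of the API): driving plan_file_merge()
    # between two trees.  The plan is an iterable of (state, line) pairs,
    # where state is a label such as 'unchanged', 'new-a' or 'new-b'.  The
    # tree and file_id variables are assumed to be supplied by the caller.
    #
    #   this_tree.lock_read()
    #   other_tree.lock_read()
    #   try:
    #       for state, line in this_tree.plan_file_merge(file_id, other_tree):
    #           print state, line,
    #   finally:
    #       other_tree.unlock()
    #       this_tree.unlock()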
    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)
    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                         self.get_file_lines(file_id))
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self.get_file_revision(file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision
    def _check_retrieved(self, ie, f):
        if not __debug__:
            return
        fp = osutils.fingerprint_file(f)
        f.seek(0)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise errors.BzrError(
                        "mismatched size for file %r in %r" %
                        (ie.file_id, self._store),
                        ["inventory expects %d bytes" % ie.text_size,
                         "file is actually %d bytes" % fp['size'],
                         "store is probably damaged/corrupt"])

        if ie.text_sha1 != fp['sha1']:
            raise errors.BzrError("wrong SHA-1 for file %r in %r" %
                    (ie.file_id, self._store),
                    ["inventory expects %s" % ie.text_sha1,
                     "file is actually %s" % fp['sha1'],
                     "store is probably damaged/corrupt"])

    def path2id(self, path):
        """Return the id for path in this tree."""
        raise NotImplementedError(self.path2id)
    def paths2ids(self, paths, trees=[], require_versioned=True):
        """Return all the ids that can be reached by walking from paths.

        Each path is looked up in this tree and any extras provided in
        trees, and this is repeated recursively: the children in an extra tree
        of a directory that has been renamed under a provided path in this tree
        are all returned, even if none exist under a provided path in this
        tree, and vice versa.

        :param paths: An iterable of paths to start converting to ids from.
        """
        return find_ids_across_trees(paths, [self] + list(trees),
            require_versioned=require_versioned)

    def walkdirs(self, prefix=""):
        """Walk the contents of this tree from path down."""
        raise NotImplementedError(self.walkdirs)
    def supports_content_filtering(self):
        return False
    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
            or None if unknown
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        """
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            trace.note(gettext("*** {0} content-filter: {1} => {2!r}").format(
                path, prefs, stk))
        return stk

    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        """
        if self.supports_content_filtering():
            return lambda path, file_id: \
                self._content_filter_stack(path, file_id)
        else:
            return None

    def iter_search_rules(self, path_names, pref_names=None,
                          _default_searcher=None):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
            Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
            See _RulesSearcher.get_items for details on the tuple sequence.
        """
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)

    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher
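
    # Illustrative sketch (not part of bzrlib): how a caller might ask a tree
    # for its content-filter stack.  'tree' is assumed to be a Tree subclass
    # that overrides supports_content_filtering(); behaviour depends on which
    # filters are registered.
    #
    #   provider = tree._content_filter_stack_provider()
    #   if provider is not None:
    #       filters_for_path = provider('README', tree.path2id('README'))
    #   else:
    #       filters_for_path = []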

class InventoryTree(Tree):
    """A tree that relies on an inventory for its metadata.

    Trees contain an `Inventory` object, and also know how to retrieve
    file texts mentioned in the inventory, either from a working
    directory or from a store.

    It is possible for trees to contain files that are not described
    in their inventory or vice versa; for this use `filenames()`.

    Subclasses should set the _inventory attribute, which is considered
    private to external API users.
    """
    def get_canonical_inventory_paths(self, paths):
        """Like get_canonical_inventory_path() but works on multiple items.

        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case
            insensitively.
        """
        return list(self._yield_canonical_inventory_paths(paths))

    def get_canonical_inventory_path(self, path):
        """Returns the first inventory item that case-insensitively matches path.

        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches case-insensitively, it is implementation
        defined which is returned.

        If no path matches case-insensitively, the input path is returned, but
        with as many path entries that do exist changed to their canonical
        form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        """
        return self._yield_canonical_inventory_paths([path]).next()
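
    # Usage sketch (illustrative only): resolving a user-supplied path against
    # the canonical, case-preserving form stored in the inventory.  The file
    # name used here is hypothetical.
    #
    #   canonical = tree.get_canonical_inventory_path('readme.TXT')
    #   # On a tree that versions 'README.txt', canonical == 'README.txt';
    #   # unknown paths come back with their existing elements canonicalised.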
    def _yield_canonical_inventory_paths(self, paths):
        for path in paths:
            # First, if the path as specified exists exactly, just use it.
            if self.path2id(path) is not None:
                yield path
                continue
            # go walkin...
            cur_id = self.get_root_id()
            cur_path = ''
            bit_iter = iter(path.split("/"))
            for elt in bit_iter:
                lelt = elt.lower()
                new_path = None
                for child in self.iter_children(cur_id):
                    try:
                        # XXX: it seems like if the child is known to be in the
                        # tree, we shouldn't need to go from its id back to
                        # its path -- mbp 2010-02-11
                        #
                        # XXX: it seems like we could be more efficient
                        # by just directly looking up the original name and
                        # only then searching all children; also by not
                        # chopping paths so much. -- mbp 2010-02-11
                        child_base = os.path.basename(self.id2path(child))
                        if (child_base == elt):
                            # if we found an exact match, we can stop now; if
                            # we found an approximate match we need to keep
                            # searching because there might be an exact match
                            # later.
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                            break
                        elif child_base.lower() == lelt:
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                    except errors.NoSuchId:
                        # before a change is committed we can see this error...
                        continue
                if new_path:
                    cur_path = new_path
                else:
                    # got to the end of this directory and no entries matched.
                    # Return what matched so far, plus the rest as specified.
                    cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
                    break
            yield cur_path
    @deprecated_method(deprecated_in((2, 5, 0)))
    def _get_inventory(self):
        return self._inventory

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def _get_root_inventory(self):
        return self._inventory

    root_inventory = property(_get_root_inventory,
                              doc="Root inventory of this tree")

    def _unpack_file_id(self, file_id):
        """Find the inventory and inventory file id for a tree file id.

        :param file_id: The tree file id, as bytestring or tuple
        :return: Inventory and inventory file id
        """
        if isinstance(file_id, tuple):
            if len(file_id) != 1:
                raise ValueError("nested trees not yet supported: %r" % file_id)
            file_id = file_id[0]
        return self.root_inventory, file_id

    def path2id(self, path):
        """Return the id for path in this tree."""
        return self._path2inv_file_id(path)[1]

    def _path2inv_file_id(self, path):
        """Look up an inventory and inventory file id by path.

        :param path: Path to look up
        :return: tuple with inventory and inventory file id
        """
        # FIXME: Support nested trees
        return self.root_inventory, self.root_inventory.path2id(path)

    def id2path(self, file_id):
        """Return the path for a file id.

        :raises NoSuchId:
        """
        inventory, file_id = self._unpack_file_id(file_id)
        return inventory.id2path(file_id)

    def has_id(self, file_id):
        inventory, file_id = self._unpack_file_id(file_id)
        return inventory.has_id(file_id)

    def has_or_had_id(self, file_id):
        inventory, file_id = self._unpack_file_id(file_id)
        return inventory.has_id(file_id)

    def all_file_ids(self):
        return set(
            [entry.file_id for path, entry in self.iter_entries_by_dir()])

    @deprecated_method(deprecated_in((2, 4, 0)))
    def __iter__(self):
        return iter(self.all_file_ids())
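
    # Illustrative sketch (not part of the API): round-tripping between paths
    # and file ids on an InventoryTree.  'tree' is assumed to be locked and the
    # path is hypothetical.
    #
    #   file_id = tree.path2id('doc/index.txt')
    #   if file_id is not None:
    #       assert tree.id2path(file_id) == 'doc/index.txt'
    #       assert tree.has_id(file_id)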
    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.

        :return: set of paths.
        """
        # NB: we specifically *don't* call self.has_filename, because for
        # WorkingTrees that can indicate files that exist on disk but that
        # are not versioned.
        return set((p for p in paths if self.path2id(p) is None))
    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        See Tree.iter_entries_by_dir for details.

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        """
        if specific_file_ids is None:
            inventory_file_ids = None
        else:
            inventory_file_ids = []
            for tree_file_id in specific_file_ids:
                inventory, inv_file_id = self._unpack_file_id(tree_file_id)
                if not inventory is self.root_inventory: # for now
                    raise AssertionError("%r != %r" % (
                        inventory, self.root_inventory))
                inventory_file_ids.append(inv_file_id)
        # FIXME: Handle nested trees
        return self.root_inventory.iter_entries_by_dir(
            specific_file_ids=inventory_file_ids, yield_parents=yield_parents)

    @deprecated_method(deprecated_in((2, 5, 0)))
    def get_file_by_path(self, path):
        return self.get_file(self.path2id(path), path)

def find_ids_across_trees(filenames, trees, require_versioned=True):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, and all children of matched
    directories will be used.
    """
    if not filenames:
        return None
    specified_path_ids = _find_ids_across_trees(filenames, trees,
        require_versioned)
    return _find_children_across_trees(specified_path_ids, trees)

class InterTree(InterObject):
    """This class represents operations taking place between two Trees.

    Its instances have methods like 'compare' and contain references to the
    source and target trees these operations are to be carried out on.

    Clients of bzrlib should not need to use InterTree directly, rather they
    should use the convenience methods on Tree such as 'Tree.compare()' which
    will pass through to InterTree as appropriate.
    """

    # Formats that will be used to test this InterTree. If both are
    # None, this InterTree will not be tested (e.g. because a complex
    # setup is required)
    _matching_from_tree_format = None
    _matching_to_tree_format = None

    _optimisers = []

    @classmethod
    def is_compatible(kls, source, target):
        # The default implementation is naive and uses the public API, so
        # it works for all trees.
        return True
    def _changes_from_entries(self, source_entry, target_entry,
                              source_path=None, target_path=None):
        """Generate an iter_changes tuple between source_entry and target_entry.

        :param source_entry: An inventory entry from self.source, or None.
        :param target_entry: An inventory entry from self.target, or None.
        :param source_path: The path of source_entry, if known. If not known
            it will be looked up.
        :param target_path: The path of target_entry, if known. If not known
            it will be looked up.
        :return: A tuple, item 0 of which is an iter_changes result tuple, and
            item 1 is True if there are any changes in the result tuple.
        """
        if source_entry is None:
            if target_entry is None:
                return None
            file_id = target_entry.file_id
        else:
            file_id = source_entry.file_id
        if source_entry is not None:
            source_versioned = True
            source_name = source_entry.name
            source_parent = source_entry.parent_id
            if source_path is None:
                source_path = self.source.id2path(file_id)
            source_kind, source_executable, source_stat = \
                self.source._comparison_data(source_entry, source_path)
        else:
            source_versioned = False
            source_name = None
            source_parent = None
            source_kind = None
            source_executable = None
        if target_entry is not None:
            target_versioned = True
            target_name = target_entry.name
            target_parent = target_entry.parent_id
            if target_path is None:
                target_path = self.target.id2path(file_id)
            target_kind, target_executable, target_stat = \
                self.target._comparison_data(target_entry, target_path)
        else:
            target_versioned = False
            target_name = None
            target_parent = None
            target_kind = None
            target_executable = None
        versioned = (source_versioned, target_versioned)
        kind = (source_kind, target_kind)
        changed_content = False
        if source_kind != target_kind:
            changed_content = True
        elif source_kind == 'file':
            if not self.file_content_matches(file_id, file_id, source_path,
                    target_path, source_stat, target_stat):
                changed_content = True
        elif source_kind == 'symlink':
            if (self.source.get_symlink_target(file_id) !=
                self.target.get_symlink_target(file_id)):
                changed_content = True
        elif source_kind == 'tree-reference':
            if (self.source.get_reference_revision(file_id, source_path)
                != self.target.get_reference_revision(file_id, target_path)):
                changed_content = True
        parent = (source_parent, target_parent)
        name = (source_name, target_name)
        executable = (source_executable, target_executable)
        if (changed_content is not False or versioned[0] != versioned[1]
            or parent[0] != parent[1] or name[0] != name[1] or
            executable[0] != executable[1]):
            changes = True
        else:
            changes = False
        return (file_id, (source_path, target_path), changed_content,
                versioned, parent, name, kind, executable), changes
    @needs_read_lock
    def compare(self, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):
        """Return the changes from source to target as a TreeDelta."""
        return delta._compare_trees(self.source, self.target, want_unchanged,
            specific_files, include_root, extra_trees=extra_trees,
            require_versioned=require_versioned,
            want_unversioned=want_unversioned)

    def iter_changes(self, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=[],
                     require_versioned=True, want_unversioned=False):
        """Generate an iterator of changes between trees.

        :param require_versioned: Raise errors.PathsNotVersionedError if a
            path in the specific_files list is not versioned in one of
            source, target or extra_trees.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children
            of matched directories are included. The parents in the target tree
            of the specific files up to and including the root of the tree are
            always evaluated for changes too.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        """
        lookup_trees = [self.source]
        if extra_trees:
            lookup_trees.extend(extra_trees)
        # The ids of items we need to examine to ensure delta consistency.
        precise_file_ids = set()
        changed_file_ids = []
        if specific_files == []:
            specific_file_ids = []
        else:
            specific_file_ids = self.target.paths2ids(specific_files,
                lookup_trees, require_versioned=require_versioned)
        if specific_files is not None:
            # reparented or added entries must have their parents included
            # so that valid deltas can be created. The seen_parents set
            # tracks the parents that we need to have.
            # The seen_dirs set tracks directory entries we've yielded.
            # After outputting version object in to_entries we set difference
            # the two seen sets and start checking parents.
            seen_parents = set()
            seen_dirs = set()
        if want_unversioned:
            all_unversioned = sorted([(p.split('/'), p) for p in
                                     self.target.extras()
                if specific_files is None or
                    osutils.is_inside_any(specific_files, p)])
            all_unversioned = collections.deque(all_unversioned)
        else:
            all_unversioned = collections.deque()
        to_paths = {}
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        entry_count = 0
        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable bit values when execute is not supported.
        fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
        for target_path, target_entry in to_entries_by_dir:
            while (all_unversioned and
                all_unversioned[0][0] < target_path.split('/')):
                unversioned_path = all_unversioned.popleft()
                target_kind, target_executable, target_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, None),
                    (None, unversioned_path[0][-1]),
                    (None, target_kind),
                    (None, target_executable))
            source_path, source_entry = from_data.get(target_entry.file_id,
                (None, None))
            result, changes = self._changes_from_entries(source_entry,
                target_entry, source_path=source_path, target_path=target_path)
            to_paths[result[0]] = result[1][1]
            entry_count += 1
            if result[3][0]:
                entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            if changes or include_unchanged:
                if specific_file_ids is not None:
                    new_parent_id = result[4][1]
                    precise_file_ids.add(new_parent_id)
                    changed_file_ids.append(result[0])
                yield result
            # Ensure correct behaviour for reparented/added specific files.
            if specific_files is not None:
                # Record output dirs
                if result[6][1] == 'directory':
                    seen_dirs.add(result[0])
                # Record parents of reparented/added entries.
                versioned = result[3]
                parents = result[4]
                if not versioned[0] or parents[0] != parents[1]:
                    seen_parents.add(parents[1])
        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(fake_entry, unversioned_path[1])
            yield (None, (None, unversioned_path[1]), True, (False, False),
                (None, None),
                (None, unversioned_path[0][-1]),
                (None, to_kind),
                (None, to_executable))
        # Yield all remaining source paths
        for path, from_entry in from_entries_by_dir:
            file_id = from_entry.file_id
            if file_id in to_paths:
                # already returned
                continue
            if not self.target.has_id(file_id):
                # common case - paths we have not emitted are not present in
                # target.
                to_path = None
            else:
                to_path = self.target.id2path(file_id)
            entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            versioned = (True, False)
            parent = (from_entry.parent_id, None)
            name = (from_entry.name, None)
            from_kind, from_executable, stat_value = \
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            changed_file_ids.append(file_id)
            yield(file_id, (path, to_path), changed_content, versioned, parent,
                  name, kind, executable)
        changed_file_ids = set(changed_file_ids)
        if specific_file_ids is not None:
            for result in self._handle_precise_ids(precise_file_ids,
                changed_file_ids):
                yield result
    def _get_entry(self, tree, file_id):
        """Get an inventory entry from a tree, with missing entries as None.

        If the tree raises NotImplementedError on accessing .inventory, then
        this is worked around using iter_entries_by_dir on just the file id
        desired.

        :param tree: The tree to lookup the entry in.
        :param file_id: The file_id to lookup.
        """
        try:
            inventory = tree.root_inventory
        except NotImplementedError:
            # No inventory available.
            try:
                iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
                return iterator.next()[1]
            except StopIteration:
                return None
        else:
            try:
                return inventory[file_id]
            except errors.NoSuchId:
                return None
    def _handle_precise_ids(self, precise_file_ids, changed_file_ids,
        discarded_changes=None):
        """Fill out a partial iter_changes to be consistent.

        :param precise_file_ids: The file ids of parents that were seen during
            the iter_changes.
        :param changed_file_ids: The file ids of already emitted items.
        :param discarded_changes: An optional dict of precalculated
            iter_changes items which the partial iter_changes had not output
            but had calculated.
        :return: A generator of iter_changes items to output.
        """
        # process parents of things that had changed under the users
        # requested paths to prevent incorrect paths or parent ids which
        # aren't in the tree.
        while precise_file_ids:
            precise_file_ids.discard(None)
            # Don't emit file_ids twice
            precise_file_ids.difference_update(changed_file_ids)
            if not precise_file_ids:
                break
            # If the there was something at a given output path in source, we
            # have to include the entry from source in the delta, or we would
            # be putting this entry into a used path.
            paths = []
            for parent_id in precise_file_ids:
                try:
                    paths.append(self.target.id2path(parent_id))
                except errors.NoSuchId:
                    # This id has been dragged in from the source by delta
                    # expansion and isn't present in target at all: we don't
                    # need to check for path collisions on it.
                    pass
            for path in paths:
                old_id = self.source.path2id(path)
                precise_file_ids.add(old_id)
            precise_file_ids.discard(None)
            current_ids = precise_file_ids
            precise_file_ids = set()
            # We have to emit all of precise_file_ids that have been altered.
            # We may have to output the children of some of those ids if any
            # directories have stopped being directories.
            for file_id in current_ids:
                # Examine file_id
                if discarded_changes:
                    result = discarded_changes.get(file_id)
                    old_entry = None
                else:
                    result = None
                if result is None:
                    old_entry = self._get_entry(self.source, file_id)
                    new_entry = self._get_entry(self.target, file_id)
                    result, changes = self._changes_from_entries(
                        old_entry, new_entry)
                else:
                    changes = True
                # Get this parents parent to examine.
                new_parent_id = result[4][1]
                precise_file_ids.add(new_parent_id)
                if changes:
                    if (result[6][0] == 'directory' and
                            result[6][1] != 'directory'):
                        # This stopped being a directory, the old children have
                        # to be included.
                        if old_entry is None:
                            # Reusing a discarded change.
                            old_entry = self._get_entry(self.source, file_id)
                        for child in old_entry.children.values():
                            precise_file_ids.add(child.file_id)
                    changed_file_ids.add(result[0])
                    yield result
    def file_content_matches(self, source_file_id, target_file_id,
            source_path=None, target_path=None, source_stat=None, target_stat=None):
        """Check if two files are the same in the source and target trees.

        This only checks that the contents of the files are the same,
        it does not touch anything else.

        :param source_file_id: File id of the file in the source tree
        :param target_file_id: File id of the file in the target tree
        :param source_path: Path of the file in the source tree
        :param target_path: Path of the file in the target tree
        :param source_stat: Optional stat value of the file in the source tree
        :param target_stat: Optional stat value of the file in the target tree
        :return: Boolean indicating whether the files have the same contents
        """
        source_verifier_kind, source_verifier_data = self.source.get_file_verifier(
            source_file_id, source_path, source_stat)
        target_verifier_kind, target_verifier_data = self.target.get_file_verifier(
            target_file_id, target_path, target_stat)
        if source_verifier_kind == target_verifier_kind:
            return (source_verifier_data == target_verifier_data)
        # Fall back to SHA1 for now
        if source_verifier_kind != "SHA1":
            source_sha1 = self.source.get_file_sha1(source_file_id,
                source_path, source_stat)
        else:
            source_sha1 = source_verifier_data
        if target_verifier_kind != "SHA1":
            target_sha1 = self.target.get_file_sha1(target_file_id,
                target_path, target_stat)
        else:
            target_sha1 = target_verifier_data
        return (source_sha1 == target_sha1)

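    # Illustrative sketch (not part of bzrlib): comparing one file across two
    # trees with an InterTree.  The tree objects and file_id are assumed to be
    # supplied by the caller; InterTree.get() comes from InterObject.
    #
    #   inter = InterTree.get(source_tree, target_tree)
    #   if not inter.file_content_matches(file_id, file_id):
    #       print "content of %s differs" % source_tree.id2path(file_id)
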
InterTree.register_optimiser(InterTree)

class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()
    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie
    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
                   MultiWalker._path_to_key(path2))

    @staticmethod
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)
    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            in extra_entries.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.root_inventory[file_id]
            return (cur_path, cur_ie)
    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result
    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                        next_other_entries_append((other_has_more, other_path,
                                                   other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra
    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries
    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values
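

# Usage sketch (illustrative only, not part of bzrlib): walking two trees
# together with MultiWalker.  The tree variables are assumed to be locked by
# the caller; entries yielded from the second pass have master_ie set to None.
#
#   walker = MultiWalker(master_tree, [other_tree])
#   for path, file_id, master_ie, other_values in walker.iter_all():
#       other_path, other_ie = other_values[0]
#       if master_ie is None:
#           print "only in other tree: %s" % other_path
#       elif other_ie is None:
#           print "only in master tree: %s" % path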