    def all_file_ids(self):
        """Iterate through all file ids, including ids for missing files."""
        raise NotImplementedError(self.all_file_ids)

    def id2path(self, file_id):
        """Return the path for a file id.

        :raises NoSuchId:
        """
        raise NotImplementedError(self.id2path)

    def is_control_filename(self, filename):
        """True if filename is the name of a control file in this tree.

        :param filename: A filename within the tree. This is a relative path
        from the root of this tree.

        This is true IF and ONLY IF the filename is part of the meta data
        that bzr controls in this tree. I.E. a random .bzr directory placed
        on disk will not be a control file for this tree.
        """
        return self.bzrdir.is_control_filename(filename)

    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the
        grandchildren.

        For example, in the tree::

           a/
             b/
               c
             d/
               e
           f/
             g

        The yield order (ignoring root) would be::

          a, f, a/b, a/d, a/b/c, a/d/e, f/g

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        """
        raise NotImplementedError(self.iter_entries_by_dir)
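    # Illustrative sketch (not part of the original source): walking a
    # hypothetical concrete Tree subclass bound to `tree`, which the caller
    # is assumed to have read-locked.
    #
    #   for path, entry in tree.iter_entries_by_dir():
    #       print path, entry.kind, entry.file_id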

    def list_files(self, include_root=False, from_dir=None, recursive=True):
        """List all files in this tree.

        :param include_root: Whether to include the entry for the tree root
        :param from_dir: Directory under which to list files
        :param recursive: Whether to list files recursively
        :return: iterator over tuples of (path, versioned, kind, file_id,
            inventory entry)
        """
        raise NotImplementedError(self.list_files)

    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path, entry.file_id

    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
            % self.__class__.__name__)

    def stored_kind(self, file_id):
        """File kind stored for this file_id.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        """
        return self.kind(file_id)

    def path_content_summary(self, path):
        """Get a summary of the information about path.

        All the attributes returned are for the canonical form, not the
        convenient form (if content filters are in use.)

        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file and the size of the
                canonical form can be cheaply determined, None otherwise.
            exec is None unless kind is file and the platform supports the 'x'
                bit.
            sha1-or-link is the link target if kind is symlink, or the sha1 if
                it can be obtained without reading the file.
        """
        raise NotImplementedError(self.path_content_summary)
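    # Illustrative sketch (not part of the original source): unpacking the
    # summary tuple for a hypothetical working tree `wt`; the size and sha1
    # slots may legitimately be None.
    #
    #   kind, size, executable, sha1_or_link = wt.path_content_summary('README')
    #   if kind == 'file' and sha1_or_link is None:
    #       sha1_or_link = wt.get_file_sha1(wt.path2id('README'), 'README')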

    def get_reference_revision(self, file_id, path=None):
        raise NotImplementedError("Tree subclass %s must implement "
                                  "get_reference_revision"
                                  % self.__class__.__name__)

    def _file_size(self, entry, stat_value):
        raise NotImplementedError(self._file_size)

    def get_file(self, file_id, path=None):
        """Return a file object for the file file_id in the tree.

        If both file_id and path are defined, it is implementation defined as
        to which one is used.
        """
        raise NotImplementedError(self.get_file)

    def get_file_with_stat(self, file_id, path=None):
        """Get a file handle and stat object for file_id.

        The default implementation returns (self.get_file, None) for backwards
        compatibility.

        :param file_id: The file id to read.
        :param path: The path of the file, if it is known.
        :return: A tuple (file_handle, stat_value_or_None). If the tree has
            no stat facility, or need for a stat cache feedback during commit,
            it may return None for the second element of the tuple.
        """
        return (self.get_file(file_id, path), None)

    def get_file_text(self, file_id, path=None):
        """Return the byte content of a file.

        :param file_id: The file_id of the file.
        :param path: The path of the file.

        If both file_id and path are supplied, an implementation may use
        either one.

        :returns: A single byte string for the whole file.
        """
        my_file = self.get_file(file_id, path)
        try:
            return my_file.read()
        finally:
            my_file.close()

    def get_file_lines(self, file_id, path=None):
        """Return the content of a file, as lines.

        :param file_id: The file_id of the file.
        :param path: The path of the file.

        If both file_id and path are supplied, an implementation may use
        either one.
        """
        return osutils.split_lines(self.get_file_text(file_id, path))
314
def get_file_verifier(self, file_id, path=None, stat_value=None):
315
"""Return a verifier for a file.
317
The default implementation returns a sha1.
319
:param file_id: The handle for this file.
320
:param path: The path that this file can be found at.
321
These must point to the same object.
322
:param stat_value: Optional stat value for the object
323
        :return: Tuple with verifier name and verifier data
        """
        return ("SHA1", self.get_file_sha1(file_id, path=path,
            stat_value=stat_value))
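    # Illustrative sketch (not part of the original source): comparing one
    # file across two hypothetical trees without assuming which verifier
    # each tree can produce cheaply.
    #
    #   kind_a, data_a = tree_a.get_file_verifier(file_id)
    #   kind_b, data_b = tree_b.get_file_verifier(file_id)
    #   if kind_a == kind_b:
    #       same = (data_a == data_b)
    #   else:
    #       same = (tree_a.get_file_sha1(file_id) == tree_b.get_file_sha1(file_id))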
328
def get_file_sha1(self, file_id, path=None, stat_value=None):
329
"""Return the SHA1 file for a file.
331
:note: callers should use get_file_verifier instead
332
where possible, as the underlying repository implementation may
333
have quicker access to a non-sha1 verifier.
335
:param file_id: The handle for this file.
336
:param path: The path that this file can be found at.
337
These must point to the same object.
338
        :param stat_value: Optional stat value for the object
        """
        raise NotImplementedError(self.get_file_sha1)
342
215
def get_file_mtime(self, file_id, path=None):
        """Return the modification time for a file.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        """
        raise NotImplementedError(self.get_file_mtime)
351
def get_file_size(self, file_id):
352
"""Return the size of a file in bytes.
354
This applies only to regular files. If invoked on directories or
355
symlinks, it will return None.
356
        :param file_id: The file-id of the file
        """
        raise NotImplementedError(self.get_file_size)
360
def is_executable(self, file_id, path=None):
361
"""Check if a file is executable.
363
:param file_id: The handle for this file.
364
:param path: The path that this file can be found at.
365
            These must point to the same object.
        """
        raise NotImplementedError(self.is_executable)
369
def iter_files_bytes(self, desired_files):
370
"""Iterate through file contents.
372
Files will not necessarily be returned in the order they occur in
373
desired_files. No specific order is guaranteed.
375
Yields pairs of identifier, bytes_iterator. identifier is an opaque
376
value supplied by the caller as part of desired_files. It should
377
uniquely identify the file version in the caller's context. (Examples:
378
an index number or a TreeTransform trans_id.)
380
bytes_iterator is an iterable of bytestrings for the file. The
381
kind of iterable and length of the bytestrings are unspecified, but for
382
this implementation, it is a tuple containing a single bytestring with
383
the complete text of the file.
385
        :param desired_files: a list of (file_id, identifier) pairs
        """
        for file_id, identifier in desired_files:
            # We wrap the string in a tuple so that we can return an iterable
            # of bytestrings. (Technically, a bytestring is also an iterable
            # of bytestrings, but iterating through each character is not
            # desirable.)
            cur_file = (self.get_file_text(file_id),)
393
yield identifier, cur_file
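    # Illustrative sketch (not part of the original source): callers pass an
    # opaque identifier per file and reassemble the chunks themselves; the
    # file ids and names below are made up.
    #
    #   wanted = [('file-id-1', 'out.txt'), ('file-id-2', 'other.txt')]
    #   for name, chunks in tree.iter_files_bytes(wanted):
    #       text = ''.join(chunks)   # results may arrive in any order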

    def get_symlink_target(self, file_id, path=None):
        """Get the target for a given file_id.

        It is assumed that the caller already knows that file_id is referencing
        a symlink.
        :param file_id: Handle for the symlink entry.
        :param path: The path of the file.
        If both file_id and path are supplied, an implementation may use
        either one.
        :return: The path the symlink points to.
        """
        raise NotImplementedError(self.get_symlink_target)
408
def get_root_id(self):
409
"""Return the file_id for the root of this tree."""
410
raise NotImplementedError(self.get_root_id)
412
    def annotate_iter(self, file_id,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.

        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.
        :param file_id: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of
            it.
        """
        raise NotImplementedError(self.annotate_iter)

    def _get_plan_merge_data(self, file_id, other, base):
        from bzrlib import versionedfile
        vf = versionedfile._PlanMergeVersionedFile(file_id)
        last_revision_a = self._get_file_revision(file_id, vf, 'this:')
        last_revision_b = other._get_file_revision(file_id, vf, 'other:')
        if base is None:
            last_revision_base = None
        else:
            last_revision_base = base._get_file_revision(file_id, vf, 'base:')
        return vf, last_revision_a, last_revision_b, last_revision_base
437
def plan_file_merge(self, file_id, other, base=None):
438
"""Generate a merge plan based on annotations.
440
If the file contains uncommitted changes in this tree, they will be
441
attributed to the 'current:' pseudo-revision. If the file contains
442
uncommitted changes in the other tree, they will be assigned to the
443
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,
                             last_revision_base)
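    # Illustrative sketch (not part of the original source): `this_tree` and
    # `other_tree` are assumed to be two related trees; the plan is a
    # sequence of (state, line) pairs consumed by the merge machinery.
    #
    #   plan = this_tree.plan_file_merge(file_id, other_tree)
    #   for state, line in plan:
    #       if state in ('new-a', 'new-b'):
    #           pass  # line added on one side only
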
def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.
453
If the file contains uncommitted changes in this tree, they will be
454
attributed to the 'current:' pseudo-revision. If the file contains
455
uncommitted changes in the other tree, they will be assigned to the
456
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 last_revision_base)
463
def _iter_parent_trees(self):
464
"""Iterate through parent trees, defaulting to Tree.revision_tree."""
465
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
468
except errors.NoSuchRevisionInTree:
469
yield self.repository.revision_tree(revision_id)
471
def _get_file_revision(self, file_id, vf, tree_revision):
472
"""Ensure that file_id, tree_revision is in vf to plan the merge."""
474
if getattr(self, '_repository', None) is None:
475
last_revision = tree_revision
476
parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
477
self._iter_parent_trees()]
478
vf.add_lines((file_id, last_revision), parent_keys,
479
self.get_file_lines(file_id))
480
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self.get_file_revision(file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision

    def _check_retrieved(self, ie, f):
        if not __debug__:
            return
        fp = osutils.fingerprint_file(f)
        f.seek(0)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise errors.BzrError(
                        "mismatched size for file %r in %r" %
                        (ie.file_id, self._store),
                        ["inventory expects %d bytes" % ie.text_size,
                         "file is actually %d bytes" % fp['size'],
                         "store is probably damaged/corrupt"])

        if ie.text_sha1 != fp['sha1']:
            raise errors.BzrError("wrong SHA-1 for file %r in %r" %
                    (ie.file_id, self._store),
                    ["inventory expects %s" % ie.text_sha1,
                     "file is actually %s" % fp['sha1'],
                     "store is probably damaged/corrupt"])

    def path2id(self, path):
        """Return the id for path in this tree."""
        raise NotImplementedError(self.path2id)

    def paths2ids(self, paths, trees=[], require_versioned=True):
        """Return all the ids that can be reached by walking from paths.

        Each path is looked up in this tree and any extras provided in
        trees, and this is repeated recursively: the children in an extra tree
        of a directory that has been renamed under a provided path in this tree
        are all returned, even if none exist under a provided path in this
        tree, and vice versa.

        :param paths: An iterable of paths to start converting to ids from.
        """
        raise NotImplementedError(self.paths2ids)
609
369
raise NotImplementedError(self.walkdirs)
611
    def supports_content_filtering(self):
        return False
614
    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
            or None if unknown
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        """
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            trace.note(gettext("*** {0} content-filter: {1} => {2!r}").format(path,prefs,stk))
        return stk
637
    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        """
        if self.supports_content_filtering():
            return lambda path, file_id: \
                    self._content_filter_stack(path, file_id)
        else:
            return None
651
    def iter_search_rules(self, path_names, pref_names=None,
        _default_searcher=None):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
          Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
          See _RulesSearcher.get_items for details on the tuple sequence.
        """
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)

    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher


class InventoryTree(Tree):
    """A tree that relies on an inventory for its metadata.

    Trees contain an `Inventory` object, and also know how to retrieve
    file texts mentioned in the inventory, either from a working
    directory or from a store.

    It is possible for trees to contain files that are not described
    in their inventory or vice versa; for this use `filenames()`.

    Subclasses should set the _inventory attribute, which is considered
    private to external API users.
    """
401
def get_file_sha1(self, file_id, path=None, stat_value=None):
405
######################################################################
408
# TODO: Merge these two functions into a single one that can operate
409
# on either a whole tree or a set of files.
411
# TODO: Return the diff in order by filename, not by category or in
412
# random order. Can probably be done by lock-stepping through the
413
# filenames from both trees.
416
def file_status(filename, old_tree, new_tree):
417
"""Return single-letter status, old and new names for a file.
419
The complexity here is in deciding how to represent renames;
420
many complex cases are possible.
693
def get_canonical_inventory_paths(self, paths):
694
"""Like get_canonical_inventory_path() but works on multiple items.
696
:param paths: A sequence of paths relative to the root of the tree.
697
:return: A list of paths, with each item the corresponding input path
698
            adjusted to account for existing elements that match case
            insensitively.
        """
        return list(self._yield_canonical_inventory_paths(paths))
703
def get_canonical_inventory_path(self, path):
704
"""Returns the first inventory item that case-insensitively matches path.
706
If a path matches exactly, it is returned. If no path matches exactly
707
but more than one path matches case-insensitively, it is implementation
708
defined which is returned.
710
If no path matches case-insensitively, the input path is returned, but
711
        with as many path entries that do exist changed to their canonical
        form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
718
:return: The input path adjusted to account for existing elements
719
            that match case insensitively.
        """
        return self._yield_canonical_inventory_paths([path]).next()
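    # Illustrative sketch (not part of the original source): folding a
    # user-supplied path onto the inventory's spelling before looking it up;
    # the file names are made up.
    #
    #   canonical = tree.get_canonical_inventory_path('readme.TXT')
    #   file_id = tree.path2id(canonical)   # e.g. canonical == 'README.txt'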
723
def _yield_canonical_inventory_paths(self, paths):
725
# First, if the path as specified exists exactly, just use it.
726
if self.path2id(path) is not None:
730
cur_id = self.get_root_id()
732
bit_iter = iter(path.split("/"))
736
for child in self.iter_children(cur_id):
738
# XXX: it seem like if the child is known to be in the
739
# tree, we shouldn't need to go from its id back to
740
# its path -- mbp 2010-02-11
742
# XXX: it seems like we could be more efficient
743
# by just directly looking up the original name and
744
# only then searching all children; also by not
745
# chopping paths so much. -- mbp 2010-02-11
746
child_base = os.path.basename(self.id2path(child))
747
if (child_base == elt):
748
# if we found an exact match, we can stop now; if
749
# we found an approximate match we need to keep
750
# searching because there might be an exact match
753
new_path = osutils.pathjoin(cur_path, child_base)
755
elif child_base.lower() == lelt:
757
new_path = osutils.pathjoin(cur_path, child_base)
758
except errors.NoSuchId:
759
# before a change is committed we can see this error...
764
# got to the end of this directory and no entries matched.
765
# Return what matched so far, plus the rest as specified.
766
cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
771
def _get_inventory(self):
772
return self._inventory
774
inventory = property(_get_inventory,
775
doc="Inventory of this Tree")
778
def path2id(self, path):
779
"""Return the id for path in this tree."""
780
return self._inventory.path2id(path)
782
    def id2path(self, file_id):
        """Return the path for a file id.

        :raises NoSuchId:
        """
        return self.inventory.id2path(file_id)
789
def has_id(self, file_id):
790
return self.inventory.has_id(file_id)
792
def has_or_had_id(self, file_id):
793
return self.inventory.has_id(file_id)
795
def all_file_ids(self):
796
return set(self.inventory)
798
    @deprecated_method(deprecated_in((2, 4, 0)))
    def __iter__(self):
        return iter(self.inventory)
802
def filter_unversioned_files(self, paths):
803
"""Filter out paths that are versioned.
805
        :return: set of paths.
        """
        # NB: we specifically *don't* call self.has_filename, because for
        # WorkingTrees that can indicate files that exist on disk but that
        # are not versioned.
        pred = self.inventory.has_filename
811
return set((p for p in paths if not pred(p)))
814
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
815
"""Walk the tree in 'by_dir' order.
817
This will yield each entry in the tree as a (path, entry) tuple.
818
The order that they are yielded is:
820
See Tree.iter_entries_by_dir for details.
822
:param yield_parents: If True, yield the parents from the root leading
823
down to specific_file_ids that have been requested. This has no
824
            impact if specific_file_ids is None.
        """
        return self.inventory.iter_entries_by_dir(
827
specific_file_ids=specific_file_ids, yield_parents=yield_parents)
829
@deprecated_method(deprecated_in((2, 5, 0)))
830
def get_file_by_path(self, path):
831
return self.get_file(self.path2id(path), path)
422
old_inv = old_tree.inventory
423
new_inv = new_tree.inventory
424
new_id = new_inv.path2id(filename)
425
old_id = old_inv.path2id(filename)
427
if not new_id and not old_id:
428
# easy: doesn't exist in either; not versioned at all
429
if new_tree.is_ignored(filename):
430
return 'I', None, None
432
return '?', None, None
434
# There is now a file of this name, great.
437
# There is no longer a file of this name, but we can describe
438
# what happened to the file that used to have
439
# this name. There are two possibilities: either it was
440
# deleted entirely, or renamed.
442
if new_inv.has_id(old_id):
443
return 'X', old_inv.id2path(old_id), new_inv.id2path(old_id)
445
return 'D', old_inv.id2path(old_id), None
447
# if the file_id is new in this revision, it is added
448
if new_id and not old_inv.has_id(new_id):
451
# if there used to be a file of this name, but that ID has now
452
# disappeared, it is deleted
453
if old_id and not new_inv.has_id(old_id):
460
def find_renames(old_inv, new_inv):
461
for file_id in old_inv:
462
if file_id not in new_inv:
464
old_name = old_inv.id2path(file_id)
465
new_name = new_inv.id2path(file_id)
466
if old_name != new_name:
467
yield (old_name, new_name)
834
470
def find_ids_across_trees(filenames, trees, require_versioned=True):
835
471
"""Find the ids corresponding to specified filenames.
837
473
All matches in all trees will be used, and all children of matched
838
474
directories will be used.
910
547
Its instances have methods like 'compare' and contain references to the
911
548
source and target trees these operations are to be carried out on.
913
    Clients of bzrlib should not need to use InterTree directly, rather they
914
551
should use the convenience methods on Tree such as 'Tree.compare()' which
915
552
will pass through to InterTree as appropriate.
918
# Formats that will be used to test this InterTree. If both are
919
# None, this InterTree will not be tested (e.g. because a complex
921
_matching_from_tree_format = None
922
_matching_to_tree_format = None
927
    @classmethod
    def is_compatible(kls, source, target):
        # The default implementation is naive and uses the public API, so
        # it works for all trees.
        return True
932
def _changes_from_entries(self, source_entry, target_entry,
933
source_path=None, target_path=None):
934
"""Generate a iter_changes tuple between source_entry and target_entry.
936
:param source_entry: An inventory entry from self.source, or None.
937
:param target_entry: An inventory entry from self.target, or None.
938
:param source_path: The path of source_entry, if known. If not known
939
it will be looked up.
940
:param target_path: The path of target_entry, if known. If not known
941
it will be looked up.
942
:return: A tuple, item 0 of which is an iter_changes result tuple, and
943
            item 1 is True if there are any changes in the result tuple.
        """
        if source_entry is None:
            if target_entry is None:
                return None
            file_id = target_entry.file_id
        else:
            file_id = source_entry.file_id
951
if source_entry is not None:
952
source_versioned = True
953
source_name = source_entry.name
954
source_parent = source_entry.parent_id
955
if source_path is None:
956
source_path = self.source.id2path(file_id)
957
source_kind, source_executable, source_stat = \
958
self.source._comparison_data(source_entry, source_path)
960
        else:
            source_versioned = False
            source_name = None
            source_parent = None
            source_kind = None
            source_executable = None
965
if target_entry is not None:
966
target_versioned = True
967
target_name = target_entry.name
968
target_parent = target_entry.parent_id
969
if target_path is None:
970
target_path = self.target.id2path(file_id)
971
target_kind, target_executable, target_stat = \
972
self.target._comparison_data(target_entry, target_path)
974
        else:
            target_versioned = False
            target_name = None
            target_parent = None
            target_kind = None
            target_executable = None
979
versioned = (source_versioned, target_versioned)
980
kind = (source_kind, target_kind)
981
changed_content = False
982
if source_kind != target_kind:
983
changed_content = True
984
elif source_kind == 'file':
985
if not self.file_content_matches(file_id, file_id, source_path,
986
target_path, source_stat, target_stat):
987
changed_content = True
988
elif source_kind == 'symlink':
989
if (self.source.get_symlink_target(file_id) !=
990
self.target.get_symlink_target(file_id)):
991
changed_content = True
992
# XXX: Yes, the indentation below is wrong. But fixing it broke
993
# test_merge.TestMergerEntriesLCAOnDisk.
994
# test_nested_tree_subtree_renamed_and_modified. We'll wait for
995
# the fix from bzr.dev -- vila 2009026
996
elif source_kind == 'tree-reference':
997
if (self.source.get_reference_revision(file_id, source_path)
998
!= self.target.get_reference_revision(file_id, target_path)):
999
changed_content = True
1000
parent = (source_parent, target_parent)
1001
name = (source_name, target_name)
1002
executable = (source_executable, target_executable)
1003
        if (changed_content is not False or versioned[0] != versioned[1]
            or parent[0] != parent[1] or name[0] != name[1] or
            executable[0] != executable[1]):
            changes = True
        else:
            changes = False
        return (file_id, (source_path, target_path), changed_content,
                versioned, parent, name, kind, executable), changes
1012
557
@needs_read_lock
1013
558
def compare(self, want_unchanged=False, specific_files=None,
1014
559
extra_trees=None, require_versioned=False, include_root=False,
1075
621
:param require_versioned: Raise errors.PathsNotVersionedError if a
1076
622
path in the specific_files list is not versioned in one of
1077
623
source, target or extra_trees.
1078
:param specific_files: An optional list of file paths to restrict the
1079
comparison to. When mapping filenames to ids, all matches in all
1080
trees (including optional extra_trees) are used, and all children
1081
of matched directories are included. The parents in the target tree
1082
of the specific files up to and including the root of the tree are
1083
always evaluated for changes too.
1084
624
:param want_unversioned: Should unversioned files be returned in the
1085
625
output. An unversioned file is defined as one with (False, False)
1086
626
for the versioned pair.
1088
629
        lookup_trees = [self.source]
        if extra_trees:
            lookup_trees.extend(extra_trees)
1091
# The ids of items we need to examine to insure delta consistency.
1092
precise_file_ids = set()
1093
changed_file_ids = []
1094
        if specific_files == []:
            specific_file_ids = []
        else:
            specific_file_ids = self.target.paths2ids(specific_files,
                lookup_trees, require_versioned=require_versioned)
1099
if specific_files is not None:
1100
# reparented or added entries must have their parents included
1101
# so that valid deltas can be created. The seen_parents set
1102
# tracks the parents that we need to have.
1103
# The seen_dirs set tracks directory entries we've yielded.
1104
# After outputting version object in to_entries we set difference
1105
# the two seen sets and start checking parents.
1106
            seen_parents = set()
            seen_dirs = set()
        if want_unversioned:
            all_unversioned = sorted([(p.split('/'), p) for p in
                                     self.target.extras()
                if specific_files is None or
                    osutils.is_inside_any(specific_files, p)])
            all_unversioned = collections.deque(all_unversioned)
        else:
            all_unversioned = collections.deque()
1117
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
1122
647
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        entry_count = 0
        # the unversioned path lookup only occurs on real trees - where there
1125
650
# can be extras. So the fake_entry is solely used to look up
1126
651
# executable it values when execute is not supported.
1127
        fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
        for target_path, target_entry in to_entries_by_dir:
            while (all_unversioned and
                all_unversioned[0][0] < target_path.split('/')):
                unversioned_path = all_unversioned.popleft()
                target_kind, target_executable, target_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, None),
                    (None, unversioned_path[0][-1]),
                    (None, target_kind),
                    (None, target_executable))
1139
            source_path, source_entry = from_data.get(target_entry.file_id,
                (None, None))
            result, changes = self._changes_from_entries(source_entry,
                target_entry, source_path=source_path, target_path=target_path)
            to_paths[result[0]] = result[1][1]
            entry_count += 1
            if result[3][0]:
                entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            if changes or include_unchanged:
                if specific_file_ids is not None:
                    new_parent_id = result[4][1]
                    precise_file_ids.add(new_parent_id)
                    changed_file_ids.append(result[0])
                yield result
            # Ensure correct behaviour for reparented/added specific files.
            if specific_files is not None:
                # Record output dirs
                if result[6][1] == 'directory':
                    seen_dirs.add(result[0])
                # Record parents of reparented/added entries.
                versioned = result[3]
                parents = result[4]
                if not versioned[0] or parents[0] != parents[1]:
                    seen_parents.add(parents[1])
1165
715
while all_unversioned:
1166
716
# yield any trailing unversioned paths
1167
717
unversioned_path = all_unversioned.popleft()
1194
756
self.source._comparison_data(from_entry, path)
1195
757
kind = (from_kind, None)
1196
758
executable = (from_executable, None)
1197
            changed_content = from_kind is not None
1198
760
# the parent's path is necessarily known at this point.
1199
changed_file_ids.append(file_id)
1200
761
yield(file_id, (path, to_path), changed_content, versioned, parent,
1201
762
name, kind, executable)
1202
changed_file_ids = set(changed_file_ids)
1203
if specific_file_ids is not None:
1204
for result in self._handle_precise_ids(precise_file_ids,
1208
def _get_entry(self, tree, file_id):
1209
"""Get an inventory entry from a tree, with missing entries as None.
1211
If the tree raises NotImplementedError on accessing .inventory, then
1212
this is worked around using iter_entries_by_dir on just the file id
1215
:param tree: The tree to lookup the entry in.
1216
:param file_id: The file_id to lookup.
1219
inventory = tree.inventory
1220
except NotImplementedError:
1221
# No inventory available.
1223
iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
1224
return iterator.next()[1]
1225
except StopIteration:
1229
return inventory[file_id]
1230
except errors.NoSuchId:
1233
def _handle_precise_ids(self, precise_file_ids, changed_file_ids,
1234
discarded_changes=None):
        """Fill out a partial iter_changes to be consistent.

        :param precise_file_ids: The file ids of parents that were seen during
            the iter_changes.
        :param changed_file_ids: The file ids of already emitted items.
        :param discarded_changes: An optional dict of precalculated
            iter_changes items which the partial iter_changes had not output
            but had calculated.
        :return: A generator of iter_changes items to output.
        """
1245
# process parents of things that had changed under the users
1246
# requested paths to prevent incorrect paths or parent ids which
1247
# aren't in the tree.
1248
while precise_file_ids:
1249
precise_file_ids.discard(None)
1250
# Don't emit file_ids twice
1251
precise_file_ids.difference_update(changed_file_ids)
1252
            if not precise_file_ids:
                break
            # If there was something at a given output path in source, we
            # have to include the entry from source in the delta, or we would
            # be putting this entry into a used path.
            paths = []
            for parent_id in precise_file_ids:
                try:
                    paths.append(self.target.id2path(parent_id))
                except errors.NoSuchId:
                    # This id has been dragged in from the source by delta
                    # expansion and isn't present in target at all: we don't
                    # need to check for path collisions on it.
                    pass
            for path in paths:
                old_id = self.source.path2id(path)
                precise_file_ids.add(old_id)
1269
precise_file_ids.discard(None)
1270
current_ids = precise_file_ids
1271
precise_file_ids = set()
1272
# We have to emit all of precise_file_ids that have been altered.
1273
# We may have to output the children of some of those ids if any
1274
# directories have stopped being directories.
1275
            for file_id in current_ids:
                if discarded_changes:
                    result = discarded_changes.get(file_id)
                    old_entry = None
                else:
                    result = None
                if result is None:
                    old_entry = self._get_entry(self.source, file_id)
                    new_entry = self._get_entry(self.target, file_id)
                    result, changes = self._changes_from_entries(
                        old_entry, new_entry)
                else:
                    changes = True
                # Get this parent's parent to examine.
                new_parent_id = result[4][1]
                precise_file_ids.add(new_parent_id)
                if changes:
                    if (result[6][0] == 'directory' and
                            result[6][1] != 'directory'):
                        # This stopped being a directory, the old children have
                        # to be included.
                        if old_entry is None:
                            # Reusing a discarded change.
                            old_entry = self._get_entry(self.source, file_id)
                        for child in old_entry.children.values():
                            precise_file_ids.add(child.file_id)
                    changed_file_ids.add(result[0])
                    yield result
1306
def file_content_matches(self, source_file_id, target_file_id,
1307
source_path=None, target_path=None, source_stat=None, target_stat=None):
1308
"""Check if two files are the same in the source and target trees.
1310
This only checks that the contents of the files are the same,
1311
it does not touch anything else.
1313
:param source_file_id: File id of the file in the source tree
1314
:param target_file_id: File id of the file in the target tree
1315
:param source_path: Path of the file in the source tree
1316
:param target_path: Path of the file in the target tree
1317
:param source_stat: Optional stat value of the file in the source tree
1318
:param target_stat: Optional stat value of the file in the target tree
1319
        :return: Boolean indicating whether the files have the same contents
        """
        source_verifier_kind, source_verifier_data = self.source.get_file_verifier(
            source_file_id, source_path, source_stat)
        target_verifier_kind, target_verifier_data = self.target.get_file_verifier(
            target_file_id, target_path, target_stat)
        if source_verifier_kind == target_verifier_kind:
            return (source_verifier_data == target_verifier_data)
        # Fall back to SHA1 for now
        if source_verifier_kind != "SHA1":
            source_sha1 = self.source.get_file_sha1(source_file_id,
                    source_path, source_stat)
        else:
            source_sha1 = source_verifier_data
        if target_verifier_kind != "SHA1":
            target_sha1 = self.target.get_file_sha1(target_file_id,
                    target_path, target_stat)
        else:
            target_sha1 = target_verifier_data
        return (source_sha1 == target_sha1)
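    # Illustrative sketch (not part of the original source): this is the kind
    # of call _changes_from_entries makes above to decide whether content
    # changed, letting the tree pair pick the cheapest shared verifier.
    #
    #   if not inter_tree.file_content_matches(file_id, file_id,
    #           source_path, target_path):
    #       changed_content = True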
1340
InterTree.register_optimiser(InterTree)
1343
class MultiWalker(object):
1344
"""Walk multiple trees simultaneously, getting combined results."""
1346
# Note: This could be written to not assume you can do out-of-order
1347
# lookups. Instead any nodes that don't match in all trees could be
1348
# marked as 'deferred', and then returned in the final cleanup loop.
1349
# For now, I think it is "nicer" to return things as close to the
1350
# "master_tree" order as we can.
1352
def __init__(self, master_tree, other_trees):
1353
"""Create a new MultiWalker.
1355
All trees being walked must implement "iter_entries_by_dir()", such
1356
that they yield (path, object) tuples, where that object will have a
1357
'.file_id' member, that can be used to check equality.
1359
:param master_tree: All trees will be 'slaved' to the master_tree such
1360
that nodes in master_tree will be used as 'first-pass' sync points.
1361
Any nodes that aren't in master_tree will be merged in a second
1363
:param other_trees: A list of other trees to walk simultaneously.
1365
self._master_tree = master_tree
1366
self._other_trees = other_trees
1368
# Keep track of any nodes that were properly processed just out of
1369
# order, that way we don't return them at the end, we don't have to
1370
# track *all* processed file_ids, just the out-of-order ones
1371
self._out_of_order_processed = set()
1374
    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie
1388
    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
1389
"""Compare two paths based on what directory they are in.
1391
This generates a sort order, such that all children of a directory are
1392
sorted together, and grandchildren are in the same order as the
1393
children appear. But all grandchildren come after all children.
1395
:param path1: first path
1396
:param path2: the second path
1397
:return: negative number if ``path1`` comes first,
1398
0 if paths are equal
1399
and a positive number if ``path2`` sorts first
1401
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
1405
# Unicode objects. Consider using encode_utf8() and then using the
1406
# optimized versions, or maybe writing optimized unicode versions.
1407
if not isinstance(path1, unicode):
1408
raise TypeError("'path1' must be a unicode string, not %s: %r"
1409
% (type(path1), path1))
1410
if not isinstance(path2, unicode):
1411
raise TypeError("'path2' must be a unicode string, not %s: %r"
1412
% (type(path2), path2))
1413
return cmp(MultiWalker._path_to_key(path1),
1414
MultiWalker._path_to_key(path2))
1417
    @staticmethod
    def _path_to_key(path):
1418
dirname, basename = osutils.split(path)
1419
return (dirname.split(u'/'), basename)
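    # Illustrative sketch (not part of the original source): the key groups
    # entries by containing directory, so all children of a directory sort
    # together ahead of any grandchildren.
    #
    #   sorted(['a/b/c', 'a/d', 'a/b', 'f'], key=MultiWalker._path_to_key)
    #   # -> ['f', 'a/b', 'a/d', 'a/b/c']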
1421
def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
1422
"""Lookup an inventory entry by file_id.
1424
This is called when an entry is missing in the normal order.
1425
Generally this is because a file was either renamed, or it was
1426
deleted/added. If the entry was found in the inventory and not in
1427
extra_entries, it will be added to self._out_of_order_processed
1429
:param extra_entries: A dictionary of {file_id: (path, ie)}. This
1430
should be filled with entries that were found before they were
1431
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
1438
if file_id in extra_entries:
1439
return extra_entries.pop(file_id)
1440
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)
    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result
1461
def _walk_master_tree(self):
1462
"""First pass, walk all trees in lock-step.
1464
When we are done, all nodes in the master_tree will have been
1465
processed. _other_walkers, _other_entries, and _others_extra will be
1466
set on 'self' for future processing.
1468
# This iterator has the most "inlining" done, because it tends to touch
1469
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()
1473
other_walkers = [other.iter_entries_by_dir()
1474
for other in self._other_trees]
1475
other_entries = [self._step_one(walker) for walker in other_walkers]
1476
# Track extra nodes in the other trees
1477
others_extra = [{} for i in xrange(len(self._other_trees))]
1479
master_has_more = True
1480
step_one = self._step_one
1481
lookup_by_file_id = self._lookup_by_file_id
1482
out_of_order_processed = self._out_of_order_processed
1484
while master_has_more:
1485
(master_has_more, path, master_ie) = step_one(master_iterator)
1486
            if not master_has_more:
                break
            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
1492
next_other_entries = []
1493
next_other_entries_append = next_other_entries.append
1494
for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
1495
if not other_has_more:
1496
other_values_append(lookup_by_file_id(
1497
others_extra[idx], self._other_trees[idx], file_id))
1498
next_other_entries_append((False, None, None))
1499
elif file_id == other_ie.file_id:
1500
# This is the critical code path, as most of the entries
1501
# should match between most trees.
1502
other_values_append((other_path, other_ie))
1503
next_other_entries_append(step_one(other_walkers[idx]))
1505
                else:
                    # This walker did not match, step it until it either
1506
# matches, or we know we are past the current walker.
1507
other_walker = other_walkers[idx]
1508
other_extra = others_extra[idx]
1509
while (other_has_more and
1510
self._cmp_path_by_dirblock(other_path, path) < 0):
1511
other_file_id = other_ie.file_id
1512
if other_file_id not in out_of_order_processed:
1513
other_extra[other_file_id] = (other_path, other_ie)
1514
other_has_more, other_path, other_ie = \
1515
step_one(other_walker)
1516
if other_has_more and other_ie.file_id == file_id:
1517
# We ended up walking to this point, match and step
1519
other_values_append((other_path, other_ie))
1520
other_has_more, other_path, other_ie = \
1521
step_one(other_walker)
1523
# This record isn't in the normal order, see if it
1525
other_values_append(lookup_by_file_id(
1526
other_extra, self._other_trees[idx], file_id))
1527
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries
1531
# We've matched all the walkers, yield this datapoint
1532
yield path, file_id, master_ie, other_values
1533
self._other_walkers = other_walkers
1534
self._other_entries = other_entries
1535
self._others_extra = others_extra
1537
def _finish_others(self):
1538
"""Finish walking the other iterators, so we get all entries."""
1539
for idx, info in enumerate(self._other_entries):
1540
other_extra = self._others_extra[idx]
1541
(other_has_more, other_path, other_ie) = info
1542
while other_has_more:
1543
other_file_id = other_ie.file_id
1544
if other_file_id not in self._out_of_order_processed:
1545
other_extra[other_file_id] = (other_path, other_ie)
1546
other_has_more, other_path, other_ie = \
1547
self._step_one(self._other_walkers[idx])
1548
del self._other_entries
1550
def _walk_others(self):
1551
"""Finish up by walking all the 'deferred' nodes."""
1552
# TODO: One alternative would be to grab all possible unprocessed
1553
# file_ids, and then sort by path, and then yield them. That
1554
# might ensure better ordering, in case a caller strictly
1555
# requires parents before children.
1556
for idx, other_extra in enumerate(self._others_extra):
1557
others = sorted(other_extra.itervalues(),
1558
key=lambda x: self._path_to_key(x[0]))
1559
for other_path, other_ie in others:
1560
file_id = other_ie.file_id
1561
# We don't need to check out_of_order_processed here, because
1562
# the lookup_by_file_id will be removing anything processed
1563
# from the extras cache
1564
other_extra.pop(file_id)
1565
other_values = [(None, None) for i in xrange(idx)]
1566
other_values.append((other_path, other_ie))
1567
for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
1568
alt_idx = alt_idx + idx + 1
1569
alt_extra = self._others_extra[alt_idx]
1570
alt_tree = self._other_trees[alt_idx]
1571
other_values.append(self._lookup_by_file_id(
1572
alt_extra, alt_tree, file_id))
1573
yield other_path, file_id, None, other_values
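# Illustrative sketch (not part of the original source): walking a master
# tree against two hypothetical others and collecting per-file rows.
#
#   walker = MultiWalker(master_tree, [other_tree_a, other_tree_b])
#   for path, file_id, master_ie, other_values in walker.iter_all():
#       # other_values holds one (path, inventory_entry) pair per other
#       # tree, with (None, None) where that tree lacks the file.
#       pass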
765
# This was deprecated before 0.12, but did not have an official warning
@symbol_versioning.deprecated_function(symbol_versioning.zero_twelve)
def RevisionTree(*args, **kwargs):
    """RevisionTree has moved to bzrlib.revisiontree.RevisionTree()

    Accessing it as bzrlib.tree.RevisionTree has been deprecated as of
    bzr 0.12.
    """
    from bzrlib.revisiontree import RevisionTree as _RevisionTree
    return _RevisionTree(*args, **kwargs)