    def all_file_ids(self):
        """Iterate through all file ids, including ids for missing files."""
        raise NotImplementedError(self.all_file_ids)

    def id2path(self, file_id):
        """Return the path for a file id.

        :raises NoSuchId:
        """
        raise NotImplementedError(self.id2path)

    def is_control_filename(self, filename):
        """True if filename is the name of a control file in this tree.

        :param filename: A filename within the tree. This is a relative path
            from the root of this tree.

        This is true IF and ONLY IF the filename is part of the meta data
        that bzr controls in this tree. I.E. a random .bzr directory placed
        on disk will not be a control file for this tree.
        """
        return self.bzrdir.is_control_filename(filename)

    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the
        subdirectories.

        For example, in the tree::

           a/
             b/
               c
             d/
               e
           f/
             g

        The yield order (ignoring root) would be::

          a, f, a/b, a/d, a/b/c, a/d/e, f/g

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        """
        raise NotImplementedError(self.iter_entries_by_dir)

    def list_files(self, include_root=False, from_dir=None, recursive=True):
        """List all files in this tree.

        :param include_root: Whether to include the entry for the tree root
        :param from_dir: Directory under which to list files
        :param recursive: Whether to list files recursively
        :return: iterator over tuples of (path, versioned, kind, file_id,
            inventory entry)
        """
        raise NotImplementedError(self.list_files)

    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path, entry.file_id

    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
            % self.__class__.__name__)

    def stored_kind(self, file_id):
        """File kind stored for this file_id.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        """
        return self.kind(file_id)

    def path_content_summary(self, path):
        """Get a summary of the information about path.

        All the attributes returned are for the canonical form, not the
        convenient form (if content filters are in use.)

        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file and the size of the
            canonical form can be cheaply determined, None otherwise.
            exec is None unless kind is file and the platform supports the 'x'
            bit.
            sha1-or-link is the link target if kind is symlink, or the sha1 if
            it can be obtained without reading the file.
        """
        raise NotImplementedError(self.path_content_summary)

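    # Illustrative usage sketch (not part of this module's API): assuming
    # 'tree' is a concrete, read-locked Tree and 'README' is a versioned file,
    # a caller might unpack the summary tuple like this:
    #
    #   kind, size, executable, fingerprint = tree.path_content_summary('README')
    #   if kind == 'file' and size is not None:
    #       print 'README is %d bytes' % size
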
    def get_reference_revision(self, file_id, path=None):
        raise NotImplementedError("Tree subclass %s must implement "
                                  "get_reference_revision"
            % self.__class__.__name__)

    def _file_size(self, entry, stat_value):
        raise NotImplementedError(self._file_size)

    def get_file(self, file_id, path=None):
        """Return a file object for the file file_id in the tree.

        If both file_id and path are defined, it is implementation defined as
        to which one is used.
        """
        raise NotImplementedError(self.get_file)

    def get_file_with_stat(self, file_id, path=None):
        """Get a file handle and stat object for file_id.

        The default implementation returns (self.get_file, None) for backwards
        compatibility.

        :param file_id: The file id to read.
        :param path: The path of the file, if it is known.
        :return: A tuple (file_handle, stat_value_or_None). If the tree has
            no stat facility, or need for a stat cache feedback during commit,
            it may return None for the second element of the tuple.
        """
        return (self.get_file(file_id, path), None)

    def get_file_text(self, file_id, path=None):
        """Return the byte content of a file.

        :param file_id: The file_id of the file.
        :param path: The path of the file.

        If both file_id and path are supplied, an implementation may use
        either one.

        :returns: A single byte string for the whole file.
        """
        my_file = self.get_file(file_id, path)
        try:
            return my_file.read()
        finally:
            my_file.close()

    def get_file_lines(self, file_id, path=None):
        """Return the content of a file, as lines.

        :param file_id: The file_id of the file.
        :param path: The path of the file.

        If both file_id and path are supplied, an implementation may use
        either one.
        """
        return osutils.split_lines(self.get_file_text(file_id, path))

    def get_file_verifier(self, file_id, path=None, stat_value=None):
        """Return a verifier for a file.

        The default implementation returns a sha1.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        :param stat_value: Optional stat value for the object
        :return: Tuple with verifier name and verifier data
        """
        return ("SHA1", self.get_file_sha1(file_id, path=path,
            stat_value=stat_value))

    def get_file_sha1(self, file_id, path=None, stat_value=None):
        """Return the SHA1 of a file.

        :note: callers should use get_file_verifier instead
            where possible, as the underlying repository implementation may
            have quicker access to a non-sha1 verifier.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        :param stat_value: Optional stat value for the object
        """
        raise NotImplementedError(self.get_file_sha1)

    def get_file_mtime(self, file_id, path=None):
        """Return the modification time for a file.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        """
        raise NotImplementedError(self.get_file_mtime)

    def get_file_size(self, file_id):
        """Return the size of a file in bytes.

        This applies only to regular files. If invoked on directories or
        symlinks, it will return None.

        :param file_id: The file-id of the file
        """
        raise NotImplementedError(self.get_file_size)

    def get_file_by_path(self, path):
        raise NotImplementedError(self.get_file_by_path)

    def is_executable(self, file_id, path=None):
        """Check if a file is executable.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        """
        raise NotImplementedError(self.is_executable)

    def iter_files_bytes(self, desired_files):
        """Iterate through file contents.

        Files will not necessarily be returned in the order they occur in
        desired_files. No specific order is guaranteed.

        Yields pairs of identifier, bytes_iterator. identifier is an opaque
        value supplied by the caller as part of desired_files. It should
        uniquely identify the file version in the caller's context. (Examples:
        an index number or a TreeTransform trans_id.)

        bytes_iterator is an iterable of bytestrings for the file. The
        kind of iterable and length of the bytestrings are unspecified, but for
        this implementation, it is a tuple containing a single bytestring with
        the complete text of the file.

        :param desired_files: a list of (file_id, identifier) pairs
        """
        for file_id, identifier in desired_files:
            # We wrap the string in a tuple so that we can return an iterable
            # of bytestrings. (Technically, a bytestring is also an iterable
            # of bytestrings, but iterating through each character is not
            # performant.)
            cur_file = (self.get_file_text(file_id),)
            yield identifier, cur_file

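    # Illustrative usage sketch (not part of this module's API): the
    # identifiers are opaque to the tree, so callers may use any keys they
    # like. Assuming 'tree' is a concrete Tree and the file ids below exist:
    #
    #   wanted = [('file-id-1', 'a.txt'), ('file-id-2', 'b.txt')]
    #   for name, chunks in tree.iter_files_bytes(wanted):
    #       text = ''.join(chunks)
    #       print '%s is %d bytes' % (name, len(text))
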
    def get_symlink_target(self, file_id, path=None):
        """Get the target for a given file_id.

        It is assumed that the caller already knows that file_id is referencing
        a symlink.

        :param file_id: Handle for the symlink entry.
        :param path: The path of the file.
        If both file_id and path are supplied, an implementation may use
        either one.
        :return: The path the symlink points to.
        """
        raise NotImplementedError(self.get_symlink_target)

    def get_root_id(self):
        """Return the file_id for the root of this tree."""
        raise NotImplementedError(self.get_root_id)

    def annotate_iter(self, file_id,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.

        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.

        :param file_id: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of
            this value.
        """
        raise NotImplementedError(self.annotate_iter)

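    # Illustrative usage sketch (not part of this module's API): assuming
    # 'tree' is a read-locked working tree and 'file-id-1' is versioned,
    # lines that are new in the tree come back attributed to the 'current:'
    # pseudo-revision:
    #
    #   for revision_id, line in tree.annotate_iter('file-id-1'):
    #       print revision_id, line,
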
    def _get_plan_merge_data(self, file_id, other, base):
        from bzrlib import versionedfile
        vf = versionedfile._PlanMergeVersionedFile(file_id)
        last_revision_a = self._get_file_revision(file_id, vf, 'this:')
        last_revision_b = other._get_file_revision(file_id, vf, 'other:')
        if base is None:
            last_revision_base = None
        else:
            last_revision_base = base._get_file_revision(file_id, vf, 'base:')
        return vf, last_revision_a, last_revision_b, last_revision_base

    def plan_file_merge(self, file_id, other, base=None):
        """Generate a merge plan based on annotations.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,
                             last_revision_base)

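    # Illustrative usage sketch (not part of this module's API): assuming
    # 'this_tree' and 'other_tree' are read-locked trees that both version
    # 'file-id-1', the plan pairs an instruction with each line:
    #
    #   for state, line in this_tree.plan_file_merge('file-id-1', other_tree):
    #       # state is e.g. 'unchanged', 'new-a', 'new-b', 'killed-a', ...
    #       print state, line,
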
    def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 last_revision_base)

    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)

    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                         self.get_file_lines(file_id))
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self.get_file_revision(file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision

    def _check_retrieved(self, ie, f):
        if not __debug__:
            return
        fp = osutils.fingerprint_file(f)
        f.seek(0)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise errors.BzrError(
                        "mismatched size for file %r in %r" %
                        (ie.file_id, self._store),
                        ["inventory expects %d bytes" % ie.text_size,
                         "file is actually %d bytes" % fp['size'],
                         "store is probably damaged/corrupt"])

        if ie.text_sha1 != fp['sha1']:
            raise errors.BzrError("wrong SHA-1 for file %r in %r" %
                    (ie.file_id, self._store),
                    ["inventory expects %s" % ie.text_sha1,
                     "file is actually %s" % fp['sha1'],
                     "store is probably damaged/corrupt"])

    def path2id(self, path):
        """Return the id for path in this tree."""
        raise NotImplementedError(self.path2id)

    def paths2ids(self, paths, trees=[], require_versioned=True):
        """Return all the ids that can be reached by walking from paths.

        Each path is looked up in this tree and any extras provided in
        trees, and this is repeated recursively: the children in an extra tree
        of a directory that has been renamed under a provided path in this tree
        are all returned, even if none exist under a provided path in this
        tree, and vice versa.

        :param paths: An iterable of paths to start converting to ids from.
        """
        return find_ids_across_trees(paths, [self] + list(trees),
            require_versioned)

    def walkdirs(self, prefix=""):
        raise NotImplementedError(self.walkdirs)

    def supports_content_filtering(self):
        return False

    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
            or None if unknown
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        """
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            trace.note(gettext("*** {0} content-filter: {1} => {2!r}").format(
                path, prefs, stk))
        return stk

    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        """
        if self.supports_content_filtering():
            return lambda path, file_id: \
                    self._content_filter_stack(path, file_id)
        else:
            return None

    def iter_search_rules(self, path_names, pref_names=None,
        _default_searcher=None):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
          Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
          See _RulesSearcher.get_items for details on the tuple sequence.
        """
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)

    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher


class InventoryTree(Tree):
    """A tree that relies on an inventory for its metadata.

    Trees contain an `Inventory` object, and also know how to retrieve
    file texts mentioned in the inventory, either from a working
    directory or from a store.

    It is possible for trees to contain files that are not described
    in their inventory or vice versa; for this use `filenames()`.

    Subclasses should set the _inventory attribute, which is considered
    private to external API users.
    """

    def get_canonical_inventory_paths(self, paths):
        """Like get_canonical_inventory_path() but works on multiple items.

        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case
            insensitively.
        """
        return list(self._yield_canonical_inventory_paths(paths))

    def get_canonical_inventory_path(self, path):
        """Returns the first inventory item that case-insensitively matches path.

        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches case-insensitively, it is implementation
        defined which is returned.

        If no path matches case-insensitively, the input path is returned, but
        with as many path entries that do exist changed to their canonical
        form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        """
        return self._yield_canonical_inventory_paths([path]).next()

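    # Illustrative usage sketch (not part of this module's API): on a
    # case-insensitive filesystem the user-supplied path may differ in case
    # from the versioned one. Assuming 'tree' versions 'README.txt':
    #
    #   tree.get_canonical_inventory_path('readme.TXT')   # -> 'README.txt'
    #   tree.get_canonical_inventory_paths(['readme.TXT', 'no-such-file'])
    #   # -> ['README.txt', 'no-such-file']
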
    def _yield_canonical_inventory_paths(self, paths):
        for path in paths:
            # First, if the path as specified exists exactly, just use it.
            if self.path2id(path) is not None:
                yield path
                continue
            # go walkin...
            cur_id = self.get_root_id()
            cur_path = ''
            bit_iter = iter(path.split("/"))
            for elt in bit_iter:
                lelt = elt.lower()
                new_path = None
                for child in self.iter_children(cur_id):
                    try:
                        # XXX: it seem like if the child is known to be in the
                        # tree, we shouldn't need to go from its id back to
                        # its path -- mbp 2010-02-11
                        #
                        # XXX: it seems like we could be more efficient
                        # by just directly looking up the original name and
                        # only then searching all children; also by not
                        # chopping paths so much. -- mbp 2010-02-11
                        child_base = os.path.basename(self.id2path(child))
                        if (child_base == elt):
                            # if we found an exact match, we can stop now; if
                            # we found an approximate match we need to keep
                            # searching because there might be an exact match
                            # later.
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                            break
                        elif child_base.lower() == lelt:
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                    except errors.NoSuchId:
                        # before a change is committed we can see this error...
                        continue
                if new_path:
                    cur_path = new_path
                else:
                    # got to the end of this directory and no entries matched.
                    # Return what matched so far, plus the rest as specified.
                    cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
                    break
            yield cur_path

    def _get_inventory(self):
        return self._inventory

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def path2id(self, path):
        """Return the id for path in this tree."""
        return self._inventory.path2id(path)

    def id2path(self, file_id):
        """Return the path for a file id.

        :raises NoSuchId:
        """
        return self.inventory.id2path(file_id)

    def has_id(self, file_id):
        return self.inventory.has_id(file_id)

    def has_or_had_id(self, file_id):
        return self.inventory.has_id(file_id)

    def all_file_ids(self):
        return set(self.inventory)

    @deprecated_method(deprecated_in((2, 4, 0)))
    def __iter__(self):
        return iter(self.inventory)

    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.

        :return: set of paths.
        """
        # NB: we specifically *don't* call self.has_filename, because for
        # WorkingTrees that can indicate files that exist on disk but that
        # are not versioned.
        pred = self.inventory.has_filename
        return set((p for p in paths if not pred(p)))

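    # Illustrative usage sketch (not part of this module's API): assuming
    # 'tree' versions 'setup.py' but not 'scratch.txt', only the unversioned
    # path is returned:
    #
    #   tree.filter_unversioned_files(['setup.py', 'scratch.txt'])
    #   # -> set(['scratch.txt'])
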
    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is the one described in
        Tree.iter_entries_by_dir.

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        """
        return self.inventory.iter_entries_by_dir(
            specific_file_ids=specific_file_ids, yield_parents=yield_parents)

    def get_file_by_path(self, path):
        return self.get_file(self._inventory.path2id(path), path)


def find_renames(old_inv, new_inv):
    for file_id in old_inv:
        if file_id not in new_inv:
            continue
        old_name = old_inv.id2path(file_id)
        new_name = new_inv.id2path(file_id)
        if old_name != new_name:
            yield (old_name, new_name)


def find_ids_across_trees(filenames, trees, require_versioned=True):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, and all children of matched
    directories will be used.

    :param filenames: The filenames to find file_ids for (if None, returns
        None)
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames and their children.
    """
    if not filenames:
        return None
    specified_path_ids = _find_ids_across_trees(filenames, trees,
        require_versioned)
    return _find_children_across_trees(specified_path_ids, trees)


class InterTree(InterObject):
    """This class represents operations taking place between two Trees.

    Its instances have methods like 'compare' and contain references to the
    source and target trees these operations are to be carried out on.

    Clients of bzrlib should not need to use InterTree directly, rather they
    should use the convenience methods on Tree such as 'Tree.compare()' which
    will pass through to InterTree as appropriate.
    """

    # Formats that will be used to test this InterTree. If both are
    # None, this InterTree will not be tested (e.g. because a complex
    # setup is required)
    _matching_from_tree_format = None
    _matching_to_tree_format = None

    @classmethod
    def is_compatible(kls, source, target):
        # The default implementation is naive and uses the public API, so
        # it works for all trees.
        return True

    def _changes_from_entries(self, source_entry, target_entry,
        source_path=None, target_path=None):
        """Generate an iter_changes tuple between source_entry and target_entry.

        :param source_entry: An inventory entry from self.source, or None.
        :param target_entry: An inventory entry from self.target, or None.
        :param source_path: The path of source_entry, if known. If not known
            it will be looked up.
        :param target_path: The path of target_entry, if known. If not known
            it will be looked up.
        :return: A tuple, item 0 of which is an iter_changes result tuple, and
            item 1 is True if there are any changes in the result tuple.
        """
        if source_entry is None:
            if target_entry is None:
                raise AssertionError
            file_id = target_entry.file_id
        else:
            file_id = source_entry.file_id
        if source_entry is not None:
            source_versioned = True
            source_name = source_entry.name
            source_parent = source_entry.parent_id
            if source_path is None:
                source_path = self.source.id2path(file_id)
            source_kind, source_executable, source_stat = \
                self.source._comparison_data(source_entry, source_path)
        else:
            source_versioned = False
            source_name = None
            source_parent = None
            source_kind = None
            source_executable = None
        if target_entry is not None:
            target_versioned = True
            target_name = target_entry.name
            target_parent = target_entry.parent_id
            if target_path is None:
                target_path = self.target.id2path(file_id)
            target_kind, target_executable, target_stat = \
                self.target._comparison_data(target_entry, target_path)
        else:
            target_versioned = False
            target_name = None
            target_parent = None
            target_kind = None
            target_executable = None
        versioned = (source_versioned, target_versioned)
        kind = (source_kind, target_kind)
        changed_content = False
        if source_kind != target_kind:
            changed_content = True
        elif source_kind == 'file':
            if not self.file_content_matches(file_id, file_id, source_path,
                    target_path, source_stat, target_stat):
                changed_content = True
        elif source_kind == 'symlink':
            if (self.source.get_symlink_target(file_id) !=
                self.target.get_symlink_target(file_id)):
                changed_content = True
        # XXX: Yes, the indentation below is wrong. But fixing it broke
        # test_merge.TestMergerEntriesLCAOnDisk.
        # test_nested_tree_subtree_renamed_and_modified. We'll wait for
        # the fix from bzr.dev -- vila 2009026
        elif source_kind == 'tree-reference':
            if (self.source.get_reference_revision(file_id, source_path)
                != self.target.get_reference_revision(file_id, target_path)):
                    changed_content = True
        parent = (source_parent, target_parent)
        name = (source_name, target_name)
        executable = (source_executable, target_executable)
        if (changed_content is not False or versioned[0] != versioned[1]
            or parent[0] != parent[1] or name[0] != name[1] or
            executable[0] != executable[1]):
            changes = True
        else:
            changes = False
        return (file_id, (source_path, target_path), changed_content,
                versioned, parent, name, kind, executable), changes

    def iter_changes(self, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=[],
                     require_versioned=True, want_unversioned=False):
        """Generate an iterator of changes between trees.

        :param require_versioned: Raise errors.PathsNotVersionedError if a
            path in the specific_files list is not versioned in one of
            source, target or extra_trees.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children
            of matched directories are included. The parents in the target tree
            of the specific files up to and including the root of the tree are
            always evaluated for changes too.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        """
        lookup_trees = [self.source]
        if extra_trees:
            lookup_trees.extend(extra_trees)
        # The ids of items we need to examine to ensure delta consistency.
        precise_file_ids = set()
        changed_file_ids = []
        if specific_files == []:
            specific_file_ids = []
        else:
            specific_file_ids = self.target.paths2ids(specific_files,
                lookup_trees, require_versioned=require_versioned)
        if specific_files is not None:
            # reparented or added entries must have their parents included
            # so that valid deltas can be created. The seen_parents set
            # tracks the parents that we need to have.
            # The seen_dirs set tracks directory entries we've yielded.
            # After outputting version object in to_entries we set difference
            # the two seen sets and start checking parents.
            seen_parents = set()
            seen_dirs = set()
        if want_unversioned:
            all_unversioned = sorted([(p.split('/'), p) for p in
                                     self.target.extras()
                if specific_files is None or
                    osutils.is_inside_any(specific_files, p)])
            all_unversioned = collections.deque(all_unversioned)
        else:
            all_unversioned = collections.deque()
        to_paths = {}
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        entry_count = 0
        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable values when execute is not supported.
        fake_entry = inventory.InventoryFile('unused', 'unused', 'unused')
        for target_path, target_entry in to_entries_by_dir:
            while (all_unversioned and
                all_unversioned[0][0] < target_path.split('/')):
                unversioned_path = all_unversioned.popleft()
                target_kind, target_executable, target_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, None),
                    (None, unversioned_path[0][-1]),
                    (None, target_kind),
                    (None, target_executable))
            source_path, source_entry = from_data.get(target_entry.file_id,
                (None, None))
            result, changes = self._changes_from_entries(source_entry,
                target_entry, source_path=source_path, target_path=target_path)
            to_paths[result[0]] = result[1][1]
            entry_count += 1
            if result[3][0]:
                entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            if changes or include_unchanged:
                if specific_file_ids is not None:
                    new_parent_id = result[4][1]
                    precise_file_ids.add(new_parent_id)
                    changed_file_ids.append(result[0])
                yield result
            # Ensure correct behaviour for reparented/added specific files.
            if specific_files is not None:
                # Record output dirs
                if result[6][1] == 'directory':
                    seen_dirs.add(result[0])
                # Record parents of reparented/added entries.
                versioned = result[3]
                parents = result[4]
                if not versioned[0] or parents[0] != parents[1]:
                    seen_parents.add(parents[1])
        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(fake_entry, unversioned_path[1])
            yield (None, (None, unversioned_path[1]), True, (False, False),
                (None, None),
                (None, unversioned_path[0][-1]),
                (None, to_kind),
                (None, to_executable))
        # Yield all remaining source paths
        for path, from_entry in from_entries_by_dir:
            file_id = from_entry.file_id
            if file_id in to_paths:
                # already returned
                continue
            if not self.target.has_id(file_id):
                # common case - paths we have not emitted are not present in
                # target.
                to_path = None
            else:
                to_path = self.target.id2path(file_id)
            entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            versioned = (True, False)
            parent = (from_entry.parent_id, None)
            name = (from_entry.name, None)
            from_kind, from_executable, stat_value = \
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            changed_file_ids.append(file_id)
            yield (file_id, (path, to_path), changed_content, versioned, parent,
                  name, kind, executable)
        changed_file_ids = set(changed_file_ids)
        if specific_file_ids is not None:
            for result in self._handle_precise_ids(precise_file_ids,
                changed_file_ids):
                yield result

    def _get_entry(self, tree, file_id):
        """Get an inventory entry from a tree, with missing entries as None.

        If the tree raises NotImplementedError on accessing .inventory, then
        this is worked around using iter_entries_by_dir on just the file id
        desired.

        :param tree: The tree to lookup the entry in.
        :param file_id: The file_id to lookup.
        """
        try:
            inventory = tree.inventory
        except NotImplementedError:
            # No inventory available.
            try:
                iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
                return iterator.next()[1]
            except StopIteration:
                return None
        else:
            try:
                return inventory[file_id]
            except errors.NoSuchId:
                return None

    def _handle_precise_ids(self, precise_file_ids, changed_file_ids,
        discarded_changes=None):
        """Fill out a partial iter_changes to be consistent.

        :param precise_file_ids: The file ids of parents that were seen during
            the iter_changes.
        :param changed_file_ids: The file ids of already emitted items.
        :param discarded_changes: An optional dict of precalculated
            iter_changes items which the partial iter_changes had not output
            but had calculated.
        :return: A generator of iter_changes items to output.
        """
        # process parents of things that had changed under the users
        # requested paths to prevent incorrect paths or parent ids which
        # aren't in the tree.
        while precise_file_ids:
            precise_file_ids.discard(None)
            # Don't emit file_ids twice
            precise_file_ids.difference_update(changed_file_ids)
            if not precise_file_ids:
                break
            # If there was something at a given output path in source, we
            # have to include the entry from source in the delta, or we would
            # be putting this entry into a used path.
            paths = []
            for parent_id in precise_file_ids:
                try:
                    paths.append(self.target.id2path(parent_id))
                except errors.NoSuchId:
                    # This id has been dragged in from the source by delta
                    # expansion and isn't present in target at all: we don't
                    # need to check for path collisions on it.
                    pass
            for path in paths:
                old_id = self.source.path2id(path)
                precise_file_ids.add(old_id)
            precise_file_ids.discard(None)
            current_ids = precise_file_ids
            precise_file_ids = set()
            # We have to emit all of precise_file_ids that have been altered.
            # We may have to output the children of some of those ids if any
            # directories have stopped being directories.
            for file_id in current_ids:
                # Examine file_id
                if discarded_changes:
                    result = discarded_changes.get(file_id)
                    old_entry = None
                else:
                    result = None
                if result is None:
                    old_entry = self._get_entry(self.source, file_id)
                    new_entry = self._get_entry(self.target, file_id)
                    result, changes = self._changes_from_entries(
                        old_entry, new_entry)
                else:
                    changes = True
                # Get this parents parent to examine.
                new_parent_id = result[4][1]
                precise_file_ids.add(new_parent_id)
                if changes:
                    if (result[6][0] == 'directory' and
                        result[6][1] != 'directory'):
                        # This stopped being a directory, the old children have
                        # to be included.
                        if old_entry is None:
                            # Reusing a discarded change.
                            old_entry = self._get_entry(self.source, file_id)
                        for child in old_entry.children.values():
                            precise_file_ids.add(child.file_id)
                    changed_file_ids.add(result[0])
                    yield result

    def file_content_matches(self, source_file_id, target_file_id,
            source_path=None, target_path=None, source_stat=None, target_stat=None):
        """Check if two files are the same in the source and target trees.

        This only checks that the contents of the files are the same,
        it does not touch anything else.

        :param source_file_id: File id of the file in the source tree
        :param target_file_id: File id of the file in the target tree
        :param source_path: Path of the file in the source tree
        :param target_path: Path of the file in the target tree
        :param source_stat: Optional stat value of the file in the source tree
        :param target_stat: Optional stat value of the file in the target tree
        :return: Boolean indicating whether the files have the same contents
        """
        source_verifier_kind, source_verifier_data = self.source.get_file_verifier(
            source_file_id, source_path, source_stat)
        target_verifier_kind, target_verifier_data = self.target.get_file_verifier(
            target_file_id, target_path, target_stat)
        if source_verifier_kind == target_verifier_kind:
            return (source_verifier_data == target_verifier_data)
        # Fall back to SHA1 for now
        if source_verifier_kind != "SHA1":
            source_sha1 = self.source.get_file_sha1(source_file_id,
                    source_path, source_stat)
        else:
            source_sha1 = source_verifier_data
        if target_verifier_kind != "SHA1":
            target_sha1 = self.target.get_file_sha1(target_file_id,
                    target_path, target_stat)
        else:
            target_sha1 = target_verifier_data
        return (source_sha1 == target_sha1)

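    # Illustrative usage sketch (not part of this module's API): assuming
    # 'source_tree' and 'target_tree' are read-locked trees that both version
    # 'file-id-1', this avoids reading the file text whenever both sides can
    # supply a cheap verifier (e.g. a stored SHA1):
    #
    #   inter = InterTree.get(source_tree, target_tree)
    #   if inter.file_content_matches('file-id-1', 'file-id-1'):
    #       print 'contents identical'
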

InterTree.register_optimiser(InterTree)


class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()

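    # Illustrative usage sketch (not part of this module's API): assuming the
    # trees are already read-locked, iter_all() yields one row per path, with
    # the master entry first and one (path, entry) pair per other tree:
    #
    #   walker = MultiWalker(master_tree, [other_tree_a, other_tree_b])
    #   for path, file_id, master_ie, other_values in walker.iter_all():
    #       print path, file_id, master_ie, other_values
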
    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie

    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal,
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
                   MultiWalker._path_to_key(path2))

    @staticmethod
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)

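    # Illustrative sketch (not part of this module's API) of the ordering
    # these helpers produce: all children of a directory sort together,
    # before any grandchildren, unlike a plain string sort. For example:
    #
    #   sorted([u'a/b/c', u'a/z'], key=MultiWalker._path_to_key)
    #   # -> [u'a/z', u'a/b/c']   (a plain string sort would reverse these)
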
    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)

    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result

    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # does exist now
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra

    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries

    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values


# This was deprecated before 0.12, but did not have an official warning
@symbol_versioning.deprecated_function(symbol_versioning.zero_twelve)
def RevisionTree(*args, **kwargs):
    """RevisionTree has moved to bzrlib.revisiontree.RevisionTree()

    Accessing it as bzrlib.tree.RevisionTree has been deprecated as of
    0.12. Please use bzrlib.revisiontree.RevisionTree instead.
    """
    from bzrlib.revisiontree import RevisionTree as _RevisionTree
    return _RevisionTree(*args, **kwargs)