            want_unversioned=want_unversioned,
            )

    def iter_changes(self, from_tree, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=None,
                     require_versioned=True, want_unversioned=False):
        intertree = InterTree.get(from_tree, self)
        return intertree.iter_changes(include_unchanged, specific_files, pb,
            extra_trees, require_versioned, want_unversioned=want_unversioned)

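    # Illustrative sketch (not part of bzrlib): comparing a working tree with
    # its basis tree via iter_changes().  The tree objects below are
    # assumptions for the example; the tuple layout matches the values
    # yielded by InterTree.iter_changes.
    #
    #   basis = wt.basis_tree()
    #   for (file_id, (old_path, new_path), changed_content, versioned,
    #        parent, name, kind, executable) in wt.iter_changes(basis):
    #       if changed_content:
    #           print 'modified:', new_path or old_path
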
    def conflicts(self):
        """Get a list of the conflicts in the tree.

        Each conflict is an instance of bzrlib.conflicts.Conflict.
        """
        return _mod_conflicts.ConflictList()

    def extras(self):
        """For trees that can have unversioned files, return all such paths."""
        return []

    def get_parent_ids(self):
        """Get the parent ids for this tree.

        :return: a list of parent ids. [] is returned to indicate
        a tree with no parents.
        :raises: BzrError if the parents are not known.
        """
        raise NotImplementedError(self.get_parent_ids)

    def has_filename(self, filename):
        """True if the tree has given filename."""
        raise NotImplementedError(self.has_filename)

    def has_id(self, file_id):
        return self.inventory.has_id(file_id)

    def __contains__(self, file_id):
        return self.has_id(file_id)

    def has_or_had_id(self, file_id):
        if file_id == self.inventory.root.file_id:
            return True
        return self.inventory.has_id(file_id)

    def is_ignored(self, filename):

    def iter_entries_by_dir(self, specific_file_ids=None):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the
        grandchildren.

        For example, in the tree::

           a/
             b/
               c
             d/
               e
           f/
             g

        The yield order (ignoring root) would be::
          a, f, a/b, a/d, a/b/c, a/d/e, f/g
        """
        return self.inventory.iter_entries_by_dir(
            specific_file_ids=specific_file_ids)

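    # Illustrative sketch (not part of bzrlib): listing versioned paths in
    # 'by_dir' order.  'tree' stands for any Tree implementation and is an
    # assumption for the example.
    #
    #   tree.lock_read()
    #   try:
    #       for path, entry in tree.iter_entries_by_dir():
    #           print entry.kind, path
    #   finally:
    #       tree.unlock()
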
    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path, entry.file_id

    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
            % self.__class__.__name__)

    def stored_kind(self, file_id):
        """File kind stored for this file_id.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        """
        return self.kind(file_id)

    def path_content_summary(self, path):
        """Get a summary of the information about path.

        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file, None otherwise.
            exec is None unless kind is file and the platform supports the 'x'
                bit.
            sha1-or-link is the link target if kind is symlink, or the sha1 if
                it can be obtained without reading the file.
        """
        raise NotImplementedError(self.path_content_summary)

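    # Illustrative sketch (not part of bzrlib): dispatching on the summary
    # tuple returned by path_content_summary().  The tree object and path are
    # assumptions for the example; sha1_or_link may be None for files when a
    # cheap sha1 is not available.
    #
    #   kind, size, executable, sha1_or_link = tree.path_content_summary('README')
    #   if kind == 'file':
    #       print 'file of %s bytes' % (size,)
    #   elif kind == 'symlink':
    #       print 'symlink to', sha1_or_link
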
    def get_reference_revision(self, file_id, path=None):
        raise NotImplementedError("Tree subclass %s must implement "
                                  "get_reference_revision"
                                  % self.__class__.__name__)

    def _get_inventory(self):
        return self._inventory

    def get_file(self, file_id, path=None):
        """Return a file object for the file file_id in the tree.

        If both file_id and path are defined, it is implementation defined as
        to which one is used.
        """
        raise NotImplementedError(self.get_file)

    def get_file_with_stat(self, file_id, path=None):
        """Get a file handle and stat object for file_id.

        The default implementation returns (self.get_file, None) for backwards
        compatibility.

        :param file_id: The file id to read.
        :param path: The path of the file, if it is known.
        :return: A tuple (file_handle, stat_value_or_None). If the tree has
            no stat facility, or need for a stat cache feedback during commit,
            it may return None for the second element of the tuple.
        """
        return (self.get_file(file_id, path), None)

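    # Illustrative sketch (not part of bzrlib): reading a file while keeping
    # any stat value the tree can provide.  The tree object and file_id are
    # assumptions for the example; stat_value may be None.
    #
    #   file_obj, stat_value = tree.get_file_with_stat(file_id)
    #   try:
    #       text = file_obj.read()
    #   finally:
    #       file_obj.close()
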
    def get_file_text(self, file_id, path=None):
        """Return the byte content of a file.

        :param file_id: The file_id of the file.
        :param path: The path of the file.
        If both file_id and path are supplied, an implementation may use
        either one.
        """
        my_file = self.get_file(file_id, path)
        try:
            return my_file.read()
        finally:
            my_file.close()

    def get_file_lines(self, file_id, path=None):
        """Return the content of a file, as lines.

        :param file_id: The file_id of the file.
        :param path: The path of the file.
        If both file_id and path are supplied, an implementation may use
        either one.
        """
        return osutils.split_lines(self.get_file_text(file_id, path))

    def get_file_mtime(self, file_id, path=None):
        """Return the modification time for a file."""
        raise NotImplementedError(self.get_file_mtime)

    def get_symlink_target(self, file_id):
        raise NotImplementedError(self.get_symlink_target)

    def get_canonical_inventory_paths(self, paths):
        """Like get_canonical_inventory_path() but works on multiple items.

        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case
            insensitively.
        """
        return list(self._yield_canonical_inventory_paths(paths))

    def get_canonical_inventory_path(self, path):
        """Returns the first inventory item that case-insensitively matches path.

        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches case-insensitively, it is implementation
        defined which is returned.

        If no path matches case-insensitively, the input path is returned, but
        with as many path entries that do exist changed to their canonical
        form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        """
        return self._yield_canonical_inventory_paths([path]).next()

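    # Illustrative sketch (not part of bzrlib): mapping a user-supplied path
    # onto the casing actually stored in the inventory.  The tree object and
    # the example paths are assumptions.
    #
    #   real_path = tree.get_canonical_inventory_path('readme.TXT')
    #   # -> 'README.txt' if that is how the file is versioned
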
    def _yield_canonical_inventory_paths(self, paths):
        for path in paths:
            # First, if the path as specified exists exactly, just use it.
            if self.path2id(path) is not None:
                yield path
                continue
            # Otherwise walk down from the root, matching each path element
            # case-insensitively against the children of the current directory.
            cur_id = self.get_root_id()
            cur_path = ''
            bit_iter = iter(path.split("/"))
            for elt in bit_iter:
                lelt = elt.lower()
                for child in self.iter_children(cur_id):
                    try:
                        child_base = os.path.basename(self.id2path(child))
                        if child_base.lower() == lelt:
                            cur_id = child
                            cur_path = osutils.pathjoin(cur_path, child_base)
                            break
                    except errors.NoSuchId:
                        # before a change is committed we can see this error...
                        continue
                else:
                    # got to the end of this directory and no entries matched.
                    # Return what matched so far, plus the rest as specified.
                    cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
                    break
            yield cur_path

    def get_root_id(self):
        """Return the file_id for the root of this tree."""
        raise NotImplementedError(self.get_root_id)

    def annotate_iter(self, file_id,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.

        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.
        :param file_id: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of
            this value.
        """
        raise NotImplementedError(self.annotate_iter)

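    # Illustrative sketch (not part of bzrlib): printing a simple annotation.
    # The tree object and file_id are assumptions for the example; lines keep
    # their trailing newlines.
    #
    #   for revision_id, line in tree.annotate_iter(file_id):
    #       print '%-30s %s' % (revision_id, line),
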
    def _get_plan_merge_data(self, file_id, other, base):
        from bzrlib import versionedfile
        vf = versionedfile._PlanMergeVersionedFile(file_id)
        last_revision_a = self._get_file_revision(file_id, vf, 'this:')
        last_revision_b = other._get_file_revision(file_id, vf, 'other:')
        if base is None:
            last_revision_base = None
        else:
            last_revision_base = base._get_file_revision(file_id, vf, 'base:')
        return vf, last_revision_a, last_revision_b, last_revision_base

    def plan_file_merge(self, file_id, other, base=None):
        """Generate a merge plan based on annotations.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,
                             last_revision_base)

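    # Illustrative sketch (not part of bzrlib): walking a merge plan.  Each
    # item is a (state, line) pair, where state is a string such as
    # 'unchanged', 'new-a', 'new-b' or 'killed-a'.  The tree objects and
    # file_id are assumptions for the example.
    #
    #   for state, line in this_tree.plan_file_merge(file_id, other_tree):
    #       if state.startswith('new'):
    #           print 'added line:', line,
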
    def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 last_revision_base)

    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)

    @staticmethod
    def _file_revision(revision_tree, file_id):
        """Determine the revision associated with a file in a given tree."""
        revision_tree.lock_read()
        try:
            return revision_tree.inventory[file_id].revision
        finally:
            revision_tree.unlock()

    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                         self.get_file(file_id).readlines())
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self._file_revision(self, file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def _check_retrieved(self, ie, f):
        if not __debug__:
            return
        fp = fingerprint_file(f)
        f.seek(0)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise BzrError("mismatched size for file %r in %r" % (ie.file_id, self._store),
                        ["inventory expects %d bytes" % ie.text_size,
                         "file is actually %d bytes" % fp['size'],
                         "store is probably damaged/corrupt"])

        raise NotImplementedError(self.walkdirs)

    def supports_content_filtering(self):
        return False

    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        """
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            note("*** %s content-filter: %s => %r" % (path, prefs, stk))
        return stk

    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        """
        if self.supports_content_filtering():
            return lambda path, file_id: \
                self._content_filter_stack(path, file_id)
        else:
            return None

    def iter_search_rules(self, path_names, pref_names=None,
                          _default_searcher=None):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
            Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
            See _RulesSearcher.get_items for details on the tuple sequence.
        """
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)

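    # Illustrative sketch (not part of bzrlib): looking up rule-based
    # preferences for a couple of paths.  The tree object and the example
    # paths are assumptions; each yielded value is a sequence of
    # (name, value) tuples.
    #
    #   for items in tree.iter_search_rules(['README', 'src/foo.c']):
    #       print dict(items)
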
    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher

######################################################################

            from_kind, from_executable, from_stat = \
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            yield(file_id, (path, to_path), changed_content, versioned, parent,
                  name, kind, executable)


class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()

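    # Illustrative sketch (not part of bzrlib): walking one tree alongside
    # two others and reporting which of the other trees lack each entry.
    # The tree variables are assumptions for the example.
    #
    #   walker = MultiWalker(basis_tree, [other_tree_1, other_tree_2])
    #   for path, file_id, master_ie, other_values in walker.iter_all():
    #       missing = [i for i, (p, ie) in enumerate(other_values) if ie is None]
    #       if missing:
    #           print path, 'is missing from other trees', missing
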
    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie

    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
                   MultiWalker._path_to_key(path2))

    @staticmethod
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)

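    # Illustrative sketch (not part of bzrlib): the dirblock ordering keeps
    # all children of a directory together, ahead of any grandchildren.
    #
    #   paths = [u'a/b/c', u'a', u'f', u'a/b', u'a/d']
    #   paths.sort(cmp=MultiWalker._cmp_path_by_dirblock)
    #   # -> [u'a', u'f', u'a/b', u'a/d', u'a/b/c']
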
    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)

    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result

def _walk_master_tree(self):
1180
"""First pass, walk all trees in lock-step.
1182
When we are done, all nodes in the master_tree will have been
1183
processed. _other_walkers, _other_entries, and _others_extra will be
1184
set on 'self' for future processing.
1186
# This iterator has the most "inlining" done, because it tends to touch
1187
# every file in the tree, while the others only hit nodes that don't
1189
master_iterator = self._master_tree.iter_entries_by_dir()
1191
other_walkers = [other.iter_entries_by_dir()
1192
for other in self._other_trees]
1193
other_entries = [self._step_one(walker) for walker in other_walkers]
1194
# Track extra nodes in the other trees
1195
others_extra = [{} for i in xrange(len(self._other_trees))]
1197
master_has_more = True
1198
step_one = self._step_one
1199
lookup_by_file_id = self._lookup_by_file_id
1200
out_of_order_processed = self._out_of_order_processed
1202
while master_has_more:
1203
(master_has_more, path, master_ie) = step_one(master_iterator)
1204
if not master_has_more:
1207
file_id = master_ie.file_id
1209
other_values_append = other_values.append
1210
next_other_entries = []
1211
next_other_entries_append = next_other_entries.append
1212
for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
1213
if not other_has_more:
1214
other_values_append(lookup_by_file_id(
1215
others_extra[idx], self._other_trees[idx], file_id))
1216
next_other_entries_append((False, None, None))
1217
elif file_id == other_ie.file_id:
1218
# This is the critical code path, as most of the entries
1219
# should match between most trees.
1220
other_values_append((other_path, other_ie))
1221
next_other_entries_append(step_one(other_walkers[idx]))
1223
# This walker did not match, step it until it either
1224
# matches, or we know we are past the current walker.
1225
other_walker = other_walkers[idx]
1226
other_extra = others_extra[idx]
1227
while (other_has_more and
1228
self._cmp_path_by_dirblock(other_path, path) < 0):
1229
other_file_id = other_ie.file_id
1230
if other_file_id not in out_of_order_processed:
1231
other_extra[other_file_id] = (other_path, other_ie)
1232
other_has_more, other_path, other_ie = \
1233
step_one(other_walker)
1234
if other_has_more and other_ie.file_id == file_id:
1235
# We ended up walking to this point, match and step
1237
other_values_append((other_path, other_ie))
1238
other_has_more, other_path, other_ie = \
1239
step_one(other_walker)
1241
# This record isn't in the normal order, see if it
1243
other_values_append(lookup_by_file_id(
1244
other_extra, self._other_trees[idx], file_id))
1245
next_other_entries_append((other_has_more, other_path,
1247
other_entries = next_other_entries
1249
# We've matched all the walkers, yield this datapoint
1250
yield path, file_id, master_ie, other_values
1251
self._other_walkers = other_walkers
1252
self._other_entries = other_entries
1253
self._others_extra = others_extra
    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries

    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values