        raise NotImplementedError(self.get_symlink_target)

    def get_canonical_inventory_paths(self, paths):
        """Like get_canonical_inventory_path() but works on multiple items.

        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case
            insensitively.
        """
        return list(self._yield_canonical_inventory_paths(paths))

    def get_canonical_inventory_path(self, path):
        """Returns the first inventory item that case-insensitively matches path.

        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches case-insensitively, it is implementation
        defined which is returned.

        If no path matches case-insensitively, the input path is returned, but
        with as many path entries that do exist changed to their canonical
        form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        """
        return self._yield_canonical_inventory_paths([path]).next()
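    # Usage sketch (illustrative only; the file name below is hypothetical):
    # resolving a miscased path against a tree that actually contains
    # 'README.txt'.
    #
    #   tree.lock_read()
    #   try:
    #       tree.get_canonical_inventory_path('readme.TXT')
    #       # -> 'README.txt' if such an entry exists; otherwise the input
    #       #    path is returned with any leading elements that do exist
    #       #    canonicalised.
    #   finally:
    #       tree.unlock()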
    def _yield_canonical_inventory_paths(self, paths):
        for path in paths:
            # First, if the path as specified exists exactly, just use it.
            if self.path2id(path) is not None:
                yield path
                continue
            # Otherwise walk down from the root, matching each path element
            # case-insensitively against the children that do exist.
            cur_id = self.get_root_id()
            cur_path = ''
            bit_iter = iter(path.split("/"))
            for elt in bit_iter:
                lelt = elt.lower()
                for child in self.iter_children(cur_id):
                    try:
                        child_base = os.path.basename(self.id2path(child))
                        if child_base.lower() == lelt:
                            cur_id = child
                            cur_path = osutils.pathjoin(cur_path, child_base)
                            break
                    except errors.NoSuchId:
                        # before a change is committed we can see this error...
                        continue
                else:
                    # got to the end of this directory and no entries matched.
                    # Return what matched so far, plus the rest as specified.
                    cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
                    break
            yield cur_path

    def get_root_id(self):
        """Return the file_id for the root of this tree."""
        raise NotImplementedError(self.get_root_id)
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 last_revision_base)
    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)
    @staticmethod
    def _file_revision(revision_tree, file_id):
        """Determine the revision associated with a file in a given tree."""
        revision_tree.lock_read()
        try:
            return revision_tree.inventory[file_id].revision
        finally:
            revision_tree.unlock()
    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                self.get_file(file_id).readlines())
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self._file_revision(self, file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision
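    # Caller sketch (illustrative; names follow the plan_file_merge() helpers
    # in this module, but treat the exact pseudo-revision ids as assumptions):
    #
    #   vf = versionedfile._PlanMergeVersionedFile(file_id)
    #   last_rev_this = self._get_file_revision(file_id, vf, 'this:')
    #   last_rev_other = other._get_file_revision(file_id, vf, 'other:')
    #   plan = vf.plan_merge(last_rev_this, last_rev_other)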
    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def walkdirs(self, prefix=""):
        """Walk the contents of this tree from path down."""
        raise NotImplementedError(self.walkdirs)
    def supports_content_filtering(self):
        return False
    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
            or None if unknown
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        """
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            note("*** %s content-filter: %s => %r" % (path, prefs, stk))
        return stk
    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        """
        if self.supports_content_filtering():
            return lambda path, file_id: \
                    self._content_filter_stack(path, file_id)
        else:
            return None
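    # Illustrative sketch only (the path here is hypothetical; the actual
    # filters returned depend on what is registered and on the tree's rules):
    #
    #   stack = tree._content_filter_stack(path='hello.txt')
    #   # Each element of `stack` is a ContentFilter; readers are applied
    #   # first-to-last when reading stored content, writers last-to-first
    #   # when writing it back, as documented above.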
    def iter_search_rules(self, path_names, pref_names=None,
        _default_searcher=None):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
          Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
          See _RulesSearcher.get_items for details on the tuple sequence.
        """
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)
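    # Example of the calling pattern (the preference name and value shown are
    # made up for illustration; real ones come from the registered rules):
    #
    #   for attrs in tree.iter_search_rules(['doc/a.txt', 'src/b.py'],
    #                                       pref_names=['eol']):
    #       # one tuple sequence per input path, e.g. (('eol', 'native'),)
    #       print attrs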
    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher

class EmptyTree(Tree):

    def __init__(self):
        self._inventory = Inventory(root_id=None)
        symbol_versioning.warn('EmptyTree is deprecated as of bzr 0.9 please'
                               ' use repository.revision_tree instead.',
                               DeprecationWarning, stacklevel=2)

    def get_parent_ids(self):
        return []

    def get_symlink_target(self, file_id):
        return None

    def has_filename(self, filename):
        return False

    def kind(self, file_id):
        assert self._inventory[file_id].kind == "directory"
        return "directory"

    def list_files(self, include_root=False):
        return iter([])

    def __contains__(self, file_id):
        return (file_id in self._inventory)

    def get_file_sha1(self, file_id, path=None, stat_value=None):
        return None

######################################################################
            from_kind, from_executable, from_stat = \
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            yield(file_id, (path, to_path), changed_content, versioned, parent,
                  name, kind, executable)

class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()
    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie
    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal,
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
                   MultiWalker._path_to_key(path2))
    @staticmethod
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)
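    # Example of the resulting order (an illustrative aside): with plain
    # string sorting 'a/b/c' would sort before 'a/z', but _path_to_key gives
    #   _path_to_key(u'a/z')   == ([u'a'], u'z')
    #   _path_to_key(u'a/b/c') == ([u'a', u'b'], u'c')
    # so every direct child of 'a' (such as 'a/z') sorts before any
    # grandchild (such as 'a/b/c'), which is the dirblock order described in
    # _cmp_path_by_dirblock above.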
    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)
    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result
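    # Rough usage sketch (the tree names are hypothetical): walk a working
    # tree against its basis tree and get, per file_id, the (path, entry)
    # pair from every tree, with (None, None) where a tree has no such entry.
    #
    #   walker = MultiWalker(work_tree, [basis_tree])
    #   for path, file_id, master_ie, other_values in walker.iter_all():
    #       basis_path, basis_ie = other_values[0]
    #       ...
    #
    # Entries present only in the other trees come out of the later passes
    # with master_ie set to None.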
    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()
        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra
    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries
    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values

# This was deprecated before 0.12, but did not have an official warning
@symbol_versioning.deprecated_function(symbol_versioning.zero_twelve)
def RevisionTree(*args, **kwargs):
    """RevisionTree has moved to bzrlib.revisiontree.RevisionTree()

    Accessing it as bzrlib.tree.RevisionTree has been deprecated as of
    bzr 0.12. Please use bzrlib.revisiontree.RevisionTree instead.
    """
    from bzrlib.revisiontree import RevisionTree as _RevisionTree
    return _RevisionTree(*args, **kwargs)