    Trees can be compared, etc., regardless of whether they are working
    trees or versioned trees.

    def changes_from(self, other, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):
        """Return a TreeDelta of the changes from other to this tree.

        :param other: A tree to compare with.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children of
            matched directories are included.
        :param want_unchanged: An optional boolean requesting the inclusion of
            unchanged entries in the result.
        :param extra_trees: An optional list of additional trees to use when
            mapping the contents of specific_files (paths) to file_ids.
        :param require_versioned: An optional boolean (defaults to False). When
            supplied and True all the 'specific_files' must be versioned, or
            a PathsNotVersionedError will be thrown.
        :param want_unversioned: Scan for unversioned paths.

        The comparison will be performed by an InterTree object looked up on self and other.
        # Martin observes that Tree.changes_from returns a TreeDelta and this
        # may confuse people, because the class name of the returned object is
        # a synonym of the object referenced in the method name.
        return InterTree.get(other, self).compare(
            want_unchanged=want_unchanged,
            specific_files=specific_files,
            extra_trees=extra_trees,
            require_versioned=require_versioned,
            include_root=include_root,
            want_unversioned=want_unversioned,
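    # Illustrative sketch, not part of the original source: comparing a
    # hypothetical working tree ``wt`` against its basis tree.  The returned
    # TreeDelta groups results into lists such as ``added``, ``removed`` and
    # ``modified``; the names ``wt`` and ``basis`` are assumptions here.
    #
    #   basis = wt.basis_tree()
    #   delta = wt.changes_from(basis)
    #   for path, file_id, kind in delta.added:
    #       print 'added', path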
    def iter_changes(self, from_tree, include_unchanged=False,
        specific_files=None, pb=None, extra_trees=None,
        require_versioned=True, want_unversioned=False):
        intertree = InterTree.get(from_tree, self)
        return intertree.iter_changes(include_unchanged, specific_files, pb,
            extra_trees, require_versioned, want_unversioned=want_unversioned)
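    # Illustrative sketch, not part of the original source: consuming the
    # iterator returned above.  Each item is the 8-tuple documented on
    # InterTree.iter_changes; ``wt`` and ``basis`` are hypothetical trees.
    #
    #   for (file_id, (old_path, new_path), changed_content, versioned,
    #        parent, name, kind, executable) in wt.iter_changes(basis):
    #       if changed_content:
    #           print 'content changed:', new_path or old_path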
        """Get a list of the conflicts in the tree.
        Each conflict is an instance of bzrlib.conflicts.Conflict.
        return _mod_conflicts.ConflictList()
        """For trees that can have unversioned files, return all such paths."""
    def get_parent_ids(self):
        """Get the parent ids for this tree.
        :return: a list of parent ids. [] is returned to indicate
            a tree with no parents.
        :raises: BzrError if the parents are not known.
        raise NotImplementedError(self.get_parent_ids)
    def has_filename(self, filename):
        """True if the tree has given filename."""
        raise NotImplementedError()
        raise NotImplementedError(self.has_filename)
    def has_id(self, file_id):
        return self.inventory.has_id(file_id)
    def __contains__(self, file_id):
        return self.has_id(file_id)
    def has_or_had_id(self, file_id):
        if file_id == self.inventory.root.file_id:
        return self.inventory.has_id(file_id)
    def is_ignored(self, filename):
        """Check whether the filename is ignored by this tree.
        :param filename: The relative filename within the tree.
        :return: True if the filename is ignored.
    def __iter__(self):
        return iter(self.inventory)
    def all_file_ids(self):
        """Iterate through all file ids, including ids for missing files."""
        return set(self.inventory)
    def id2path(self, file_id):
        """Return the path for a file id.
        return self.inventory.id2path(file_id)
    def is_control_filename(self, filename):
        """True if filename is the name of a control file in this tree.
        :param filename: A filename within the tree. This is a relative path
            from the root of this tree.
        This is true IF and ONLY IF the filename is part of the meta data
        that bzr controls in this tree. I.E. a random .bzr directory placed
        on disk will not be a control file for this tree.
        return self.bzrdir.is_control_filename(filename)
    def iter_entries_by_dir(self, specific_file_ids=None):
        """Walk the tree in 'by_dir' order.
        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:
        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the subdirectories.
        For example, in the tree::
        The yield order (ignoring root) would be::
          a, f, a/b, a/d, a/b/c, a/d/e, f/g
        return self.inventory.iter_entries_by_dir(
            specific_file_ids=specific_file_ids)
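    # Illustrative sketch, not part of the original source: the yielded
    # (path, entry) pairs can be consumed directly; ``tree`` is a
    # hypothetical Tree instance.
    #
    #   for path, entry in tree.iter_entries_by_dir():
    #       print entry.kind, path, entry.file_id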
    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path, entry.file_id
    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
            % self.__class__.__name__)
    def stored_kind(self, file_id):
        """File kind stored for this file_id.
        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        return self.kind(file_id)
    def path_content_summary(self, path):
        """Get a summary of the information about path.
        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file, None otherwise.
            exec is None unless kind is file and the platform supports the 'x' bit.
            sha1-or-link is the link target if kind is symlink, or the sha1 if
            it can be obtained without reading the file.
        raise NotImplementedError(self.path_content_summary)
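    # Illustrative sketch, not part of the original source: unpacking the
    # 4-tuple described above for a hypothetical ``tree``.
    #
    #   kind, size, executable, sha1_or_link = tree.path_content_summary('README')
    #   if kind == 'file' and sha1_or_link is None:
    #       pass  # sha1 was not cheaply available; read the file if needed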
    def get_reference_revision(self, file_id, path=None):
        raise NotImplementedError("Tree subclass %s must implement "
            "get_reference_revision"
            % self.__class__.__name__)
    def _comparison_data(self, entry, path):
        """Return a tuple of kind, executable, stat_value for a file.
        entry may be None if there is no inventory entry for the file, but
        path must always be supplied.
        kind is None if there is no file present (even if an inventory id is
        present). executable is False for non-file entries.
        raise NotImplementedError(self._comparison_data)
    def _file_size(self, entry, stat_value):
        raise NotImplementedError(self._file_size)
    def _get_inventory(self):
        return self._inventory
    def get_file(self, file_id, path=None):
        """Return a file object for the file file_id in the tree.
        If both file_id and path are defined, it is implementation defined as
        to which one is used.
        raise NotImplementedError(self.get_file)
    def get_file_text(self, file_id, path=None):
        """Return the byte content of a file.
        :param file_id: The file_id of the file.
        :param path: The path of the file.
        If both file_id and path are supplied, an implementation may use either one.
        my_file = self.get_file(file_id, path)
        return my_file.read()
    def get_file_lines(self, file_id, path=None):
        """Return the content of a file, as lines.
        :param file_id: The file_id of the file.
        :param path: The path of the file.
        If both file_id and path are supplied, an implementation may use either one.
        return osutils.split_lines(self.get_file_text(file_id, path))
    def get_file_mtime(self, file_id, path=None):
        """Return the modification time for a file.
        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        raise NotImplementedError(self.get_file_mtime)
    def get_file_size(self, file_id):
        """Return the size of a file in bytes.
        This applies only to regular files. If invoked on directories or
        symlinks, it will return None.
        :param file_id: The file-id of the file
        raise NotImplementedError(self.get_file_size)
    def get_file_by_path(self, path):
        return self.get_file(self._inventory.path2id(path))
        return self.get_file(self._inventory.path2id(path), path)
    def iter_files_bytes(self, desired_files):
        """Iterate through file contents.
        Files will not necessarily be returned in the order they occur in
        desired_files. No specific order is guaranteed.
        Yields pairs of identifier, bytes_iterator. identifier is an opaque
        value supplied by the caller as part of desired_files. It should
        uniquely identify the file version in the caller's context. (Examples:
        an index number or a TreeTransform trans_id.)
        bytes_iterator is an iterable of bytestrings for the file. The
        kind of iterable and length of the bytestrings are unspecified, but for
        this implementation, it is a tuple containing a single bytestring with
        the complete text of the file.
        :param desired_files: a list of (file_id, identifier) pairs
        for file_id, identifier in desired_files:
            # We wrap the string in a tuple so that we can return an iterable
            # of bytestrings. (Technically, a bytestring is also an iterable
            # of bytestrings, but iterating through each character is not performant.)
            cur_file = (self.get_file_text(file_id),)
            yield identifier, cur_file
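    # Illustrative sketch, not part of the original source: the caller picks
    # its own opaque identifiers (here, output paths) and reassembles each
    # file from the bytes iterator; ``tree`` and the ids are assumptions.
    #
    #   wanted = [('file-id-1', 'out/a.txt'), ('file-id-2', 'out/b.txt')]
    #   for identifier, bytes_iter in tree.iter_files_bytes(wanted):
    #       open(identifier, 'wb').write(''.join(bytes_iter))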
    def get_symlink_target(self, file_id):
        """Get the target for a given file_id.
        It is assumed that the caller already knows that file_id is referencing a symlink.
        :param file_id: Handle for the symlink entry.
        :return: The path the symlink points to.
        raise NotImplementedError(self.get_symlink_target)
    def get_canonical_inventory_paths(self, paths):
        """Like get_canonical_inventory_path() but works on multiple items.
        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case insensitively.
        return list(self._yield_canonical_inventory_paths(paths))
    def get_canonical_inventory_path(self, path):
        """Returns the first inventory item that case-insensitively matches path.
        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches case-insensitively, it is implementation
        defined which is returned.
        If no path matches case-insensitively, the input path is returned, but
        with as many path entries that do exist changed to their canonical form.
        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.
        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        return self._yield_canonical_inventory_paths([path]).next()
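    # Illustrative sketch, not part of the original source: normalising a
    # user-supplied path to the casing recorded in the inventory; ``tree``
    # is a hypothetical Tree on a case-insensitive filesystem.
    #
    #   real_path = tree.get_canonical_inventory_path('readme.TXT')
    #   # -> 'README.txt' if that is the versioned spelling, else the input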
    def _yield_canonical_inventory_paths(self, paths):
        # First, if the path as specified exists exactly, just use it.
        if self.path2id(path) is not None:
        cur_id = self.get_root_id()
        bit_iter = iter(path.split("/"))
        for child in self.iter_children(cur_id):
            child_base = os.path.basename(self.id2path(child))
            if child_base.lower() == lelt:
                cur_path = osutils.pathjoin(cur_path, child_base)
            # before a change is committed we can see this error...
            # got to the end of this directory and no entries matched.
            # Return what matched so far, plus the rest as specified.
            cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
    def get_root_id(self):
        """Return the file_id for the root of this tree."""
        raise NotImplementedError(self.get_root_id)
    def annotate_iter(self, file_id,
        default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.
        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.
        :param file_id: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of this value.
        raise NotImplementedError(self.annotate_iter)
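    # Illustrative sketch, not part of the original source: printing an
    # annotated listing for a hypothetical ``file_id`` in ``tree``.
    #
    #   for revision_id, line in tree.annotate_iter(file_id):
    #       print '%s | %s' % (revision_id[:8], line.rstrip('\n'))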
    def _get_plan_merge_data(self, file_id, other, base):
        from bzrlib import merge, versionedfile
        vf = versionedfile._PlanMergeVersionedFile(file_id)
        last_revision_a = self._get_file_revision(file_id, vf, 'this:')
        last_revision_b = other._get_file_revision(file_id, vf, 'other:')
        last_revision_base = None
        last_revision_base = base._get_file_revision(file_id, vf, 'base:')
        return vf, last_revision_a, last_revision_b, last_revision_base
    def plan_file_merge(self, file_id, other, base=None):
        """Generate a merge plan based on annotations.
        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,
            last_revision_base)
    def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.
        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
            last_revision_base)
    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)
    def _file_revision(revision_tree, file_id):
        """Determine the revision associated with a file in a given tree."""
        revision_tree.lock_read()
        return revision_tree.inventory[file_id].revision
        revision_tree.unlock()
    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                self.get_file(file_id).readlines())
            repo = self.branch.repository
        last_revision = self._file_revision(self, file_id)
        base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
    inventory = property(_get_inventory,
        doc="Inventory of this Tree")
    def _check_retrieved(self, ie, f):
        fp = fingerprint_file(f)
        if ie.text_size != None:
        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise BzrError("mismatched size for file %r in %r" % (ie.file_id, self._store),
                    ["inventory expects %d bytes" % ie.text_size,
                    "file is actually %s" % fp['sha1'],
                    "store is probably damaged/corrupt"])
    def print_file(self, file_id):
        """Print file with id `file_id` to stdout."""
        sys.stdout.write(self.get_file_text(file_id))
    def export(self, dest, format='dir', root=None):
        """Export this tree."""
        exporter = exporters[format]
        from bzrlib.errors import BzrCommandError
        raise BzrCommandError("export format %r not supported" % format)
        exporter(self, dest, root)
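    # Illustrative sketch, not part of the original source: exporting a
    # hypothetical ``tree`` using the exporters registered later in this
    # module ('dir', 'tar', 'tgz', 'tbz2').
    #
    #   tree.export('/tmp/snapshot')                # default 'dir' format
    #   tree.export('snapshot.tgz', format='tgz')   # gzip-compressed tarball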
class RevisionTree(Tree):
    """Tree viewing a previous revision.
    File text can be retrieved from the text store.
    TODO: Some kind of `__repr__` method, but a good one
        probably means knowing the branch and revision number,
        or at least passing a description to the constructor.
    def __init__(self, weave_store, inv, revision_id):
        self._weave_store = weave_store
        self._inventory = inv
        self._revision_id = revision_id
    def get_weave(self, file_id):
        return self._weave_store.get_weave(file_id)
    def get_file_text(self, file_id):
        ie = self._inventory[file_id]
        weave = self.get_weave(file_id)
        idx = weave.lookup(ie.text_version)
        content = weave.get_text(idx)
        if len(content) != ie.text_size:
            raise BzrCheckError('mismatched size on revision %s of file %s: '
                % (self._revision_id, file_id, len(content),
    def get_file(self, file_id):
        return StringIO(self.get_file_text(file_id))
    def get_file_size(self, file_id):
        return self._inventory[file_id].text_size
    def get_file_sha1(self, file_id):
        ie = self._inventory[file_id]
        if ie.kind == "file":
    def has_filename(self, filename):
        return bool(self.inventory.path2id(filename))
    def list_files(self):
        # The only files returned by this are those from the version
        for path, entry in self.inventory.iter_entries():
            yield path, 'V', entry.kind, entry.file_id
class EmptyTree(Tree):
        self._inventory = Inventory()
    def has_filename(self, filename):
    def path2id(self, path):
        """Return the id for path in this tree."""
        return self._inventory.path2id(path)
    def paths2ids(self, paths, trees=[], require_versioned=True):
        """Return all the ids that can be reached by walking from paths.
        Each path is looked up in this tree and any extras provided in
        trees, and this is repeated recursively: the children in an extra tree
        of a directory that has been renamed under a provided path in this tree
        are all returned, even if none exist under a provided path in this
        tree, and vice versa.
        :param paths: An iterable of paths to start converting to ids from.
            Alternatively, if paths is None, no ids should be calculated and None
            will be returned. This is offered to make calling the api unconditional
            for code that *might* take a list of files.
        :param trees: Additional trees to consider.
        :param require_versioned: If False, do not raise NotVersionedError if
            an element of paths is not versioned in this tree and all of trees.
        return find_ids_across_trees(paths, [self] + list(trees), require_versioned)
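    # Illustrative sketch, not part of the original source: mapping paths to
    # ids across this tree and a basis tree; ``wt`` and ``basis`` are
    # assumptions for the example.
    #
    #   ids = wt.paths2ids(['doc', 'README'], trees=[basis])
    #   # a set of file_ids covering the paths and their children, or None
    #   # when the supplied path list was None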
    def iter_children(self, file_id):
        entry = self.iter_entries_by_dir([file_id]).next()[1]
        for child in getattr(entry, 'children', {}).itervalues():
    def revision_tree(self, revision_id):
        """Obtain a revision tree for the revision revision_id.
        The intention of this method is to allow access to possibly cached
        tree data. Implementors of this method should raise NoSuchRevision if
        the tree is not locally available, even if they could obtain the
        tree via a repository or some other means. Callers are responsible
        for finding the ultimate source for a revision tree.
        :param revision_id: The revision_id of the requested tree.
        :raises: NoSuchRevision if the tree cannot be obtained.
        raise errors.NoSuchRevisionInTree(self, revision_id)
        """What files are present in this tree and unknown.
        :return: an iterator over the unknown files.
    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.
        :return: set of paths.
        # NB: we specifically *don't* call self.has_filename, because for
        # WorkingTrees that can indicate files that exist on disk but that are not versioned.
        pred = self.inventory.has_filename
        return set((p for p in paths if not pred(p)))
    def walkdirs(self, prefix=""):
        """Walk the contents of this tree from path down.
        This yields all the data about the contents of a directory at a time.
        After each directory has been yielded, if the caller has mutated the
        list to exclude some directories, they are then not descended into.
        The data yielded is of the form:
        ((directory-relpath, directory-path-from-root, directory-fileid),
        [(relpath, basename, kind, lstat, path_from_tree_root, file_id,
          versioned_kind), ...]),
        - directory-relpath is the containing dirs relpath from prefix
        - directory-path-from-root is the containing dirs path from /
        - directory-fileid is the id of the directory if it is versioned.
        - relpath is the relative path within the subtree being walked.
        - basename is the basename
        - kind is the kind of the file now. If unknown then the file is not
          present within the tree - but it may be recorded as versioned. See versioned_kind.
        - lstat is the stat data *if* the file was statted.
        - path_from_tree_root is the path from the root of the tree.
        - file_id is the file_id if the entry is versioned.
        - versioned_kind is the kind of the file as last recorded in the
          versioning system. If 'unknown' the file is not versioned.
        One of 'kind' and 'versioned_kind' must not be 'unknown'.
        :param prefix: Start walking from prefix within the tree rather than
            at the root. This allows one to walk a subtree but get paths that are
            relative to a tree rooted higher up.
        :return: an iterator over the directory data.
        raise NotImplementedError(self.walkdirs)
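    # Illustrative sketch, not part of the original source: walking a
    # hypothetical ``tree`` and pruning directories by mutating the yielded
    # entry list, as described above.
    #
    #   for (dir_relpath, dir_path, dir_id), entries in tree.walkdirs():
    #       entries[:] = [e for e in entries if e[1] != 'build']
    #       for relpath, basename, kind, lstat, path, file_id, vkind in entries:
    #           print kind, relpath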
    def supports_content_filtering(self):
    def list_files(self):
        if False: # just to make it a generator
    def __contains__(self, file_id):
        return file_id in self._inventory
    def get_file_sha1(self, file_id):
        assert self._inventory[file_id].kind == "root_directory"
    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.
        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.
        :param path: path relative to the root of the tree
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
        path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            note("*** %s content-filter: %s => %r" % (path, prefs, stk))
    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.
        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.
        :return: None if content filtering is not supported by this tree.
        if self.supports_content_filtering():
            return lambda path, file_id: \
                self._content_filter_stack(path, file_id)
    def iter_search_rules(self, path_names, pref_names=None,
        _default_searcher=None):
        """Find the preferences for filenames in a tree.
        :param path_names: an iterable of paths to find attributes for.
            Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
            See _RulesSearcher.get_items for details on the tuple sequence.
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            for path in path_names:
                yield searcher.get_items(path)
    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
######################################################################
    new_name = new_inv.id2path(file_id)
    if old_name != new_name:
        yield (old_name, new_name)
######################################################################
def dir_exporter(tree, dest, root):
    """Export this tree to a new directory.
    `dest` should not exist, and will be created holding the
    contents of this tree.
    TODO: To handle subdirectories we need to create the
    :note: If the export fails, the destination directory will be
        left in a half-assed state.
    mutter('export version %r' % tree)
    for dp, ie in inv.iter_entries():
        fullpath = appendpath(dest, dp)
        if kind == 'directory':
            pumpfile(tree.get_file(ie.file_id), file(fullpath, 'wb'))
            raise BzrError("don't know how to export {%s} of kind %r" % (ie.file_id, kind))
        mutter(" export {%s} kind %s to %s" % (ie.file_id, kind, fullpath))
exporters['dir'] = dir_exporter
def get_root_name(dest):
    """Get just the root name for a tarball.
    >>> get_root_name('mytar.tar')
    >>> get_root_name('mytar.tar.bz2')
    >>> get_root_name('tar.tar.tar.tgz')
    >>> get_root_name('bzr-0.0.5.tar.gz')
    >>> get_root_name('a/long/path/mytar.tgz')
    >>> get_root_name('../parent/../dir/other.tbz2')
    endings = ['.tar', '.tar.gz', '.tgz', '.tar.bz2', '.tbz2']
    dest = os.path.basename(dest)
    if dest.endswith(end):
        return dest[:-len(end)]
def tar_exporter(tree, dest, root, compression=None):
    """Export this tree to a new tar file.
    `dest` will be created holding the contents of this tree; if it
    already exists, it will be clobbered, like with "tar -c".
    from time import time
    compression = str(compression or '')
    root = get_root_name(dest)
    ball = tarfile.open(dest, 'w:' + compression)
    except tarfile.CompressionError, e:
        raise BzrError(str(e))
    mutter('export version %r' % tree)
    for dp, ie in inv.iter_entries():
        mutter(" export {%s} kind %s to %s" % (ie.file_id, ie.kind, dest))
        item = tarfile.TarInfo(os.path.join(root, dp))
        # TODO: would be cool to actually set it to the timestamp of the
        # revision it was last changed
        if ie.kind == 'directory':
            item.type = tarfile.DIRTYPE
        elif ie.kind == 'file':
            item.type = tarfile.REGTYPE
            fileobj = tree.get_file(ie.file_id)
            item.size = _find_file_size(fileobj)
            raise BzrError("don't know how to export {%s} of kind %r" %
                (ie.file_id, ie.kind))
        ball.addfile(item, fileobj)
exporters['tar'] = tar_exporter
def tgz_exporter(tree, dest, root):
    tar_exporter(tree, dest, root, compression='gz')
exporters['tgz'] = tgz_exporter
def tbz_exporter(tree, dest, root):
    tar_exporter(tree, dest, root, compression='bz2')
exporters['tbz2'] = tbz_exporter
def _find_file_size(fileobj):
    offset = fileobj.tell()
    size = fileobj.tell()
    # gzip doesn't accept second argument to seek()
    nread = len(fileobj.read())
def find_ids_across_trees(filenames, trees, require_versioned=True):
    """Find the ids corresponding to specified filenames.
    All matches in all trees will be used, and all children of matched
    directories will be used.
    :param filenames: The filenames to find file_ids for (if None, returns None)
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames and their children.
    specified_path_ids = _find_ids_across_trees(filenames, trees,
        require_versioned=require_versioned)
    return _find_children_across_trees(specified_path_ids, trees)
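# Illustrative sketch, not part of the original source: collecting the ids
# for two paths across a working tree and its basis tree; ``wt`` and
# ``basis`` are assumptions for the example.
#
#   ids = find_ids_across_trees(['doc', 'NEWS'], [wt, basis])
#   # every matching id, plus the ids of all children of matched directories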
def _find_ids_across_trees(filenames, trees, require_versioned):
    """Find the ids corresponding to specified filenames.
    All matches in all trees will be used, but subdirectories are not scanned.
    :param filenames: The filenames to find file_ids for
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames
    interesting_ids = set()
    for tree_path in filenames:
        file_id = tree.path2id(tree_path)
        if file_id is not None:
            interesting_ids.add(file_id)
        not_versioned.append(tree_path)
    if len(not_versioned) > 0 and require_versioned:
        raise errors.PathsNotVersionedError(not_versioned)
    return interesting_ids
def _find_children_across_trees(specified_ids, trees):
    """Return a set including specified ids and their children.
    All matches in all trees will be used.
    :param trees: The trees to find file_ids within
    :return: a set containing all specified ids and their children
    interesting_ids = set(specified_ids)
    pending = interesting_ids
    # now handle children of interesting ids
    # we loop so that we handle all children of each id in both trees
    while len(pending) > 0:
        for file_id in pending:
            if not tree.has_id(file_id):
            for child_id in tree.iter_children(file_id):
                if child_id not in interesting_ids:
                    new_pending.add(child_id)
        interesting_ids.update(new_pending)
        pending = new_pending
    return interesting_ids
class InterTree(InterObject):
    """This class represents operations taking place between two Trees.
    Its instances have methods like 'compare' and contain references to the
    source and target trees these operations are to be carried out on.
    Clients of bzrlib should not need to use InterTree directly, rather they
    should use the convenience methods on Tree such as 'Tree.compare()' which
    will pass through to InterTree as appropriate.
    def compare(self, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):
        """Return the changes from source to target.
        :return: A TreeDelta.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children of
            matched directories are included.
        :param want_unchanged: An optional boolean requesting the inclusion of
            unchanged entries in the result.
        :param extra_trees: An optional list of additional trees to use when
            mapping the contents of specific_files (paths) to file_ids.
        :param require_versioned: An optional boolean (defaults to False). When
            supplied and True all the 'specific_files' must be versioned, or
            a PathsNotVersionedError will be thrown.
        :param want_unversioned: Scan for unversioned paths.
        # NB: show_status depends on being able to pass in non-versioned files
        # and report them as unknown
        trees = (self.source,)
        if extra_trees is not None:
            trees = trees + tuple(extra_trees)
        # target is usually the newer tree:
        specific_file_ids = self.target.paths2ids(specific_files, trees,
            require_versioned=require_versioned)
        if specific_files and not specific_file_ids:
            # All files are unversioned, so just return an empty delta
            # _compare_trees would think we want a complete delta
            result = delta.TreeDelta()
            fake_entry = InventoryFile('unused', 'unused', 'unused')
            result.unversioned = [(path, None,
                self.target._comparison_data(fake_entry, path)[0]) for path in
                specific_files]
        return delta._compare_trees(self.source, self.target, want_unchanged,
            specific_files, include_root, extra_trees=extra_trees,
            require_versioned=require_versioned,
            want_unversioned=want_unversioned)
    def iter_changes(self, include_unchanged=False,
        specific_files=None, pb=None, extra_trees=[],
        require_versioned=True, want_unversioned=False):
        """Generate an iterator of changes between trees.
        (file_id, (path_in_source, path_in_target),
        changed_content, versioned, parent, name, kind, executable)
        Changed_content is True if the file's content has changed. This
        includes changes to its kind, and to a symlink's target.
        versioned, parent, name, kind, executable are tuples of (from, to).
        If a file is missing in a tree, its kind is None.
        Iteration is done in parent-to-child order, relative to the target tree.
        There is no guarantee that all paths are in sorted order: the
        requirement to expand the search due to renames may result in children
        that should be found early being found late in the search, after
        lexically later results have been returned.
        :param require_versioned: Raise errors.PathsNotVersionedError if a
            path in the specific_files list is not versioned in one of
            source, target or extra_trees.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        lookup_trees = [self.source]
        lookup_trees.extend(extra_trees)
        if specific_files == []:
            specific_file_ids = []
            specific_file_ids = self.target.paths2ids(specific_files,
                lookup_trees, require_versioned=require_versioned)
        all_unversioned = sorted([(p.split('/'), p) for p in
            if specific_files is None or
                osutils.is_inside_any(specific_files, p)])
        all_unversioned = deque(all_unversioned)
        all_unversioned = deque()
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable values when execute is not supported.
        fake_entry = InventoryFile('unused', 'unused', 'unused')
        for to_path, to_entry in to_entries_by_dir:
            while all_unversioned and all_unversioned[0][0] < to_path.split('/'):
                unversioned_path = all_unversioned.popleft()
                to_kind, to_executable, to_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, unversioned_path[0][-1]),
                    (None, to_executable))
            file_id = to_entry.file_id
            to_paths[file_id] = to_path
            changed_content = False
            from_path, from_entry = from_data.get(file_id, (None, None))
            from_versioned = (from_entry is not None)
            if from_entry is not None:
                from_versioned = True
                from_name = from_entry.name
                from_parent = from_entry.parent_id
                from_kind, from_executable, from_stat = \
                    self.source._comparison_data(from_entry, from_path)
                from_versioned = False
                from_executable = None
            versioned = (from_versioned, True)
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(to_entry, to_path)
            kind = (from_kind, to_kind)
            if kind[0] != kind[1]:
                changed_content = True
            elif from_kind == 'file':
                if (self.source.get_file_sha1(file_id, from_path, from_stat) !=
                    self.target.get_file_sha1(file_id, to_path, to_stat)):
                    changed_content = True
            elif from_kind == 'symlink':
                if (self.source.get_symlink_target(file_id) !=
                    self.target.get_symlink_target(file_id)):
                    changed_content = True
            # XXX: Yes, the indentation below is wrong. But fixing it broke
            # test_merge.TestMergerEntriesLCAOnDisk.
            # test_nested_tree_subtree_renamed_and_modified. We'll wait for
            # the fix from bzr.dev -- vila 2009026
                elif from_kind == 'tree-reference':
                    if (self.source.get_reference_revision(file_id, from_path)
                        != self.target.get_reference_revision(file_id, to_path)):
                        changed_content = True
            parent = (from_parent, to_entry.parent_id)
            name = (from_name, to_entry.name)
            executable = (from_executable, to_executable)
            pb.update('comparing files', entry_count, num_entries)
            if (changed_content is not False or versioned[0] != versioned[1]
                or parent[0] != parent[1] or name[0] != name[1] or
                executable[0] != executable[1] or include_unchanged):
                yield (file_id, (from_path, to_path), changed_content,
                    versioned, parent, name, kind, executable)
        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(fake_entry, unversioned_path[1])
            yield (None, (None, unversioned_path[1]), True, (False, False),
                (None, unversioned_path[0][-1]),
                (None, to_executable))
        def get_to_path(to_entry):
            if to_entry.parent_id is None:
                to_path = '' # the root
            if to_entry.parent_id not in to_paths:
                return get_to_path(self.target.inventory[to_entry.parent_id])
            to_path = osutils.pathjoin(to_paths[to_entry.parent_id],
            to_paths[to_entry.file_id] = to_path
        for path, from_entry in from_entries_by_dir:
            file_id = from_entry.file_id
            if file_id in to_paths:
            if not file_id in self.target.all_file_ids():
                # common case - paths we have not emitted are not present in
                to_path = get_to_path(self.target.inventory[file_id])
            pb.update('comparing files', entry_count, num_entries)
            versioned = (True, False)
            parent = (from_entry.parent_id, None)
            name = (from_entry.name, None)
            from_kind, from_executable, stat_value = \
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            yield (file_id, (path, to_path), changed_content, versioned, parent,
                name, kind, executable)
class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""
    # Note: This could be written to not assume you can do out-of-order
    # lookups. Instead any nodes that don't match in all trees could be
    # marked as 'deferred', and then returned in the final cleanup loop.
    # For now, I think it is "nicer" to return things as close to the
    # "master_tree" order as we can.
    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.
        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.
        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second pass.
        :param other_trees: A list of other trees to walk simultaneously.
        self._master_tree = master_tree
        self._other_trees = other_trees
        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()
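    # Illustrative sketch, not part of the original source, assuming the walk
    # entry point is iter_all() as in bzrlib: each yielded item pairs the
    # master entry with one (path, ie) value per other tree.
    #
    #   walker = MultiWalker(master_tree, [basis_tree, other_tree])
    #   for path, file_id, master_ie, other_values in walker.iter_all():
    #       basis_path, basis_ie = other_values[0]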
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.
        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        path, ie = iterator.next()
        except StopIteration:
            return False, None, None
            return True, path, ie
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.
        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.
        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal,
            and a positive number if ``path2`` sorts first
        # Shortcut this special case
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
            MultiWalker._path_to_key(path2))
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)
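    # Illustrative sketch, not part of the original source (assumes
    # _path_to_key is usable as a plain function, e.g. a staticmethod):
    # children of a directory sort together and before any grandchildren.
    #
    #   paths = [u'a/b/c', u'a-b', u'a/b', u'a']
    #   paths.sort(key=MultiWalker._path_to_key)
    #   # -> [u'a', u'a-b', u'a/b', u'a/b/c']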
    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.
        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed
        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
        if cur_path is None:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
        self._finish_others()
        for result in self._walk_others():
    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.
        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't match.
        master_iterator = self._master_tree.iter_entries_by_dir()
        other_walkers = [other.iter_entries_by_dir()
            for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]
        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed
        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
            file_id = master_ie.file_id
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                        # This record isn't in the normal order, see if it exists at all
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                        other_ie))
            other_entries = next_other_entries
            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra
    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries
    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values