    Trees can be compared, etc, regardless of whether they are working
    trees or versioned trees.
    """

    def changes_from(self, other, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):
        """Return a TreeDelta of the changes from other to this tree.

        :param other: A tree to compare with.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children of
            matched directories are included.
        :param want_unchanged: An optional boolean requesting the inclusion of
            unchanged entries in the result.
        :param extra_trees: An optional list of additional trees to use when
            mapping the contents of specific_files (paths) to file_ids.
        :param require_versioned: An optional boolean (defaults to False). When
            supplied and True all the 'specific_files' must be versioned, or
            a PathsNotVersionedError will be thrown.
        :param want_unversioned: Scan for unversioned paths.

        The comparison will be performed by an InterTree object looked up on
        self and other.
        """
        # Martin observes that Tree.changes_from returns a TreeDelta and this
        # may confuse people, because the class name of the returned object is
        # a synonym of the object referenced in the method name.
        return InterTree.get(other, self).compare(
            want_unchanged=want_unchanged,
            specific_files=specific_files,
            extra_trees=extra_trees,
            require_versioned=require_versioned,
            include_root=include_root,
            want_unversioned=want_unversioned,
            )
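
    # Illustrative sketch (not part of bzrlib): typical use of changes_from to
    # diff a working tree against its basis tree. The names `wt`, `basis` and
    # `delta` are hypothetical; TreeDelta exposes lists such as .added,
    # .removed, .renamed and .modified.
    #
    #   from bzrlib import workingtree
    #   wt = workingtree.WorkingTree.open('.')
    #   wt.lock_read()
    #   try:
    #       basis = wt.basis_tree()
    #       delta = wt.changes_from(basis, want_unchanged=False)
    #       for path, file_id, kind in delta.added:
    #           print 'added %s (%s)' % (path, kind)
    #   finally:
    #       wt.unlock()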

    def iter_changes(self, from_tree, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=None,
                     require_versioned=True, want_unversioned=False):
        """See InterTree.iter_changes"""
        intertree = InterTree.get(from_tree, self)
        return intertree.iter_changes(include_unchanged, specific_files, pb,
            extra_trees, require_versioned, want_unversioned=want_unversioned)
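
    # Illustrative sketch (not part of bzrlib): consuming the change tuples
    # yielded by iter_changes. `new_tree` and `old_tree` are hypothetical Tree
    # instances; each tuple unpacks as documented in InterTree.iter_changes.
    #
    #   for (file_id, (old_path, new_path), changed_content, versioned,
    #        parent, name, kind, executable) in new_tree.iter_changes(old_tree):
    #       if changed_content:
    #           print 'content changed: %s -> %s' % (old_path, new_path)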

    def conflicts(self):
        """Get a list of the conflicts in the tree.

        Each conflict is an instance of bzrlib.conflicts.Conflict.
        """
        return _mod_conflicts.ConflictList()

    def extras(self):
        """For trees that can have unversioned files, return all such paths."""
        return []

    def get_parent_ids(self):
        """Get the parent ids for this tree.

        :return: a list of parent ids. [] is returned to indicate
            a tree with no parents.
        :raises: BzrError if the parents are not known.
        """
        raise NotImplementedError(self.get_parent_ids)

    def has_filename(self, filename):
        """True if the tree has given filename."""
        raise NotImplementedError(self.has_filename)

    def has_id(self, file_id):
        return self.inventory.has_id(file_id)

    def __contains__(self, file_id):
        return self.has_id(file_id)

    def has_or_had_id(self, file_id):
        if file_id == self.inventory.root.file_id:
            return True
        return self.inventory.has_id(file_id)

    def is_ignored(self, filename):
        """Check whether the filename is ignored by this tree.

        :param filename: The relative filename within the tree.
        :return: True if the filename is ignored.
        """
        return False

    def __iter__(self):
        return iter(self.inventory)

    def all_file_ids(self):
        """Iterate through all file ids, including ids for missing files."""
        return set(self.inventory)

    def id2path(self, file_id):
        """Return the path for a file id."""
        return self.inventory.id2path(file_id)

    def is_control_filename(self, filename):
        """True if filename is the name of a control file in this tree.

        :param filename: A filename within the tree. This is a relative path
            from the root of this tree.

        This is true IF and ONLY IF the filename is part of the meta data
        that bzr controls in this tree. I.E. a random .bzr directory placed
        on disk will not be a control file for this tree.
        """
        return self.bzrdir.is_control_filename(filename)

    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        """Walk the tree in 'by_dir' order.

        This will yield each entry in the tree as a (path, entry) tuple.
        The order that they are yielded is:

        Directories are walked in a depth-first lexicographical order,
        however, whenever a directory is reached, all of its direct child
        nodes are yielded in lexicographical order before yielding the
        grandchildren.

        For example, in the tree::

           a/
             b/
               c
             d/
               e
           f/
             g

        The yield order (ignoring root) would be::

          a, f, a/b, a/d, a/b/c, a/d/e, f/g

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        """
        return self.inventory.iter_entries_by_dir(
            specific_file_ids=specific_file_ids, yield_parents=yield_parents)
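
    # Illustrative sketch (not part of bzrlib): walking a tree in 'by_dir'
    # order. `tree` is a hypothetical Tree instance; each entry is an
    # InventoryEntry with .kind and .file_id attributes.
    #
    #   for path, entry in tree.iter_entries_by_dir():
    #       print '%-10s %s' % (entry.kind, path)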

    def iter_references(self):
        if self.supports_tree_reference():
            for path, entry in self.iter_entries_by_dir():
                if entry.kind == 'tree-reference':
                    yield path, entry.file_id

    def kind(self, file_id):
        raise NotImplementedError("Tree subclass %s must implement kind"
            % self.__class__.__name__)

    def stored_kind(self, file_id):
        """File kind stored for this file_id.

        May not match kind on disk for working trees. Always available
        for versioned files, even when the file itself is missing.
        """
        return self.kind(file_id)

    def path_content_summary(self, path):
        """Get a summary of the information about path.

        All the attributes returned are for the canonical form, not the
        convenient form (if content filters are in use.)

        :param path: A relative path within the tree.
        :return: A tuple containing kind, size, exec, sha1-or-link.
            Kind is always present (see tree.kind()).
            size is present if kind is file and the size of the
            canonical form can be cheaply determined, None otherwise.
            exec is None unless kind is file and the platform supports the 'x'
            bit.
            sha1-or-link is the link target if kind is symlink, or the sha1 if
            it can be obtained without reading the file.
        """
        raise NotImplementedError(self.path_content_summary)

    def get_reference_revision(self, file_id, path=None):
        raise NotImplementedError("Tree subclass %s must implement "
                                  "get_reference_revision"
            % self.__class__.__name__)

    def _comparison_data(self, entry, path):
        """Return a tuple of kind, executable, stat_value for a file.

        entry may be None if there is no inventory entry for the file, but
        path must always be supplied.

        kind is None if there is no file present (even if an inventory id is
        present). executable is False for non-file entries.
        """
        raise NotImplementedError(self._comparison_data)

    def _file_size(self, entry, stat_value):
        raise NotImplementedError(self._file_size)

    def _get_inventory(self):
        return self._inventory

    def get_file(self, file_id, path=None):
        """Return a file object for the file file_id in the tree.

        If both file_id and path are defined, it is implementation defined as
        to which one is used.
        """
        raise NotImplementedError(self.get_file)

    def get_file_with_stat(self, file_id, path=None):
        """Get a file handle and stat object for file_id.

        The default implementation returns (self.get_file, None) for backwards
        compatibility.

        :param file_id: The file id to read.
        :param path: The path of the file, if it is known.
        :return: A tuple (file_handle, stat_value_or_None). If the tree has
            no stat facility, or need for a stat cache feedback during commit,
            it may return None for the second element of the tuple.
        """
        return (self.get_file(file_id, path), None)

    def get_file_text(self, file_id, path=None):
        """Return the byte content of a file.

        :param file_id: The file_id of the file.
        :param path: The path of the file.
        If both file_id and path are supplied, an implementation may use
        either one.
        """
        my_file = self.get_file(file_id, path)
        try:
            return my_file.read()
        finally:
            my_file.close()

    def get_file_lines(self, file_id, path=None):
        """Return the content of a file, as lines.

        :param file_id: The file_id of the file.
        :param path: The path of the file.
        If both file_id and path are supplied, an implementation may use
        either one.
        """
        return osutils.split_lines(self.get_file_text(file_id, path))

    def get_file_mtime(self, file_id, path=None):
        """Return the modification time for a file.

        :param file_id: The handle for this file.
        :param path: The path that this file can be found at.
            These must point to the same object.
        """
        raise NotImplementedError(self.get_file_mtime)

    def get_file_size(self, file_id):
        """Return the size of a file in bytes.

        This applies only to regular files. If invoked on directories or
        symlinks, it will return None.
        :param file_id: The file-id of the file
        """
        raise NotImplementedError(self.get_file_size)

    def get_file_by_path(self, path):
        return self.get_file(self._inventory.path2id(path), path)

    def iter_files_bytes(self, desired_files):
        """Iterate through file contents.

        Files will not necessarily be returned in the order they occur in
        desired_files. No specific order is guaranteed.

        Yields pairs of identifier, bytes_iterator. identifier is an opaque
        value supplied by the caller as part of desired_files. It should
        uniquely identify the file version in the caller's context. (Examples:
        an index number or a TreeTransform trans_id.)

        bytes_iterator is an iterable of bytestrings for the file. The
        kind of iterable and length of the bytestrings are unspecified, but for
        this implementation, it is a tuple containing a single bytestring with
        the complete text of the file.

        :param desired_files: a list of (file_id, identifier) pairs
        """
        for file_id, identifier in desired_files:
            # We wrap the string in a tuple so that we can return an iterable
            # of bytestrings. (Technically, a bytestring is also an iterable
            # of bytestrings, but iterating through each character is not
            # performant.)
            cur_file = (self.get_file_text(file_id),)
            yield identifier, cur_file
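
    # Illustrative sketch (not part of bzrlib): gathering several file texts
    # at once with iter_files_bytes. `tree` and the (file_id, name) pairs are
    # hypothetical.
    #
    #   wanted = [('file-id-1', 'README'), ('file-id-2', 'setup.py')]
    #   texts = {}
    #   for name, chunks in tree.iter_files_bytes(wanted):
    #       texts[name] = ''.join(chunks)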

    def get_symlink_target(self, file_id):
        """Get the target for a given file_id.

        It is assumed that the caller already knows that file_id is referencing
        a symlink.
        :param file_id: Handle for the symlink entry.
        :return: The path the symlink points to.
        """
        raise NotImplementedError(self.get_symlink_target)

    def get_canonical_inventory_paths(self, paths):
        """Like get_canonical_inventory_path() but works on multiple items.

        :param paths: A sequence of paths relative to the root of the tree.
        :return: A list of paths, with each item the corresponding input path
            adjusted to account for existing elements that match case
            insensitively.
        """
        return list(self._yield_canonical_inventory_paths(paths))

    def get_canonical_inventory_path(self, path):
        """Returns the first inventory item that case-insensitively matches path.

        If a path matches exactly, it is returned. If no path matches exactly
        but more than one path matches case-insensitively, it is implementation
        defined which is returned.

        If no path matches case-insensitively, the input path is returned, but
        with as many path entries that do exist changed to their canonical
        form.

        If you need to resolve many names from the same tree, you should
        use get_canonical_inventory_paths() to avoid O(N) behaviour.

        :param path: A path relative to the root of the tree.
        :return: The input path adjusted to account for existing elements
            that match case insensitively.
        """
        return self._yield_canonical_inventory_paths([path]).next()
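
    # Illustrative sketch (not part of bzrlib): resolving a case-insensitive
    # path to its canonical, versioned spelling. `tree` is hypothetical.
    #
    #   canonical = tree.get_canonical_inventory_path('SRC/Main.C')
    #   # e.g. 'src/main.c' if that is how the file is versioned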

    def _yield_canonical_inventory_paths(self, paths):
        for path in paths:
            # First, if the path as specified exists exactly, just use it.
            if self.path2id(path) is not None:
                yield path
                continue
            cur_id = self.get_root_id()
            cur_path = ''
            bit_iter = iter(path.split("/"))
            for elt in bit_iter:
                lelt = elt.lower()
                new_path = None
                for child in self.iter_children(cur_id):
                    try:
                        # XXX: it seem like if the child is known to be in the
                        # tree, we shouldn't need to go from its id back to
                        # its path -- mbp 2010-02-11
                        #
                        # XXX: it seems like we could be more efficient
                        # by just directly looking up the original name and
                        # only then searching all children; also by not
                        # chopping paths so much. -- mbp 2010-02-11
                        child_base = os.path.basename(self.id2path(child))
                        if (child_base == elt):
                            # if we found an exact match, we can stop now; if
                            # we found an approximate match we need to keep
                            # searching because there might be an exact match
                            # later.
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                            break
                        elif child_base.lower() == lelt:
                            cur_id = child
                            new_path = osutils.pathjoin(cur_path, child_base)
                    except errors.NoSuchId:
                        # before a change is committed we can see this error...
                        continue
                if new_path:
                    cur_path = new_path
                else:
                    # got to the end of this directory and no entries matched.
                    # Return what matched so far, plus the rest as specified.
                    cur_path = osutils.pathjoin(cur_path, elt, *list(bit_iter))
                    break
            yield cur_path

    def get_root_id(self):
        """Return the file_id for the root of this tree."""
        raise NotImplementedError(self.get_root_id)

    def annotate_iter(self, file_id,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """Return an iterator of revision_id, line tuples.

        For working trees (and mutable trees in general), the special
        revision_id 'current:' will be used for lines that are new in this
        tree, e.g. uncommitted changes.
        :param file_id: The file to produce an annotated version from
        :param default_revision: For lines that don't match a basis, mark them
            with this revision id. Not all implementations will make use of
            this value.
        """
        raise NotImplementedError(self.annotate_iter)

    def _get_plan_merge_data(self, file_id, other, base):
        from bzrlib import versionedfile
        vf = versionedfile._PlanMergeVersionedFile(file_id)
        last_revision_a = self._get_file_revision(file_id, vf, 'this:')
        last_revision_b = other._get_file_revision(file_id, vf, 'other:')
        if base is None:
            last_revision_base = None
        else:
            last_revision_base = base._get_file_revision(file_id, vf, 'base:')
        return vf, last_revision_a, last_revision_b, last_revision_base

    def plan_file_merge(self, file_id, other, base=None):
        """Generate a merge plan based on annotations.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_merge(last_revision_a, last_revision_b,
                             last_revision_base)

    def plan_file_lca_merge(self, file_id, other, base=None):
        """Generate a merge plan based on lca-newness.

        If the file contains uncommitted changes in this tree, they will be
        attributed to the 'current:' pseudo-revision. If the file contains
        uncommitted changes in the other tree, they will be assigned to the
        'other:' pseudo-revision.
        """
        data = self._get_plan_merge_data(file_id, other, base)
        vf, last_revision_a, last_revision_b, last_revision_base = data
        return vf.plan_lca_merge(last_revision_a, last_revision_b,
                                 last_revision_base)

    def _iter_parent_trees(self):
        """Iterate through parent trees, defaulting to Tree.revision_tree."""
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self.repository.revision_tree(revision_id)

    @staticmethod
    def _file_revision(revision_tree, file_id):
        """Determine the revision associated with a file in a given tree."""
        revision_tree.lock_read()
        try:
            return revision_tree.inventory[file_id].revision
        finally:
            revision_tree.unlock()

    def _get_file_revision(self, file_id, vf, tree_revision):
        """Ensure that file_id, tree_revision is in vf to plan the merge."""
        if getattr(self, '_repository', None) is None:
            last_revision = tree_revision
            parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
                self._iter_parent_trees()]
            vf.add_lines((file_id, last_revision), parent_keys,
                         self.get_file(file_id).readlines())
            repo = self.branch.repository
            base_vf = repo.texts
        else:
            last_revision = self._file_revision(self, file_id)
            base_vf = self._repository.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return last_revision

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def _check_retrieved(self, ie, f):
        if not __debug__:
            return
        fp = fingerprint_file(f)
        f.seek(0)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                raise BzrError("mismatched size for file %r in %r" % (ie.file_id, self._store),
                        ["inventory expects %d bytes" % ie.text_size,
                         "file is actually %d bytes" % fp['size'],
                         "store is probably damaged/corrupt"])

        if ie.text_sha1 != fp['sha1']:
            raise BzrError("wrong SHA-1 for file %r in %r" % (ie.file_id, self._store),
                    ["inventory expects %s" % ie.text_sha1,
                     "file is actually %s" % fp['sha1'],
                     "store is probably damaged/corrupt"])

    def path2id(self, path):
        """Return the id for path in this tree."""
        return self._inventory.path2id(path)

    def paths2ids(self, paths, trees=[], require_versioned=True):
        """Return all the ids that can be reached by walking from paths.

        Each path is looked up in this tree and any extras provided in
        trees, and this is repeated recursively: the children in an extra tree
        of a directory that has been renamed under a provided path in this tree
        are all returned, even if none exist under a provided path in this
        tree, and vice versa.

        :param paths: An iterable of paths to start converting to ids from.
            Alternatively, if paths is None, no ids should be calculated and None
            will be returned. This is offered to make calling the api unconditional
            for code that *might* take a list of files.
        :param trees: Additional trees to consider.
        :param require_versioned: If False, do not raise NotVersionedError if
            an element of paths is not versioned in this tree and all of trees.
        """
        return find_ids_across_trees(paths, [self] + list(trees), require_versioned)
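
    # Illustrative sketch (not part of bzrlib): mapping user-supplied paths to
    # the set of file ids they cover, including children of matched
    # directories. `tree` and `other_tree` are hypothetical Tree instances.
    #
    #   ids = tree.paths2ids(['doc', 'README'], trees=[other_tree],
    #                        require_versioned=False)
    #   print '%d file ids selected' % len(ids)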

    def iter_children(self, file_id):
        entry = self.iter_entries_by_dir([file_id]).next()[1]
        for child in getattr(entry, 'children', {}).itervalues():
            yield child.file_id

    def revision_tree(self, revision_id):
        """Obtain a revision tree for the revision revision_id.

        The intention of this method is to allow access to possibly cached
        tree data. Implementors of this method should raise NoSuchRevision if
        the tree is not locally available, even if they could obtain the
        tree via a repository or some other means. Callers are responsible
        for finding the ultimate source for a revision tree.

        :param revision_id: The revision_id of the requested tree.
        :raises: NoSuchRevision if the tree cannot be obtained.
        """
        raise errors.NoSuchRevisionInTree(self, revision_id)

    def unknowns(self):
        """What files are present in this tree and unknown.

        :return: an iterator over the unknown files.
        """
        return iter([])

    def print_file(self, file_id):
        """Print file with id `file_id` to stdout."""
        import sys
        sys.stdout.write(self.get_file_text(file_id))

    def export(self, dest, format='dir', root=None):
        """Export this tree."""
        try:
            exporter = exporters[format]
        except KeyError:
            from bzrlib.errors import BzrCommandError
            raise BzrCommandError("export format %r not supported" % format)
        exporter(self, dest, root)
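
    # Illustrative sketch (not part of bzrlib): exporting a tree using one of
    # the registered exporters. `tree` is a hypothetical Tree instance; 'tgz'
    # must be a key in the module-level `exporters` dict (see the exporter
    # functions near the end of this file).
    #
    #   tree.export('/tmp/snapshot.tgz', format='tgz', root='snapshot')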


class RevisionTree(Tree):
    """Tree viewing a previous revision.

    File text can be retrieved from the text store.

    TODO: Some kind of `__repr__` method, but a good one
          probably means knowing the branch and revision number,
          or at least passing a description to the constructor.
    """

    def __init__(self, weave_store, inv, revision_id):
        self._weave_store = weave_store
        self._inventory = inv
        self._revision_id = revision_id

    def get_weave(self, file_id):
        # FIXME: RevisionTree should be given a branch
        # not a store, or the store should know the branch.
        import bzrlib.transactions as transactions
        return self._weave_store.get_weave(file_id,
                transactions.PassThroughTransaction())

    def get_file_lines(self, file_id):
        ie = self._inventory[file_id]
        weave = self.get_weave(file_id)
        return weave.get(ie.revision)

    def get_file_text(self, file_id):
        return ''.join(self.get_file_lines(file_id))

    def get_file(self, file_id):
        return StringIO(self.get_file_text(file_id))

    def get_file_size(self, file_id):
        return self._inventory[file_id].text_size

    def get_file_sha1(self, file_id):
        ie = self._inventory[file_id]
        if ie.kind == "file":
            return ie.text_sha1

    def is_executable(self, file_id):
        ie = self._inventory[file_id]
        if ie.kind != "file":
            return None
        return self._inventory[file_id].executable

    def has_filename(self, filename):
        return bool(self.inventory.path2id(filename))

    def list_files(self):
        # The only files returned by this are those from the version
        for path, entry in self.inventory.iter_entries():
            yield path, 'V', entry.kind, entry.file_id, entry

    def get_symlink_target(self, file_id):
        ie = self._inventory[file_id]
        return ie.symlink_target

    def kind(self, file_id):
        return self._inventory[file_id].kind


class EmptyTree(Tree):

    def __init__(self):
        self._inventory = Inventory()

    def get_symlink_target(self, file_id):
        return None

    def has_filename(self, filename):
        return False

    def kind(self, file_id):
        assert self._inventory[file_id].kind == "root_directory"
        return "root_directory"

    def list_files(self):
        return iter([])

    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.

        :return: set of paths.
        """
        # NB: we specifically *don't* call self.has_filename, because for
        # WorkingTrees that can indicate files that exist on disk but that
        # are not versioned.
        pred = self.inventory.has_filename
        return set((p for p in paths if not pred(p)))

    def walkdirs(self, prefix=""):
        """Walk the contents of this tree from path down.

        This yields all the data about the contents of a directory at a time.
        After each directory has been yielded, if the caller has mutated the
        list to exclude some directories, they are then not descended into.

        The data yielded is of the form:
        ((directory-relpath, directory-path-from-root, directory-fileid),
        [(relpath, basename, kind, lstat, path_from_tree_root, file_id,
          versioned_kind), ...]),
         - directory-relpath is the containing dirs relpath from prefix
         - directory-path-from-root is the containing dirs path from /
         - directory-fileid is the id of the directory if it is versioned.
         - relpath is the relative path within the subtree being walked.
         - basename is the basename
         - kind is the kind of the file now. If unknown then the file is not
           present within the tree - but it may be recorded as versioned. See
           versioned_kind.
         - lstat is the stat data *if* the file was statted.
         - path_from_tree_root is the path from the root of the tree.
         - file_id is the file_id if the entry is versioned.
         - versioned_kind is the kind of the file as last recorded in the
           versioning system. If 'unknown' the file is not versioned.
        One of 'kind' and 'versioned_kind' must not be 'unknown'.

        :param prefix: Start walking from prefix within the tree rather than
            at the root. This allows one to walk a subtree but get paths that are
            relative to a tree rooted higher up.
        :return: an iterator over the directory data.
        """
        raise NotImplementedError(self.walkdirs)
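
    # Illustrative sketch (not part of bzrlib): consuming walkdirs and pruning
    # a subdirectory so it is not descended into. `tree` is a hypothetical
    # Tree subclass that implements walkdirs.
    #
    #   for (dir_relpath, dir_path, dir_id), entries in tree.walkdirs():
    #       # dropping directory entries named 'build' prevents descent
    #       entries[:] = [e for e in entries
    #                     if e[2] != 'directory' or e[1] != 'build']
    #       for relpath, basename, kind, lstat, path, file_id, vkind in entries:
    #           print kind, relpath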

    def supports_content_filtering(self):
        return False

    def _content_filter_stack(self, path=None, file_id=None):
        """The stack of content filters for a path if filtering is supported.

        Readers will be applied in first-to-last order.
        Writers will be applied in last-to-first order.
        Either the path or the file-id needs to be provided.

        :param path: path relative to the root of the tree
            or None if unknown
        :param file_id: file_id or None if unknown
        :return: the list of filters - [] if there are none
        """
        filter_pref_names = filters._get_registered_names()
        if len(filter_pref_names) == 0:
            return []
        if path is None:
            path = self.id2path(file_id)
        prefs = self.iter_search_rules([path], filter_pref_names).next()
        stk = filters._get_filter_stack_for(prefs)
        if 'filters' in debug.debug_flags:
            note("*** %s content-filter: %s => %r" % (path, prefs, stk))
        return stk

    def _content_filter_stack_provider(self):
        """A function that returns a stack of ContentFilters.

        The function takes a path (relative to the top of the tree) and a
        file-id as parameters.

        :return: None if content filtering is not supported by this tree.
        """
        if self.supports_content_filtering():
            return lambda path, file_id: \
                    self._content_filter_stack(path, file_id)
        else:
            return None

    def iter_search_rules(self, path_names, pref_names=None,
                          _default_searcher=None):
        """Find the preferences for filenames in a tree.

        :param path_names: an iterable of paths to find attributes for.
            Paths are given relative to the root of the tree.
        :param pref_names: the list of preferences to lookup - None for all
        :param _default_searcher: private parameter to assist testing - don't use
        :return: an iterator of tuple sequences, one per path-name.
            See _RulesSearcher.get_items for details on the tuple sequence.
        """
        if _default_searcher is None:
            _default_searcher = rules._per_user_searcher
        searcher = self._get_rules_searcher(_default_searcher)
        if searcher is not None:
            if pref_names is not None:
                for path in path_names:
                    yield searcher.get_selected_items(path, pref_names)
            else:
                for path in path_names:
                    yield searcher.get_items(path)

    def _get_rules_searcher(self, default_searcher):
        """Get the RulesSearcher for this tree given the default one."""
        searcher = default_searcher
        return searcher

    def __contains__(self, file_id):
        return file_id in self._inventory

    def get_file_sha1(self, file_id):
        assert self._inventory[file_id].kind == "root_directory"
        return None


######################################################################

def find_renames(old_inv, new_inv):
    for file_id in old_inv:
        if file_id not in new_inv:
            continue
        old_name = old_inv.id2path(file_id)
        new_name = new_inv.id2path(file_id)
        if old_name != new_name:
            yield (old_name, new_name)


def find_ids_across_trees(filenames, trees, require_versioned=True):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, and all children of matched
    directories will be used.

    :param filenames: The filenames to find file_ids for (if None, returns
        None)
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames and their children.
    """
    if not filenames:
        return None
    specified_path_ids = _find_ids_across_trees(filenames, trees,
        require_versioned)
    return _find_children_across_trees(specified_path_ids, trees)
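
# Illustrative sketch (not part of bzrlib): resolving a couple of paths to
# file ids across a working tree and its basis tree. `wt` is a hypothetical
# WorkingTree.
#
#   basis = wt.basis_tree()
#   ids = find_ids_across_trees(['src', 'NEWS'], [wt, basis],
#                               require_versioned=False)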


def _find_ids_across_trees(filenames, trees, require_versioned):
    """Find the ids corresponding to specified filenames.

    All matches in all trees will be used, but subdirectories are not scanned.

    :param filenames: The filenames to find file_ids for
    :param trees: The trees to find file_ids within
    :param require_versioned: if true, all specified filenames must occur in
        at least one tree.
    :return: a set of file ids for the specified filenames
    """
    not_versioned = []
    interesting_ids = set()
    for tree_path in filenames:
        not_found = True
        for tree in trees:
            file_id = tree.path2id(tree_path)
            if file_id is not None:
                interesting_ids.add(file_id)
                not_found = False
        if not_found:
            not_versioned.append(tree_path)
    if len(not_versioned) > 0 and require_versioned:
        raise errors.PathsNotVersionedError(not_versioned)
    return interesting_ids


def _find_children_across_trees(specified_ids, trees):
    """Return a set including specified ids and their children.

    All matches in all trees will be used.

    :param trees: The trees to find file_ids within
    :return: a set containing all specified ids and their children
    """
    interesting_ids = set(specified_ids)
    pending = interesting_ids
    # now handle children of interesting ids
    # we loop so that we handle all children of each id in both trees
    while len(pending) > 0:
        new_pending = set()
        for file_id in pending:
            for tree in trees:
                if not tree.has_or_had_id(file_id):
                    continue
                for child_id in tree.iter_children(file_id):
                    if child_id not in interesting_ids:
                        new_pending.add(child_id)
        interesting_ids.update(new_pending)
        pending = new_pending
    return interesting_ids


class InterTree(InterObject):
    """This class represents operations taking place between two Trees.

    Its instances have methods like 'compare' and contain references to the
    source and target trees these operations are to be carried out on.

    Clients of bzrlib should not need to use InterTree directly, rather they
    should use the convenience methods on Tree such as 'Tree.compare()' which
    will pass through to InterTree as appropriate.
    """

    # Formats that will be used to test this InterTree. If both are
    # None, this InterTree will not be tested (e.g. because a complex
    # setup is required)
    _matching_from_tree_format = None
    _matching_to_tree_format = None

    def _changes_from_entries(self, source_entry, target_entry,
        source_path=None, target_path=None):
        """Generate an iter_changes tuple between source_entry and target_entry.

        :param source_entry: An inventory entry from self.source, or None.
        :param target_entry: An inventory entry from self.target, or None.
        :param source_path: The path of source_entry, if known. If not known
            it will be looked up.
        :param target_path: The path of target_entry, if known. If not known
            it will be looked up.
        :return: A tuple, item 0 of which is an iter_changes result tuple, and
            item 1 is True if there are any changes in the result tuple.
        """
        if source_entry is None:
            if target_entry is None:
                return None
            file_id = target_entry.file_id
        else:
            file_id = source_entry.file_id
        if source_entry is not None:
            source_versioned = True
            source_name = source_entry.name
            source_parent = source_entry.parent_id
            if source_path is None:
                source_path = self.source.id2path(file_id)
            source_kind, source_executable, source_stat = \
                self.source._comparison_data(source_entry, source_path)
        else:
            source_versioned = False
            source_name = None
            source_parent = None
            source_kind = None
            source_executable = None
        if target_entry is not None:
            target_versioned = True
            target_name = target_entry.name
            target_parent = target_entry.parent_id
            if target_path is None:
                target_path = self.target.id2path(file_id)
            target_kind, target_executable, target_stat = \
                self.target._comparison_data(target_entry, target_path)
        else:
            target_versioned = False
            target_name = None
            target_parent = None
            target_kind = None
            target_executable = None
        versioned = (source_versioned, target_versioned)
        kind = (source_kind, target_kind)
        changed_content = False
        if source_kind != target_kind:
            changed_content = True
        elif source_kind == 'file':
            if (self.source.get_file_sha1(file_id, source_path, source_stat) !=
                self.target.get_file_sha1(file_id, target_path, target_stat)):
                changed_content = True
        elif source_kind == 'symlink':
            if (self.source.get_symlink_target(file_id) !=
                self.target.get_symlink_target(file_id)):
                changed_content = True
            # XXX: Yes, the indentation below is wrong. But fixing it broke
            # test_merge.TestMergerEntriesLCAOnDisk.
            # test_nested_tree_subtree_renamed_and_modified. We'll wait for
            # the fix from bzr.dev -- vila 2009026
        elif source_kind == 'tree-reference':
            if (self.source.get_reference_revision(file_id, source_path)
                != self.target.get_reference_revision(file_id, target_path)):
                    changed_content = True
        parent = (source_parent, target_parent)
        name = (source_name, target_name)
        executable = (source_executable, target_executable)
        if (changed_content is not False or versioned[0] != versioned[1]
            or parent[0] != parent[1] or name[0] != name[1] or
            executable[0] != executable[1]):
            changes = True
        else:
            changes = False
        return (file_id, (source_path, target_path), changed_content,
                versioned, parent, name, kind, executable), changes

    def compare(self, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):
        """Return the changes from source to target.

        :return: A TreeDelta.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children of
            matched directories are included.
        :param want_unchanged: An optional boolean requesting the inclusion of
            unchanged entries in the result.
        :param extra_trees: An optional list of additional trees to use when
            mapping the contents of specific_files (paths) to file_ids.
        :param require_versioned: An optional boolean (defaults to False). When
            supplied and True all the 'specific_files' must be versioned, or
            a PathsNotVersionedError will be thrown.
        :param want_unversioned: Scan for unversioned paths.
        """
        trees = (self.source,)
        if extra_trees is not None:
            trees = trees + tuple(extra_trees)
        # target is usually the newer tree:
        specific_file_ids = self.target.paths2ids(specific_files, trees,
            require_versioned=require_versioned)
        if specific_files and not specific_file_ids:
            # All files are unversioned, so just return an empty delta
            # _compare_trees would think we want a complete delta
            result = delta.TreeDelta()
            fake_entry = InventoryFile('unused', 'unused', 'unused')
            result.unversioned = [(path, None,
                self.target._comparison_data(fake_entry, path)[0]) for path in
                specific_files]
            return result
        return delta._compare_trees(self.source, self.target, want_unchanged,
            specific_files, include_root, extra_trees=extra_trees,
            require_versioned=require_versioned,
            want_unversioned=want_unversioned)

    def iter_changes(self, include_unchanged=False,
                      specific_files=None, pb=None, extra_trees=[],
                      require_versioned=True, want_unversioned=False):
        """Generate an iterator of changes between trees.

        A tuple is returned:
        (file_id, (path_in_source, path_in_target),
         changed_content, versioned, parent, name, kind,
         executable)

        Changed_content is True if the file's content has changed. This
        includes changes to its kind, and to a symlink's target.

        versioned, parent, name, kind, executable are tuples of (from, to).
        If a file is missing in a tree, its kind is None.

        Iteration is done in parent-to-child order, relative to the target
        tree.

        There is no guarantee that all paths are in sorted order: the
        requirement to expand the search due to renames may result in children
        that should be found early being found late in the search, after
        lexically later results have been returned.
        :param require_versioned: Raise errors.PathsNotVersionedError if a
            path in the specific_files list is not versioned in one of
            source, target or extra_trees.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children
            of matched directories are included. The parents in the target tree
            of the specific files up to and including the root of the tree are
            always evaluated for changes too.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        """
        lookup_trees = [self.source]
        if extra_trees:
            lookup_trees.extend(extra_trees)
        # The ids of items we need to examine to insure delta consistency.
        precise_file_ids = set()
        changed_file_ids = []
        if specific_files == []:
            specific_file_ids = []
        else:
            specific_file_ids = self.target.paths2ids(specific_files,
                lookup_trees, require_versioned=require_versioned)
        if specific_files is not None:
            # reparented or added entries must have their parents included
            # so that valid deltas can be created. The seen_parents set
            # tracks the parents that we need to have.
            # The seen_dirs set tracks directory entries we've yielded.
            # After outputting version object in to_entries we set difference
            # the two seen sets and start checking parents.
            seen_parents = set()
            seen_dirs = set()
        if want_unversioned:
            all_unversioned = sorted([(p.split('/'), p) for p in
                                     self.target.extras()
                if specific_files is None or
                    osutils.is_inside_any(specific_files, p)])
            all_unversioned = deque(all_unversioned)
        else:
            all_unversioned = deque()
        to_paths = {}
        from_entries_by_dir = list(self.source.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        from_data = dict((e.file_id, (p, e)) for p, e in from_entries_by_dir)
        to_entries_by_dir = list(self.target.iter_entries_by_dir(
            specific_file_ids=specific_file_ids))
        num_entries = len(from_entries_by_dir) + len(to_entries_by_dir)
        entry_count = 0
        # the unversioned path lookup only occurs on real trees - where there
        # can be extras. So the fake_entry is solely used to look up
        # executable values when execute is not supported.
        fake_entry = InventoryFile('unused', 'unused', 'unused')
        for target_path, target_entry in to_entries_by_dir:
            while (all_unversioned and
                all_unversioned[0][0] < target_path.split('/')):
                unversioned_path = all_unversioned.popleft()
                target_kind, target_executable, target_stat = \
                    self.target._comparison_data(fake_entry, unversioned_path[1])
                yield (None, (None, unversioned_path[1]), True, (False, False),
                    (None, None),
                    (None, unversioned_path[0][-1]),
                    (None, target_kind),
                    (None, target_executable))
            source_path, source_entry = from_data.get(target_entry.file_id,
                (None, None))
            result, changes = self._changes_from_entries(source_entry,
                target_entry, source_path=source_path, target_path=target_path)
            to_paths[result[0]] = result[1][1]
            entry_count += 1
            if result[3][0]:
                entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            if changes or include_unchanged:
                if specific_file_ids is not None:
                    new_parent_id = result[4][1]
                    precise_file_ids.add(new_parent_id)
                    changed_file_ids.append(result[0])
                yield result
            # Ensure correct behaviour for reparented/added specific files.
            if specific_files is not None:
                # Record output dirs
                if result[6][1] == 'directory':
                    seen_dirs.add(result[0])
                # Record parents of reparented/added entries.
                versioned = result[3]
                parents = result[4]
                if not versioned[0] or parents[0] != parents[1]:
                    seen_parents.add(parents[1])
        while all_unversioned:
            # yield any trailing unversioned paths
            unversioned_path = all_unversioned.popleft()
            to_kind, to_executable, to_stat = \
                self.target._comparison_data(fake_entry, unversioned_path[1])
            yield (None, (None, unversioned_path[1]), True, (False, False),
                (None, None),
                (None, unversioned_path[0][-1]),
                (None, to_kind),
                (None, to_executable))
        # Yield all remaining source paths
        for path, from_entry in from_entries_by_dir:
            file_id = from_entry.file_id
            if file_id in to_paths:
                # already returned
                continue
            if file_id not in self.target.all_file_ids():
                # common case - paths we have not emitted are not present in
                # target.
                to_path = None
            else:
                to_path = self.target.id2path(file_id)
            entry_count += 1
            if pb is not None:
                pb.update('comparing files', entry_count, num_entries)
            versioned = (True, False)
            parent = (from_entry.parent_id, None)
            name = (from_entry.name, None)
            from_kind, from_executable, stat_value = \
                self.source._comparison_data(from_entry, path)
            kind = (from_kind, None)
            executable = (from_executable, None)
            changed_content = from_kind is not None
            # the parent's path is necessarily known at this point.
            changed_file_ids.append(file_id)
            yield(file_id, (path, to_path), changed_content, versioned, parent,
                  name, kind, executable)
        changed_file_ids = set(changed_file_ids)
        if specific_file_ids is not None:
            for result in self._handle_precise_ids(precise_file_ids,
                changed_file_ids):
                yield result

    def _get_entry(self, tree, file_id):
        """Get an inventory entry from a tree, with missing entries as None.

        If the tree raises NotImplementedError on accessing .inventory, then
        this is worked around using iter_entries_by_dir on just the file id
        desired.

        :param tree: The tree to lookup the entry in.
        :param file_id: The file_id to lookup.
        """
        try:
            inventory = tree.inventory
        except NotImplementedError:
            # No inventory available.
            try:
                iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
                return iterator.next()[1]
            except StopIteration:
                return None
        else:
            try:
                return inventory[file_id]
            except errors.NoSuchId:
                return None

    def _handle_precise_ids(self, precise_file_ids, changed_file_ids,
        discarded_changes=None):
        """Fill out a partial iter_changes to be consistent.

        :param precise_file_ids: The file ids of parents that were seen during
            the iter_changes.
        :param changed_file_ids: The file ids of already emitted items.
        :param discarded_changes: An optional dict of precalculated
            iter_changes items which the partial iter_changes had not output
            but had calculated.
        :return: A generator of iter_changes items to output.
        """
        # process parents of things that had changed under the users
        # requested paths to prevent incorrect paths or parent ids which
        # aren't in the tree.
        while precise_file_ids:
            precise_file_ids.discard(None)
            # Don't emit file_ids twice
            precise_file_ids.difference_update(changed_file_ids)
            if not precise_file_ids:
                break
            # If there was something at a given output path in source, we
            # have to include the entry from source in the delta, or we would
            # be putting this entry into a used path.
            paths = []
            for parent_id in precise_file_ids:
                try:
                    paths.append(self.target.id2path(parent_id))
                except errors.NoSuchId:
                    # This id has been dragged in from the source by delta
                    # expansion and isn't present in target at all: we don't
                    # need to check for path collisions on it.
                    pass
            for path in paths:
                old_id = self.source.path2id(path)
                precise_file_ids.add(old_id)
            precise_file_ids.discard(None)
            current_ids = precise_file_ids
            precise_file_ids = set()
            # We have to emit all of precise_file_ids that have been altered.
            # We may have to output the children of some of those ids if any
            # directories have stopped being directories.
            for file_id in current_ids:
                # Examine file_id
                if discarded_changes:
                    result = discarded_changes.get(file_id)
                    old_entry = None
                else:
                    result = None
                if result is None:
                    old_entry = self._get_entry(self.source, file_id)
                    new_entry = self._get_entry(self.target, file_id)
                    result, changes = self._changes_from_entries(
                        old_entry, new_entry)
                else:
                    changes = True
                # Get this parent's parent to examine.
                new_parent_id = result[4][1]
                precise_file_ids.add(new_parent_id)
                if changes:
                    if (result[6][0] == 'directory' and
                        result[6][1] != 'directory'):
                        # This stopped being a directory, the old children have
                        # to be included.
                        if old_entry is None:
                            # Reusing a discarded change.
                            old_entry = self._get_entry(self.source, file_id)
                        for child in old_entry.children.values():
                            precise_file_ids.add(child.file_id)
                    changed_file_ids.add(result[0])
                    yield result


class MultiWalker(object):
    """Walk multiple trees simultaneously, getting combined results."""

    # Note: This could be written to not assume you can do out-of-order
    #       lookups. Instead any nodes that don't match in all trees could be
    #       marked as 'deferred', and then returned in the final cleanup loop.
    #       For now, I think it is "nicer" to return things as close to the
    #       "master_tree" order as we can.

    def __init__(self, master_tree, other_trees):
        """Create a new MultiWalker.

        All trees being walked must implement "iter_entries_by_dir()", such
        that they yield (path, object) tuples, where that object will have a
        '.file_id' member, that can be used to check equality.

        :param master_tree: All trees will be 'slaved' to the master_tree such
            that nodes in master_tree will be used as 'first-pass' sync points.
            Any nodes that aren't in master_tree will be merged in a second
            pass.
        :param other_trees: A list of other trees to walk simultaneously.
        """
        self._master_tree = master_tree
        self._other_trees = other_trees

        # Keep track of any nodes that were properly processed just out of
        # order, that way we don't return them at the end, we don't have to
        # track *all* processed file_ids, just the out-of-order ones
        self._out_of_order_processed = set()

    @staticmethod
    def _step_one(iterator):
        """Step an iter_entries_by_dir iterator.

        :return: (has_more, path, ie)
            If has_more is False, path and ie will be None.
        """
        try:
            path, ie = iterator.next()
        except StopIteration:
            return False, None, None
        else:
            return True, path, ie

    @staticmethod
    def _cmp_path_by_dirblock(path1, path2):
        """Compare two paths based on what directory they are in.

        This generates a sort order, such that all children of a directory are
        sorted together, and grandchildren are in the same order as the
        children appear. But all grandchildren come after all children.

        :param path1: first path
        :param path2: the second path
        :return: negative number if ``path1`` comes first,
            0 if paths are equal
            and a positive number if ``path2`` sorts first
        """
        # Shortcut this special case
        if path1 == path2:
            return 0
        # This is stolen from _dirstate_helpers_py.py, only switching it to
        # Unicode objects. Consider using encode_utf8() and then using the
        # optimized versions, or maybe writing optimized unicode versions.
        if not isinstance(path1, unicode):
            raise TypeError("'path1' must be a unicode string, not %s: %r"
                            % (type(path1), path1))
        if not isinstance(path2, unicode):
            raise TypeError("'path2' must be a unicode string, not %s: %r"
                            % (type(path2), path2))
        return cmp(MultiWalker._path_to_key(path1),
                   MultiWalker._path_to_key(path2))

    @staticmethod
    def _path_to_key(path):
        dirname, basename = osutils.split(path)
        return (dirname.split(u'/'), basename)
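
    # Illustrative sketch (not part of bzrlib): the effect of the dirblock key
    # on ordering. Children of a directory sort together, and all
    # grandchildren come after all children.
    #
    #   paths = [u'a/b/c', u'a/b', u'a/d', u'a', u'f']
    #   paths.sort(key=MultiWalker._path_to_key)
    #   # -> [u'a', u'f', u'a/b', u'a/d', u'a/b/c']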

    def _lookup_by_file_id(self, extra_entries, other_tree, file_id):
        """Lookup an inventory entry by file_id.

        This is called when an entry is missing in the normal order.
        Generally this is because a file was either renamed, or it was
        deleted/added. If the entry was found in the inventory and not in
        extra_entries, it will be added to self._out_of_order_processed

        :param extra_entries: A dictionary of {file_id: (path, ie)}. This
            should be filled with entries that were found before they were
            used. If file_id is present, it will be removed from the
            dictionary.
        :param other_tree: The Tree to search, in case we didn't find the entry
            yet.
        :param file_id: The file_id to look for
        :return: (path, ie) if found or (None, None) if not present.
        """
        if file_id in extra_entries:
            return extra_entries.pop(file_id)
        # TODO: Is id2path better as the first call, or is
        #       inventory[file_id] better as a first check?
        try:
            cur_path = other_tree.id2path(file_id)
        except errors.NoSuchId:
            cur_path = None
        if cur_path is None:
            return (None, None)
        else:
            self._out_of_order_processed.add(file_id)
            cur_ie = other_tree.inventory[file_id]
            return (cur_path, cur_ie)

    def iter_all(self):
        """Match up the values in the different trees."""
        for result in self._walk_master_tree():
            yield result
        self._finish_others()
        for result in self._walk_others():
            yield result

    def _walk_master_tree(self):
        """First pass, walk all trees in lock-step.

        When we are done, all nodes in the master_tree will have been
        processed. _other_walkers, _other_entries, and _others_extra will be
        set on 'self' for future processing.
        """
        # This iterator has the most "inlining" done, because it tends to touch
        # every file in the tree, while the others only hit nodes that don't
        # match.
        master_iterator = self._master_tree.iter_entries_by_dir()

        other_walkers = [other.iter_entries_by_dir()
                         for other in self._other_trees]
        other_entries = [self._step_one(walker) for walker in other_walkers]
        # Track extra nodes in the other trees
        others_extra = [{} for i in xrange(len(self._other_trees))]

        master_has_more = True
        step_one = self._step_one
        lookup_by_file_id = self._lookup_by_file_id
        out_of_order_processed = self._out_of_order_processed

        while master_has_more:
            (master_has_more, path, master_ie) = step_one(master_iterator)
            if not master_has_more:
                break

            file_id = master_ie.file_id
            other_values = []
            other_values_append = other_values.append
            next_other_entries = []
            next_other_entries_append = next_other_entries.append
            for idx, (other_has_more, other_path, other_ie) in enumerate(other_entries):
                if not other_has_more:
                    other_values_append(lookup_by_file_id(
                        others_extra[idx], self._other_trees[idx], file_id))
                    next_other_entries_append((False, None, None))
                elif file_id == other_ie.file_id:
                    # This is the critical code path, as most of the entries
                    # should match between most trees.
                    other_values_append((other_path, other_ie))
                    next_other_entries_append(step_one(other_walkers[idx]))
                else:
                    # This walker did not match, step it until it either
                    # matches, or we know we are past the current walker.
                    other_walker = other_walkers[idx]
                    other_extra = others_extra[idx]
                    while (other_has_more and
                           self._cmp_path_by_dirblock(other_path, path) < 0):
                        other_file_id = other_ie.file_id
                        if other_file_id not in out_of_order_processed:
                            other_extra[other_file_id] = (other_path, other_ie)
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    if other_has_more and other_ie.file_id == file_id:
                        # We ended up walking to this point, match and step
                        # again
                        other_values_append((other_path, other_ie))
                        other_has_more, other_path, other_ie = \
                            step_one(other_walker)
                    else:
                        # This record isn't in the normal order, see if it
                        # exists at all.
                        other_values_append(lookup_by_file_id(
                            other_extra, self._other_trees[idx], file_id))
                    next_other_entries_append((other_has_more, other_path,
                                               other_ie))
            other_entries = next_other_entries

            # We've matched all the walkers, yield this datapoint
            yield path, file_id, master_ie, other_values
        self._other_walkers = other_walkers
        self._other_entries = other_entries
        self._others_extra = others_extra

    def _finish_others(self):
        """Finish walking the other iterators, so we get all entries."""
        for idx, info in enumerate(self._other_entries):
            other_extra = self._others_extra[idx]
            (other_has_more, other_path, other_ie) = info
            while other_has_more:
                other_file_id = other_ie.file_id
                if other_file_id not in self._out_of_order_processed:
                    other_extra[other_file_id] = (other_path, other_ie)
                other_has_more, other_path, other_ie = \
                    self._step_one(self._other_walkers[idx])
        del self._other_entries

    def _walk_others(self):
        """Finish up by walking all the 'deferred' nodes."""
        # TODO: One alternative would be to grab all possible unprocessed
        #       file_ids, and then sort by path, and then yield them. That
        #       might ensure better ordering, in case a caller strictly
        #       requires parents before children.
        for idx, other_extra in enumerate(self._others_extra):
            others = sorted(other_extra.itervalues(),
                            key=lambda x: self._path_to_key(x[0]))
            for other_path, other_ie in others:
                file_id = other_ie.file_id
                # We don't need to check out_of_order_processed here, because
                # the lookup_by_file_id will be removing anything processed
                # from the extras cache
                other_extra.pop(file_id)
                other_values = [(None, None) for i in xrange(idx)]
                other_values.append((other_path, other_ie))
                for alt_idx, alt_extra in enumerate(self._others_extra[idx+1:]):
                    alt_idx = alt_idx + idx + 1
                    alt_extra = self._others_extra[alt_idx]
                    alt_tree = self._other_trees[alt_idx]
                    other_values.append(self._lookup_by_file_id(
                        alt_extra, alt_tree, file_id))
                yield other_path, file_id, None, other_values
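
    # Illustrative sketch (not part of bzrlib): walking a basis tree and a
    # working tree in step. `wt` is a hypothetical WorkingTree; each result
    # pairs the master entry with the matching entries from the other trees.
    #
    #   walker = MultiWalker(wt.basis_tree(), [wt])
    #   for path, file_id, master_ie, other_values in walker.iter_all():
    #       other_path, other_ie = other_values[0]
    #       if other_ie is None:
    #           print 'gone from working tree:', path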


######################################################################

def dir_exporter(tree, dest, root):
    """Export this tree to a new directory.

    `dest` should not exist, and will be created holding the
    contents of this tree.

    TODO: To handle subdirectories we need to create the
          subdirectories first.

    :note: If the export fails, the destination directory will be
           left in a half-assed state.
    """
    os.mkdir(dest)
    mutter('export version %r' % tree)
    inv = tree.inventory
    for dp, ie in inv.iter_entries():
        ie.put_on_disk(dest, dp, tree)

exporters['dir'] = dir_exporter


def get_root_name(dest):
    """Get just the root name for a tarball.

    >>> get_root_name('mytar.tar')
    'mytar'
    >>> get_root_name('mytar.tar.bz2')
    'mytar'
    >>> get_root_name('tar.tar.tar.tgz')
    'tar.tar.tar'
    >>> get_root_name('bzr-0.0.5.tar.gz')
    'bzr-0.0.5'
    >>> get_root_name('a/long/path/mytar.tgz')
    'mytar'
    >>> get_root_name('../parent/../dir/other.tbz2')
    'other'
    """
    endings = ['.tar', '.tar.gz', '.tgz', '.tar.bz2', '.tbz2']
    dest = os.path.basename(dest)
    for end in endings:
        if dest.endswith(end):
            return dest[:-len(end)]


def tar_exporter(tree, dest, root, compression=None):
    """Export this tree to a new tar file.

    `dest` will be created holding the contents of this tree; if it
    already exists, it will be clobbered, like with "tar -c".
    """
    from time import time
    now = time()
    compression = str(compression or '')
    if root is None:
        root = get_root_name(dest)
    try:
        ball = tarfile.open(dest, 'w:' + compression)
    except tarfile.CompressionError, e:
        raise BzrError(str(e))
    mutter('export version %r' % tree)
    inv = tree.inventory
    for dp, ie in inv.iter_entries():
        mutter(" export {%s} kind %s to %s" % (ie.file_id, ie.kind, dest))
        item, fileobj = ie.get_tar_item(root, dp, now, tree)
        ball.addfile(item, fileobj)
    ball.close()

exporters['tar'] = tar_exporter


def tgz_exporter(tree, dest, root):
    tar_exporter(tree, dest, root, compression='gz')

exporters['tgz'] = tgz_exporter


def tbz_exporter(tree, dest, root):
    tar_exporter(tree, dest, root, compression='bz2')

exporters['tbz2'] = tbz_exporter
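
# Illustrative sketch (not part of bzrlib): registering an additional export
# format. `zip_exporter` is a hypothetical callable with the same
# (tree, dest, root) signature as the exporters above; once registered it can
# be reached through Tree.export(dest, format='zip').
#
#   def zip_exporter(tree, dest, root):
#       raise NotImplementedError  # body omitted in this sketch
#
#   exporters['zip'] = zip_exporter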