1
# Copyright (C) 2006-2010 Canonical Ltd
3
# This program is free software; you can redistribute it and/or modify
4
# it under the terms of the GNU General Public License as published by
5
# the Free Software Foundation; either version 2 of the License, or
6
# (at your option) any later version.
8
# This program is distributed in the hope that it will be useful,
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
# GNU General Public License for more details.
13
# You should have received a copy of the GNU General Public License
14
# along with this program; if not, write to the Free Software
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
from stat import S_ISREG, S_IEXEC
27
lazy_import.lazy_import(globals(), """
38
revision as _mod_revision,
43
from bzrlib.errors import (DuplicateKey, MalformedTransform, NoSuchFile,
44
ReusingTransform, CantMoveRoot,
45
ExistingLimbo, ImmortalLimbo, NoFinalPath,
47
from bzrlib.filters import filtered_output_bytes, ContentFilterContext
48
from bzrlib.inventory import InventoryEntry
49
from bzrlib.osutils import (
59
from bzrlib.progress import ProgressPhase
60
from bzrlib.symbol_versioning import (
65
from bzrlib.trace import mutter, warning
66
from bzrlib import tree
68
import bzrlib.urlutils as urlutils
71
ROOT_PARENT = "root-parent"
73
def unique_add(map, key, value):
75
raise DuplicateKey(key=key)
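# Illustrative sketch (not part of the original module): unique_add() refuses
# to overwrite an existing mapping and raises DuplicateKey instead:
#
#   m = {}
#   unique_add(m, 'new-1', 'a')   # ok
#   unique_add(m, 'new-1', 'b')   # raises DuplicateKey(key='new-1')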
80
class _TransformResults(object):
81
def __init__(self, modified_paths, rename_count):
83
self.modified_paths = modified_paths
84
self.rename_count = rename_count
87
class TreeTransformBase(object):
88
"""The base class for TreeTransform and its kin."""
90
def __init__(self, tree, pb=None,
94
:param tree: The tree that will be transformed, but not necessarily
97
:param case_sensitive: If True, the target of the transform is
98
case sensitive, not just case preserving.
100
object.__init__(self)
103
# mapping of trans_id -> new basename
105
# mapping of trans_id -> new parent trans_id
106
self._new_parent = {}
107
# mapping of trans_id with new contents -> new file_kind
108
self._new_contents = {}
109
# Set of trans_ids whose contents will be removed
110
self._removed_contents = set()
111
# Mapping of trans_id -> new execute-bit value
112
self._new_executability = {}
113
# Mapping of trans_id -> new tree-reference value
114
self._new_reference_revision = {}
115
# Mapping of trans_id -> new file_id
117
# Mapping of old file-id -> trans_id
118
self._non_present_ids = {}
119
# Mapping of new file_id -> trans_id
121
# Set of trans_ids that will be removed
122
self._removed_id = set()
123
# Mapping of path in old tree -> trans_id
124
self._tree_path_ids = {}
125
# Mapping trans_id -> path in old tree
126
self._tree_id_paths = {}
127
# The trans_id that will be used as the tree root
128
root_id = tree.get_root_id()
129
if root_id is not None:
130
self._new_root = self.trans_id_tree_file_id(root_id)
132
self._new_root = None
133
# Indicator of whether the transform has been applied
137
# Whether the target is case sensitive
138
self._case_sensitive_target = case_sensitive
139
# A counter of how many files have been renamed
140
self.rename_count = 0
143
"""Release the working tree lock, if held.
145
This is required if apply has not been invoked, but can be invoked
148
if self._tree is None:
153
def __get_root(self):
154
return self._new_root
156
root = property(__get_root)
158
def _assign_id(self):
159
"""Produce a new transform id"""
160
new_id = "new-%s" % self._id_number
164
def create_path(self, name, parent):
165
"""Assign a transaction id to a new path"""
166
trans_id = self._assign_id()
167
unique_add(self._new_name, trans_id, name)
168
unique_add(self._new_parent, trans_id, parent)
171
def adjust_path(self, name, parent, trans_id):
172
"""Change the path that is assigned to a transaction id."""
174
raise ValueError("Parent trans-id may not be None")
175
if trans_id == self._new_root:
177
self._new_name[trans_id] = name
178
self._new_parent[trans_id] = parent
180
def adjust_root_path(self, name, parent):
181
"""Emulate moving the root by moving all children, instead.
183
We do this by undoing the association of root's transaction id with the
184
current tree. This allows us to create a new directory with that
185
transaction id. We unversion the root directory and version the
186
physically new directory, and hope someone versions the tree root
189
old_root = self._new_root
190
old_root_file_id = self.final_file_id(old_root)
191
# force moving all children of root
192
for child_id in self.iter_tree_children(old_root):
193
if child_id != parent:
194
self.adjust_path(self.final_name(child_id),
195
self.final_parent(child_id), child_id)
196
file_id = self.final_file_id(child_id)
197
if file_id is not None:
198
self.unversion_file(child_id)
199
self.version_file(file_id, child_id)
201
# the physical root needs a new transaction id
202
self._tree_path_ids.pop("")
203
self._tree_id_paths.pop(old_root)
204
self._new_root = self.trans_id_tree_file_id(self._tree.get_root_id())
205
if parent == old_root:
206
parent = self._new_root
207
self.adjust_path(name, parent, old_root)
208
self.create_directory(old_root)
209
self.version_file(old_root_file_id, old_root)
210
self.unversion_file(self._new_root)
212
def fixup_new_roots(self):
213
"""Reinterpret requests to change the root directory
215
Instead of creating a root directory, or moving an existing directory,
216
all the attributes and children of the new root are applied to the
217
existing root directory.
219
This means that the old root trans-id becomes obsolete, so it is
220
recommended only to invoke this after the root trans-id has become
223
new_roots = [k for k, v in self._new_parent.iteritems() if v is
225
if len(new_roots) < 1:
227
if len(new_roots) != 1:
228
raise ValueError('A tree cannot have two roots!')
229
if self._new_root is None:
230
self._new_root = new_roots[0]
232
old_new_root = new_roots[0]
233
# TODO: What to do if an old_new_root is present, but self._new_root is
234
# not listed as being removed? This code explicitly unversions
235
# the old root and versions it with the new file_id. Though that
236
# seems like an incomplete delta
238
# unversion the new root's directory.
239
file_id = self.final_file_id(old_new_root)
240
if old_new_root in self._new_id:
241
self.cancel_versioning(old_new_root)
243
self.unversion_file(old_new_root)
244
# if, at this stage, root still has an old file_id, zap it so we can
245
# stick a new one in.
246
if (self.tree_file_id(self._new_root) is not None and
247
self._new_root not in self._removed_id):
248
self.unversion_file(self._new_root)
249
self.version_file(file_id, self._new_root)
251
# Now move children of new root into old root directory.
252
# Ensure all children are registered with the transaction, but don't
253
# use directly-- some tree children have new parents
254
list(self.iter_tree_children(old_new_root))
255
# Move all children of new root into old root directory.
256
for child in self.by_parent().get(old_new_root, []):
257
self.adjust_path(self.final_name(child), self._new_root, child)
259
# Ensure old_new_root has no directory.
260
if old_new_root in self._new_contents:
261
self.cancel_creation(old_new_root)
263
self.delete_contents(old_new_root)
265
# prevent deletion of root directory.
266
if self._new_root in self._removed_contents:
267
self.cancel_deletion(self._new_root)
269
# destroy path info for old_new_root.
270
del self._new_parent[old_new_root]
271
del self._new_name[old_new_root]
273
def trans_id_tree_file_id(self, inventory_id):
274
"""Determine the transaction id of a working tree file.
276
This reflects only files that already exist, not ones that will be
277
added by transactions.
279
if inventory_id is None:
280
raise ValueError('None is not a valid file id')
281
path = self._tree.id2path(inventory_id)
282
return self.trans_id_tree_path(path)
284
def trans_id_file_id(self, file_id):
285
"""Determine or set the transaction id associated with a file ID.
286
A new id is only created for file_ids that were never present. If
287
a transaction has been unversioned, it is deliberately still returned.
288
(this will likely lead to an unversioned parent conflict.)
291
raise ValueError('None is not a valid file id')
292
if file_id in self._r_new_id and self._r_new_id[file_id] is not None:
293
return self._r_new_id[file_id]
296
self._tree.iter_entries_by_dir([file_id]).next()
297
except StopIteration:
298
if file_id in self._non_present_ids:
299
return self._non_present_ids[file_id]
301
trans_id = self._assign_id()
302
self._non_present_ids[file_id] = trans_id
305
return self.trans_id_tree_file_id(file_id)
307
def trans_id_tree_path(self, path):
308
"""Determine (and maybe set) the transaction ID for a tree path."""
309
path = self.canonical_path(path)
310
if path not in self._tree_path_ids:
311
self._tree_path_ids[path] = self._assign_id()
312
self._tree_id_paths[self._tree_path_ids[path]] = path
313
return self._tree_path_ids[path]
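# Illustrative note (``tt`` is a transform instance, path invented):
# trans_id_tree_path() caches its result, so asking twice for the same path
# hands back the same trans_id:
#
#   t1 = tt.trans_id_tree_path('dir/file.txt')
#   t2 = tt.trans_id_tree_path('dir/file.txt')
#   assert t1 == t2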
315
def get_tree_parent(self, trans_id):
316
"""Determine id of the parent in the tree."""
317
path = self._tree_id_paths[trans_id]
320
return self.trans_id_tree_path(os.path.dirname(path))
322
def delete_contents(self, trans_id):
323
"""Schedule the contents of a path entry for deletion"""
324
kind = self.tree_kind(trans_id)
326
self._removed_contents.add(trans_id)
328
def cancel_deletion(self, trans_id):
329
"""Cancel a scheduled deletion"""
330
self._removed_contents.remove(trans_id)
332
def unversion_file(self, trans_id):
333
"""Schedule a path entry to become unversioned"""
334
self._removed_id.add(trans_id)
336
def delete_versioned(self, trans_id):
337
"""Delete and unversion a versioned file"""
338
self.delete_contents(trans_id)
339
self.unversion_file(trans_id)
341
def set_executability(self, executability, trans_id):
342
"""Schedule setting of the 'execute' bit
343
To unschedule, set to None
345
if executability is None:
346
del self._new_executability[trans_id]
348
unique_add(self._new_executability, trans_id, executability)
350
def set_tree_reference(self, revision_id, trans_id):
351
"""Set the reference associated with a directory"""
352
unique_add(self._new_reference_revision, trans_id, revision_id)
354
def version_file(self, file_id, trans_id):
355
"""Schedule a file to become versioned."""
358
unique_add(self._new_id, trans_id, file_id)
359
unique_add(self._r_new_id, file_id, trans_id)
361
def cancel_versioning(self, trans_id):
362
"""Undo a previous versioning of a file"""
363
file_id = self._new_id[trans_id]
364
del self._new_id[trans_id]
365
del self._r_new_id[file_id]
367
def new_paths(self, filesystem_only=False):
368
"""Determine the paths of all new and changed files.
370
:param filesystem_only: if True, only calculate values for files
371
that require renames or execute bit changes.
375
stale_ids = self._needs_rename.difference(self._new_name)
376
stale_ids.difference_update(self._new_parent)
377
stale_ids.difference_update(self._new_contents)
378
stale_ids.difference_update(self._new_id)
379
needs_rename = self._needs_rename.difference(stale_ids)
380
id_sets = (needs_rename, self._new_executability)
382
id_sets = (self._new_name, self._new_parent, self._new_contents,
383
self._new_id, self._new_executability)
384
for id_set in id_sets:
385
new_ids.update(id_set)
386
return sorted(FinalPaths(self).get_paths(new_ids))
388
def _inventory_altered(self):
389
"""Get the trans_ids and paths of files needing new inv entries."""
391
for id_set in [self._new_name, self._new_parent, self._new_id,
392
self._new_executability]:
393
new_ids.update(id_set)
394
changed_kind = set(self._removed_contents)
395
changed_kind.intersection_update(self._new_contents)
396
changed_kind.difference_update(new_ids)
397
changed_kind = (t for t in changed_kind
398
if self.tree_kind(t) != self.final_kind(t))
399
new_ids.update(changed_kind)
400
return sorted(FinalPaths(self).get_paths(new_ids))
402
def final_kind(self, trans_id):
403
"""Determine the final file kind, after any changes applied.
405
:return: None if the file does not exist/has no contents. (It is
406
conceivable that a path would be created without the corresponding
407
contents insertion command)
409
if trans_id in self._new_contents:
410
return self._new_contents[trans_id]
411
elif trans_id in self._removed_contents:
414
return self.tree_kind(trans_id)
416
def tree_file_id(self, trans_id):
417
"""Determine the file id associated with the trans_id in the tree"""
419
path = self._tree_id_paths[trans_id]
421
# the file is a new, unversioned file, or invalid trans_id
423
# the file is old; the old id is still valid
424
if self._new_root == trans_id:
425
return self._tree.get_root_id()
426
return self._tree.path2id(path)
428
def final_file_id(self, trans_id):
429
"""Determine the file id after any changes are applied, or None.
431
None indicates that the file will not be versioned after changes are
435
return self._new_id[trans_id]
437
if trans_id in self._removed_id:
439
return self.tree_file_id(trans_id)
441
def inactive_file_id(self, trans_id):
442
"""Return the inactive file_id associated with a transaction id.
443
That is, the one in the tree or in non_present_ids.
444
The file_id may actually be active, too.
446
file_id = self.tree_file_id(trans_id)
447
if file_id is not None:
449
for key, value in self._non_present_ids.iteritems():
450
if value == trans_id:
453
def final_parent(self, trans_id):
454
"""Determine the parent trans_id, after any changes are applied.
456
ROOT_PARENT is returned for the tree root.
459
return self._new_parent[trans_id]
461
return self.get_tree_parent(trans_id)
463
def final_name(self, trans_id):
464
"""Determine the final filename, after all changes are applied."""
466
return self._new_name[trans_id]
469
return os.path.basename(self._tree_id_paths[trans_id])
471
raise NoFinalPath(trans_id, self)
474
"""Return a map of parent: children for known parents.
476
Only new paths and parents of tree files with assigned ids are used.
479
items = list(self._new_parent.iteritems())
480
items.extend((t, self.final_parent(t)) for t in
481
self._tree_id_paths.keys())
482
for trans_id, parent_id in items:
483
if parent_id not in by_parent:
484
by_parent[parent_id] = set()
485
by_parent[parent_id].add(trans_id)
488
def path_changed(self, trans_id):
489
"""Return True if a trans_id's path has changed."""
490
return (trans_id in self._new_name) or (trans_id in self._new_parent)
492
def new_contents(self, trans_id):
493
return (trans_id in self._new_contents)
495
def find_conflicts(self):
496
"""Find any violations of inventory or filesystem invariants"""
497
if self._done is True:
498
raise ReusingTransform()
500
# ensure all children of all existent parents are known
501
# all children of non-existent parents are known, by definition.
502
self._add_tree_children()
503
by_parent = self.by_parent()
504
conflicts.extend(self._unversioned_parents(by_parent))
505
conflicts.extend(self._parent_loops())
506
conflicts.extend(self._duplicate_entries(by_parent))
507
conflicts.extend(self._duplicate_ids())
508
conflicts.extend(self._parent_type_conflicts(by_parent))
509
conflicts.extend(self._improper_versioning())
510
conflicts.extend(self._executability_conflicts())
511
conflicts.extend(self._overwrite_conflicts())
514
def _check_malformed(self):
515
conflicts = self.find_conflicts()
516
if len(conflicts) != 0:
517
raise MalformedTransform(conflicts=conflicts)
519
def _add_tree_children(self):
520
"""Add all the children of all active parents to the known paths.
522
Active parents are those which gain children, and those which are
523
removed. This is a necessary first step in detecting conflicts.
525
parents = self.by_parent().keys()
526
parents.extend([t for t in self._removed_contents if
527
self.tree_kind(t) == 'directory'])
528
for trans_id in self._removed_id:
529
file_id = self.tree_file_id(trans_id)
530
if file_id is not None:
531
if self._tree.inventory[file_id].kind == 'directory':
532
parents.append(trans_id)
533
elif self.tree_kind(trans_id) == 'directory':
534
parents.append(trans_id)
536
for parent_id in parents:
537
# ensure that all children are registered with the transaction
538
list(self.iter_tree_children(parent_id))
540
@deprecated_method(deprecated_in((2, 3, 0)))
541
def has_named_child(self, by_parent, parent_id, name):
542
return self._has_named_child(
543
name, parent_id, known_children=by_parent.get(parent_id, []))
545
def _has_named_child(self, name, parent_id, known_children):
546
"""Does a parent already have a child with the given name?
548
:param name: The searched for name.
550
:param parent_id: The parent for which the check is made.
552
:param known_children: The already known children. This should have
553
been recently obtained from `self.by_parent.get(parent_id)`
554
(or will be if None is passed).
556
if known_children is None:
557
known_children = self.by_parent().get(parent_id, [])
558
for child in known_children:
559
if self.final_name(child) == name:
561
parent_path = self._tree_id_paths.get(parent_id, None)
562
if parent_path is None:
563
# No parent... no children
565
child_path = joinpath(parent_path, name)
566
child_id = self._tree_path_ids.get(child_path, None)
568
# Not known by the tree transform yet, check the filesystem
569
return osutils.lexists(self._tree.abspath(child_path))
571
raise AssertionError('child_id is missing: %s, %s, %s'
572
% (name, parent_id, child_id))
574
def _available_backup_name(self, name, target_id):
575
"""Find an available backup name.
577
:param name: The basename of the file.
579
:param target_id: The directory trans_id where the backup should
582
known_children = self.by_parent().get(target_id, [])
583
return osutils.available_backup_name(
585
lambda base: self._has_named_child(
586
base, target_id, known_children))
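# Illustrative note: osutils.available_backup_name produces candidate names of
# the form 'name.~N~' (e.g. 'file.txt.~1~'), taking the first N for which the
# _has_named_child() callback reports no existing child.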
588
def _parent_loops(self):
589
"""No entry should be its own ancestor"""
591
for trans_id in self._new_parent:
594
while parent_id is not ROOT_PARENT:
597
parent_id = self.final_parent(parent_id)
600
if parent_id == trans_id:
601
conflicts.append(('parent loop', trans_id))
602
if parent_id in seen:
606
def _unversioned_parents(self, by_parent):
607
"""If parent directories are versioned, children must be versioned."""
609
for parent_id, children in by_parent.iteritems():
610
if parent_id is ROOT_PARENT:
612
if self.final_file_id(parent_id) is not None:
614
for child_id in children:
615
if self.final_file_id(child_id) is not None:
616
conflicts.append(('unversioned parent', parent_id))
620
def _improper_versioning(self):
621
"""Cannot version a file with no contents, or a bad type.
623
However, existing entries with no contents are okay.
626
for trans_id in self._new_id.iterkeys():
627
kind = self.final_kind(trans_id)
629
conflicts.append(('versioning no contents', trans_id))
631
if not InventoryEntry.versionable_kind(kind):
632
conflicts.append(('versioning bad kind', trans_id, kind))
635
def _executability_conflicts(self):
636
"""Check for bad executability changes.
638
Only versioned files may have their executability set, because
639
1. only versioned entries can have executability under windows
640
2. only files can be executable. (The execute bit on a directory
641
does not indicate searchability)
644
for trans_id in self._new_executability:
645
if self.final_file_id(trans_id) is None:
646
conflicts.append(('unversioned executability', trans_id))
648
if self.final_kind(trans_id) != "file":
649
conflicts.append(('non-file executability', trans_id))
652
def _overwrite_conflicts(self):
653
"""Check for overwrites (not permitted on Win32)"""
655
for trans_id in self._new_contents:
656
if self.tree_kind(trans_id) is None:
658
if trans_id not in self._removed_contents:
659
conflicts.append(('overwrite', trans_id,
660
self.final_name(trans_id)))
663
def _duplicate_entries(self, by_parent):
664
"""No directory may have two entries with the same name."""
666
if (self._new_name, self._new_parent) == ({}, {}):
668
for children in by_parent.itervalues():
669
name_ids = [(self.final_name(t), t) for t in children]
670
if not self._case_sensitive_target:
671
name_ids = [(n.lower(), t) for n, t in name_ids]
675
for name, trans_id in name_ids:
676
kind = self.final_kind(trans_id)
677
file_id = self.final_file_id(trans_id)
678
if kind is None and file_id is None:
680
if name == last_name:
681
conflicts.append(('duplicate', last_trans_id, trans_id,
684
last_trans_id = trans_id
687
def _duplicate_ids(self):
688
"""Each inventory id may only be used once"""
690
removed_tree_ids = set((self.tree_file_id(trans_id) for trans_id in
692
all_ids = self._tree.all_file_ids()
693
active_tree_ids = all_ids.difference(removed_tree_ids)
694
for trans_id, file_id in self._new_id.iteritems():
695
if file_id in active_tree_ids:
696
old_trans_id = self.trans_id_tree_file_id(file_id)
697
conflicts.append(('duplicate id', old_trans_id, trans_id))
700
def _parent_type_conflicts(self, by_parent):
701
"""parents must have directory 'contents'."""
703
for parent_id, children in by_parent.iteritems():
704
if parent_id is ROOT_PARENT:
706
if not self._any_contents(children):
708
kind = self.final_kind(parent_id)
710
conflicts.append(('missing parent', parent_id))
711
elif kind != "directory":
712
conflicts.append(('non-directory parent', parent_id))
715
def _any_contents(self, trans_ids):
716
"""Return True if any of the trans_ids will have contents."""
717
for trans_id in trans_ids:
718
if self.final_kind(trans_id) is not None:
722
def _set_executability(self, path, trans_id):
723
"""Set the executability of versioned files."""
724
if supports_executable():
725
new_executability = self._new_executability[trans_id]
726
abspath = self._tree.abspath(path)
727
current_mode = os.stat(abspath).st_mode
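# Worked example (illustrative values): with current_mode 0644 and a umask of
# 022, enabling the execute bit below gives 0755 (owner gets x, and group and
# other gain x because they can already read), while disabling it gives
# 0644 & ~0111 == 0644.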
728
if new_executability:
731
to_mode = current_mode | (0100 & ~umask)
732
# Enable x-bit for others only if they can read it.
733
if current_mode & 0004:
734
to_mode |= 0001 & ~umask
735
if current_mode & 0040:
736
to_mode |= 0010 & ~umask
738
to_mode = current_mode & ~0111
739
os.chmod(abspath, to_mode)
741
def _new_entry(self, name, parent_id, file_id):
742
"""Helper function to create a new filesystem entry."""
743
trans_id = self.create_path(name, parent_id)
744
if file_id is not None:
745
self.version_file(file_id, trans_id)
748
def new_file(self, name, parent_id, contents, file_id=None,
750
"""Convenience method to create files.
752
name is the name of the file to create.
753
parent_id is the transaction id of the parent directory of the file.
754
contents is an iterator of bytestrings, which will be used to produce
756
:param file_id: The inventory ID of the file, if it is to be versioned.
757
:param executable: Only valid when a file_id has been supplied.
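
Example (illustrative; ``tt`` stands for this transform)::

    trans_id = tt.new_file('hello.txt', tt.root, ['hello world\n'])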
759
trans_id = self._new_entry(name, parent_id, file_id)
760
# TODO: rather than scheduling a set_executable call,
761
# have create_file create the file with the right mode.
762
self.create_file(contents, trans_id)
763
if executable is not None:
764
self.set_executability(executable, trans_id)
767
def new_directory(self, name, parent_id, file_id=None):
768
"""Convenience method to create directories.
770
name is the name of the directory to create.
771
parent_id is the transaction id of the parent directory of the
773
file_id is the inventory ID of the directory, if it is to be versioned.
775
trans_id = self._new_entry(name, parent_id, file_id)
776
self.create_directory(trans_id)
779
def new_symlink(self, name, parent_id, target, file_id=None):
780
"""Convenience method to create a symbolic link.
782
name is the name of the symlink to create.
783
parent_id is the transaction id of the parent directory of the symlink.
784
target is a bytestring of the target of the symlink.
785
file_id is the inventory ID of the file, if it is to be versioned.
787
trans_id = self._new_entry(name, parent_id, file_id)
788
self.create_symlink(target, trans_id)
791
def new_orphan(self, trans_id, parent_id):
792
"""Schedule an item to be orphaned.
794
When a directory is about to be removed, its children, if they are not
795
versioned, are moved out of the way: they don't have a parent anymore.
797
:param trans_id: The trans_id of the existing item.
798
:param parent_id: The parent trans_id of the item.
800
raise NotImplementedError(self.new_orphan)
802
def _get_potential_orphans(self, dir_id):
803
"""Find the potential orphans in a directory.
805
A directory can't be safely deleted if there are versioned files in it.
806
If all the contained files are unversioned then they can be orphaned.
808
The 'None' return value means that the directory contains at least one
809
versioned file and should not be deleted.
811
:param dir_id: The directory trans id.
813
:return: A list of the orphan trans ids or None if at least one
814
versioned file is present.
817
# Find the potential orphans, stop if one item should be kept
818
for c in self.by_parent()[dir_id]:
819
if self.final_file_id(c) is None:
822
# We have a versioned file here, searching for orphans is
828
def _affected_ids(self):
829
"""Return the set of transform ids affected by the transform"""
830
trans_ids = set(self._removed_id)
831
trans_ids.update(self._new_id.keys())
832
trans_ids.update(self._removed_contents)
833
trans_ids.update(self._new_contents.keys())
834
trans_ids.update(self._new_executability.keys())
835
trans_ids.update(self._new_name.keys())
836
trans_ids.update(self._new_parent.keys())
839
def _get_file_id_maps(self):
840
"""Return mapping of file_ids to trans_ids in the to and from states"""
841
trans_ids = self._affected_ids()
844
# Build up two dicts: trans_ids associated with file ids in the
845
# FROM state, vs the TO state.
846
for trans_id in trans_ids:
847
from_file_id = self.tree_file_id(trans_id)
848
if from_file_id is not None:
849
from_trans_ids[from_file_id] = trans_id
850
to_file_id = self.final_file_id(trans_id)
851
if to_file_id is not None:
852
to_trans_ids[to_file_id] = trans_id
853
return from_trans_ids, to_trans_ids
855
def _from_file_data(self, from_trans_id, from_versioned, file_id):
856
"""Get data about a file in the from (tree) state
858
Return a (name, parent, kind, executable) tuple
860
from_path = self._tree_id_paths.get(from_trans_id)
862
# get data from working tree if versioned
863
from_entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
864
from_name = from_entry.name
865
from_parent = from_entry.parent_id
868
if from_path is None:
869
# File does not exist in FROM state
873
# File exists, but is not versioned. Have to use path-
875
from_name = os.path.basename(from_path)
876
tree_parent = self.get_tree_parent(from_trans_id)
877
from_parent = self.tree_file_id(tree_parent)
878
if from_path is not None:
879
from_kind, from_executable, from_stats = \
880
self._tree._comparison_data(from_entry, from_path)
883
from_executable = False
884
return from_name, from_parent, from_kind, from_executable
886
def _to_file_data(self, to_trans_id, from_trans_id, from_executable):
887
"""Get data about a file in the to (target) state
889
Return a (name, parent, kind, executable) tuple
891
to_name = self.final_name(to_trans_id)
892
to_kind = self.final_kind(to_trans_id)
893
to_parent = self.final_file_id(self.final_parent(to_trans_id))
894
if to_trans_id in self._new_executability:
895
to_executable = self._new_executability[to_trans_id]
896
elif to_trans_id == from_trans_id:
897
to_executable = from_executable
899
to_executable = False
900
return to_name, to_parent, to_kind, to_executable
902
def iter_changes(self):
903
"""Produce output in the same format as Tree.iter_changes.
905
Will produce nonsensical results if invoked while inventory/filesystem
906
conflicts (as reported by TreeTransform.find_conflicts()) are present.
908
This reads the Transform, but only reproduces changes involving a
909
file_id. Files that are not versioned in either of the FROM or TO
910
states are not reflected.
912
final_paths = FinalPaths(self)
913
from_trans_ids, to_trans_ids = self._get_file_id_maps()
915
# Now iterate through all active file_ids
916
for file_id in set(from_trans_ids.keys() + to_trans_ids.keys()):
918
from_trans_id = from_trans_ids.get(file_id)
919
# find file ids, and determine versioning state
920
if from_trans_id is None:
921
from_versioned = False
922
from_trans_id = to_trans_ids[file_id]
924
from_versioned = True
925
to_trans_id = to_trans_ids.get(file_id)
926
if to_trans_id is None:
928
to_trans_id = from_trans_id
932
from_name, from_parent, from_kind, from_executable = \
933
self._from_file_data(from_trans_id, from_versioned, file_id)
935
to_name, to_parent, to_kind, to_executable = \
936
self._to_file_data(to_trans_id, from_trans_id, from_executable)
938
if not from_versioned:
941
from_path = self._tree_id_paths.get(from_trans_id)
945
to_path = final_paths.get_path(to_trans_id)
946
if from_kind != to_kind:
948
elif to_kind in ('file', 'symlink') and (
949
to_trans_id != from_trans_id or
950
to_trans_id in self._new_contents):
952
if (not modified and from_versioned == to_versioned and
953
from_parent==to_parent and from_name == to_name and
954
from_executable == to_executable):
956
results.append((file_id, (from_path, to_path), modified,
957
(from_versioned, to_versioned),
958
(from_parent, to_parent),
959
(from_name, to_name),
960
(from_kind, to_kind),
961
(from_executable, to_executable)))
962
return iter(sorted(results, key=lambda x:x[1]))
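# Illustrative sketch of one iter_changes() result row (all values invented):
#
#   (file_id, (from_path, to_path), changed_content,
#    (from_versioned, to_versioned), (from_parent, to_parent),
#    (from_name, to_name), (from_kind, to_kind),
#    (from_executable, to_executable))
#
# e.g. a simple rename of 'foo' to 'bar' might look like:
#   ('foo-id', ('foo', 'bar'), False, (True, True), ('root-id', 'root-id'),
#    ('foo', 'bar'), ('file', 'file'), (False, False))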
964
def get_preview_tree(self):
965
"""Return a tree representing the result of the transform.
967
The tree is a snapshot, and altering the TreeTransform will invalidate
970
return _PreviewTree(self)
972
def commit(self, branch, message, merge_parents=None, strict=False,
973
timestamp=None, timezone=None, committer=None, authors=None,
974
revprops=None, revision_id=None):
975
"""Commit the result of this TreeTransform to a branch.
977
:param branch: The branch to commit to.
978
:param message: The message to attach to the commit.
979
:param merge_parents: Additional parent revision-ids specified by
981
:param strict: If True, abort the commit if there are unversioned
983
:param timestamp: if not None, seconds-since-epoch for the time and
984
date. (May be a float.)
985
:param timezone: Optional timezone for timestamp, as an offset in
987
:param committer: Optional committer in email-id format.
988
(e.g. "J Random Hacker <jrandom@example.com>")
989
:param authors: Optional list of authors in email-id format.
990
:param revprops: Optional dictionary of revision properties.
991
:param revision_id: Optional revision id. (Specifying a revision-id
992
may reduce performance for some non-native formats.)
993
:return: The revision_id of the revision committed.
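
Example (illustrative; ``tt`` is this transform)::

    rev_id = tt.commit(branch, 'Add greeting file')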
995
self._check_malformed()
997
unversioned = set(self._new_contents).difference(set(self._new_id))
998
for trans_id in unversioned:
999
if self.final_file_id(trans_id) is None:
1000
raise errors.StrictCommitFailed()
1002
revno, last_rev_id = branch.last_revision_info()
1003
if last_rev_id == _mod_revision.NULL_REVISION:
1004
if merge_parents is not None:
1005
raise ValueError('Cannot supply merge parents for first'
1009
parent_ids = [last_rev_id]
1010
if merge_parents is not None:
1011
parent_ids.extend(merge_parents)
1012
if self._tree.get_revision_id() != last_rev_id:
1013
raise ValueError('TreeTransform not based on branch basis: %s' %
1014
self._tree.get_revision_id())
1015
revprops = commit.Commit.update_revprops(revprops, branch, authors)
1016
builder = branch.get_commit_builder(parent_ids,
1017
timestamp=timestamp,
1019
committer=committer,
1021
revision_id=revision_id)
1022
preview = self.get_preview_tree()
1023
list(builder.record_iter_changes(preview, last_rev_id,
1024
self.iter_changes()))
1025
builder.finish_inventory()
1026
revision_id = builder.commit(message)
1027
branch.set_last_revision_info(revno + 1, revision_id)
1030
def _text_parent(self, trans_id):
1031
file_id = self.tree_file_id(trans_id)
1033
if file_id is None or self._tree.kind(file_id) != 'file':
1035
except errors.NoSuchFile:
1039
def _get_parents_texts(self, trans_id):
1040
"""Get texts for compression parents of this file."""
1041
file_id = self._text_parent(trans_id)
1044
return (self._tree.get_file_text(file_id),)
1046
def _get_parents_lines(self, trans_id):
1047
"""Get lines for compression parents of this file."""
1048
file_id = self._text_parent(trans_id)
1051
return (self._tree.get_file_lines(file_id),)
1053
def serialize(self, serializer):
1054
"""Serialize this TreeTransform.
1056
:param serializer: A Serialiser like pack.ContainerSerializer.
1058
new_name = dict((k, v.encode('utf-8')) for k, v in
1059
self._new_name.items())
1060
new_executability = dict((k, int(v)) for k, v in
1061
self._new_executability.items())
1062
tree_path_ids = dict((k.encode('utf-8'), v)
1063
for k, v in self._tree_path_ids.items())
1065
'_id_number': self._id_number,
1066
'_new_name': new_name,
1067
'_new_parent': self._new_parent,
1068
'_new_executability': new_executability,
1069
'_new_id': self._new_id,
1070
'_tree_path_ids': tree_path_ids,
1071
'_removed_id': list(self._removed_id),
1072
'_removed_contents': list(self._removed_contents),
1073
'_non_present_ids': self._non_present_ids,
1075
yield serializer.bytes_record(bencode.bencode(attribs),
1077
for trans_id, kind in self._new_contents.items():
1079
lines = osutils.chunks_to_lines(
1080
self._read_file_chunks(trans_id))
1081
parents = self._get_parents_lines(trans_id)
1082
mpdiff = multiparent.MultiParent.from_lines(lines, parents)
1083
content = ''.join(mpdiff.to_patch())
1084
if kind == 'directory':
1086
if kind == 'symlink':
1087
content = self._read_symlink_target(trans_id)
1088
yield serializer.bytes_record(content, ((trans_id, kind),))
1090
def deserialize(self, records):
1091
"""Deserialize a stored TreeTransform.
1093
:param records: An iterable of (names, content) tuples, as per
1094
pack.ContainerPushParser.
1096
names, content = records.next()
1097
attribs = bencode.bdecode(content)
1098
self._id_number = attribs['_id_number']
1099
self._new_name = dict((k, v.decode('utf-8'))
1100
for k, v in attribs['_new_name'].items())
1101
self._new_parent = attribs['_new_parent']
1102
self._new_executability = dict((k, bool(v)) for k, v in
1103
attribs['_new_executability'].items())
1104
self._new_id = attribs['_new_id']
1105
self._r_new_id = dict((v, k) for k, v in self._new_id.items())
1106
self._tree_path_ids = {}
1107
self._tree_id_paths = {}
1108
for bytepath, trans_id in attribs['_tree_path_ids'].items():
1109
path = bytepath.decode('utf-8')
1110
self._tree_path_ids[path] = trans_id
1111
self._tree_id_paths[trans_id] = path
1112
self._removed_id = set(attribs['_removed_id'])
1113
self._removed_contents = set(attribs['_removed_contents'])
1114
self._non_present_ids = attribs['_non_present_ids']
1115
for ((trans_id, kind),), content in records:
1117
mpdiff = multiparent.MultiParent.from_patch(content)
1118
lines = mpdiff.to_lines(self._get_parents_texts(trans_id))
1119
self.create_file(lines, trans_id)
1120
if kind == 'directory':
1121
self.create_directory(trans_id)
1122
if kind == 'symlink':
1123
self.create_symlink(content.decode('utf-8'), trans_id)
1126
class DiskTreeTransform(TreeTransformBase):
1127
"""Tree transform storing its contents on disk."""
1129
def __init__(self, tree, limbodir, pb=None,
1130
case_sensitive=True):
1132
:param tree: The tree that will be transformed, but not necessarily
1134
:param limbodir: A directory where new files can be stored until
1135
they are installed in their proper places
1137
:param case_sensitive: If True, the target of the transform is
1138
case sensitive, not just case preserving.
1140
TreeTransformBase.__init__(self, tree, pb, case_sensitive)
1141
self._limbodir = limbodir
1142
self._deletiondir = None
1143
# A mapping of transform ids to their limbo filename
1144
self._limbo_files = {}
1145
# A mapping of transform ids to a set of the transform ids of children
1146
# that their limbo directory has
1147
self._limbo_children = {}
1148
# Map transform ids to maps of child filename to child transform id
1149
self._limbo_children_names = {}
1150
# List of transform ids that need to be renamed from limbo into place
1151
self._needs_rename = set()
1152
self._creation_mtime = None
1155
"""Release the working tree lock, if held, clean up limbo dir.
1157
This is required if apply has not been invoked, but can be invoked
1160
if self._tree is None:
1163
entries = [(self._limbo_name(t), t, k) for t, k in
1164
self._new_contents.iteritems()]
1165
entries.sort(reverse=True)
1166
for path, trans_id, kind in entries:
1169
delete_any(self._limbodir)
1171
# We don't especially care *why* the dir is immortal.
1172
raise ImmortalLimbo(self._limbodir)
1174
if self._deletiondir is not None:
1175
delete_any(self._deletiondir)
1177
raise errors.ImmortalPendingDeletion(self._deletiondir)
1179
TreeTransformBase.finalize(self)
1181
def _limbo_name(self, trans_id):
1182
"""Generate the limbo name of a file"""
1183
limbo_name = self._limbo_files.get(trans_id)
1184
if limbo_name is None:
1185
limbo_name = self._generate_limbo_path(trans_id)
1186
self._limbo_files[trans_id] = limbo_name
1189
def _generate_limbo_path(self, trans_id):
1190
"""Generate a limbo path using the trans_id as the relative path.
1192
This is suitable as a fallback, and when the transform should not be
1193
sensitive to the path encoding of the limbo directory.
1195
self._needs_rename.add(trans_id)
1196
return pathjoin(self._limbodir, trans_id)
1198
def adjust_path(self, name, parent, trans_id):
1199
previous_parent = self._new_parent.get(trans_id)
1200
previous_name = self._new_name.get(trans_id)
1201
TreeTransformBase.adjust_path(self, name, parent, trans_id)
1202
if (trans_id in self._limbo_files and
1203
trans_id not in self._needs_rename):
1204
self._rename_in_limbo([trans_id])
1205
if previous_parent != parent:
1206
self._limbo_children[previous_parent].remove(trans_id)
1207
if previous_parent != parent or previous_name != name:
1208
del self._limbo_children_names[previous_parent][previous_name]
1210
def _rename_in_limbo(self, trans_ids):
1211
"""Fix limbo names so that the right final path is produced.
1213
This means we outsmarted ourselves-- we tried to avoid renaming
1214
these files later by creating them with their final names in their
1215
final parents. But now the previous name or parent is no longer
1216
suitable, so we have to rename them.
1218
Even for trans_ids that have no new contents, we must remove their
1219
entries from _limbo_files, because they are now stale.
1221
for trans_id in trans_ids:
1222
old_path = self._limbo_files.pop(trans_id)
1223
if trans_id not in self._new_contents:
1225
new_path = self._limbo_name(trans_id)
1226
os.rename(old_path, new_path)
1227
for descendant in self._limbo_descendants(trans_id):
1228
desc_path = self._limbo_files[descendant]
1229
desc_path = new_path + desc_path[len(old_path):]
1230
self._limbo_files[descendant] = desc_path
1232
def _limbo_descendants(self, trans_id):
1233
"""Return the set of trans_ids whose limbo paths descend from this."""
1234
descendants = set(self._limbo_children.get(trans_id, []))
1235
for descendant in list(descendants):
1236
descendants.update(self._limbo_descendants(descendant))
1239
def create_file(self, contents, trans_id, mode_id=None):
1240
"""Schedule creation of a new file.
1244
Contents is an iterator of strings, all of which will be written
1245
to the target destination.
1247
New file takes the permissions of any existing file with that id,
1248
unless mode_id is specified.
1250
name = self._limbo_name(trans_id)
1251
f = open(name, 'wb')
1254
unique_add(self._new_contents, trans_id, 'file')
1256
# Clean up the file, it never got registered so
1257
# TreeTransform.finalize() won't clean it up.
1262
f.writelines(contents)
1265
self._set_mtime(name)
1266
self._set_mode(trans_id, mode_id, S_ISREG)
1268
def _read_file_chunks(self, trans_id):
1269
cur_file = open(self._limbo_name(trans_id), 'rb')
1271
return cur_file.readlines()
1275
def _read_symlink_target(self, trans_id):
1276
return os.readlink(self._limbo_name(trans_id))
1278
def _set_mtime(self, path):
1279
"""All files that are created get the same mtime.
1281
This time is set by the first object to be created.
1283
if self._creation_mtime is None:
1284
self._creation_mtime = time.time()
1285
os.utime(path, (self._creation_mtime, self._creation_mtime))
1287
def create_hardlink(self, path, trans_id):
1288
"""Schedule creation of a hard link"""
1289
name = self._limbo_name(trans_id)
1293
if e.errno != errno.EPERM:
1295
raise errors.HardLinkNotSupported(path)
1297
unique_add(self._new_contents, trans_id, 'file')
1299
# Clean up the file, it never got registered so
1300
# TreeTransform.finalize() won't clean it up.
1304
def create_directory(self, trans_id):
1305
"""Schedule creation of a new directory.
1307
See also new_directory.
1309
os.mkdir(self._limbo_name(trans_id))
1310
unique_add(self._new_contents, trans_id, 'directory')
1312
def create_symlink(self, target, trans_id):
1313
"""Schedule creation of a new symbolic link.
1315
target is a bytestring.
1316
See also new_symlink.
1319
os.symlink(target, self._limbo_name(trans_id))
1320
unique_add(self._new_contents, trans_id, 'symlink')
1323
path = FinalPaths(self).get_path(trans_id)
1326
raise UnableCreateSymlink(path=path)
1328
def cancel_creation(self, trans_id):
1329
"""Cancel the creation of new file contents."""
1330
del self._new_contents[trans_id]
1331
children = self._limbo_children.get(trans_id)
1332
# if this is a limbo directory with children, move them before removing
1334
if children is not None:
1335
self._rename_in_limbo(children)
1336
del self._limbo_children[trans_id]
1337
del self._limbo_children_names[trans_id]
1338
delete_any(self._limbo_name(trans_id))
1340
def new_orphan(self, trans_id, parent_id):
1341
# FIXME: There is no tree config, so we use the branch one (it's weird
1342
# to define it this way as orphaning can only occur in a working tree,
1343
# but that's all we have (for now). It will find the option in
1344
# locations.conf or bazaar.conf though) -- vila 20100916
1345
conf = self._tree.branch.get_config()
1346
conf_var_name = 'bzr.transform.orphan_policy'
1347
orphan_policy = conf.get_user_option(conf_var_name)
1348
default_policy = orphaning_registry.default_key
1349
if orphan_policy is None:
1350
orphan_policy = default_policy
1351
if orphan_policy not in orphaning_registry:
1352
trace.warning('%s (from %s) is not a known policy, defaulting to %s'
1353
% (orphan_policy, conf_var_name, default_policy))
1354
orphan_policy = default_policy
1355
handle_orphan = orphaning_registry.get(orphan_policy)
1356
handle_orphan(self, trans_id, parent_id)
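# Illustrative sketch (assumes the standard bzrlib branch-config API): a user
# can opt into the 'move' policy so that orphans end up under bzr-orphans
# instead of causing a conflict:
#
#   tree.branch.get_config().set_user_option(
#       'bzr.transform.orphan_policy', 'move')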
1359
class OrphaningError(errors.BzrError):
1361
# Only bugs could lead to such an exception being seen by the user
1362
internal_error = True
1363
_fmt = "Error while orphaning %s in %s directory"
1365
def __init__(self, orphan, parent):
1366
errors.BzrError.__init__(self)
1367
self.orphan = orphan
1368
self.parent = parent
1371
class OrphaningForbidden(OrphaningError):
1373
_fmt = "Policy: %s doesn't allow creating orphans."
1375
def __init__(self, policy):
1376
errors.BzrError.__init__(self)
1377
self.policy = policy
1380
def move_orphan(tt, orphan_id, parent_id):
1381
"""See TreeTransformBase.new_orphan.
1383
This creates a new orphan in the `bzr-orphans` dir at the root of the
1386
:param tt: The TreeTransform orphaning `trans_id`.
1388
:param orphan_id: The trans id that should be orphaned.
1390
:param parent_id: The orphan parent trans id.
1392
# Add the orphan dir if it doesn't exist
1393
orphan_dir_basename = 'bzr-orphans'
1394
od_id = tt.trans_id_tree_path(orphan_dir_basename)
1395
if tt.final_kind(od_id) is None:
1396
tt.create_directory(od_id)
1397
parent_path = tt._tree_id_paths[parent_id]
1398
# Find a name that doesn't exist yet in the orphan dir
1399
actual_name = tt.final_name(orphan_id)
1400
new_name = tt._available_backup_name(actual_name, od_id)
1401
tt.adjust_path(new_name, od_id, orphan_id)
1402
trace.warning('%s has been orphaned in %s'
1403
% (joinpath(parent_path, actual_name), orphan_dir_basename))
1406
def refuse_orphan(tt, orphan_id, parent_id):
1407
"""See TreeTransformBase.new_orphan.
1409
This refuses to create an orphan, letting the caller handle the conflict.
1411
raise OrphaningForbidden('never')
1414
orphaning_registry = registry.Registry()
1415
orphaning_registry.register(
1416
'conflict', refuse_orphan,
1417
'Leave orphans in place and create a conflict on the directory.')
1418
orphaning_registry.register(
1419
'move', move_orphan,
1420
'Move orphans into the bzr-orphans directory.')
1421
orphaning_registry._set_default_key('conflict')
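# Illustrative sketch (hypothetical policy, not part of bzrlib): plugins can
# add further policies through the same registry calls used above.
#
#   def ignore_orphan(tt, orphan_id, parent_id):
#       """Leave the orphan in place (example only)."""
#
#   orphaning_registry.register(
#       'ignore', ignore_orphan,
#       'Leave orphans in place without creating a conflict (example only).')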
1424
class TreeTransform(DiskTreeTransform):
1425
"""Represent a tree transformation.
1427
This object is designed to support incremental generation of the transform,
1430
However, it gives optimum performance when parent directories are created
1431
before their contents. The transform is then able to put child files
1432
directly in their parent directory, avoiding later renames.
1434
It is easy to produce malformed transforms, but they are generally
1435
harmless. Attempting to apply a malformed transform will cause an
1436
exception to be raised before any modifications are made to the tree.
1438
Many kinds of malformed transforms can be corrected with the
1439
resolve_conflicts function. The remaining ones indicate programming error,
1440
such as trying to create a file with no path.
1442
Two sets of file creation methods are supplied. Convenience methods are:
1447
These are composed of the low-level methods:
1449
* create_file or create_directory or create_symlink
1453
Transform/Transaction ids
1454
-------------------------
1455
trans_ids are temporary ids assigned to all files involved in a transform.
1456
It's possible, even common, that not all files in the Tree have trans_ids.
1458
trans_ids are used because filenames and file_ids are not good enough
1459
identifiers; filenames change, and not all files have file_ids. File-ids
1460
are also associated with trans-ids, so that moving a file moves its
1463
trans_ids are only valid for the TreeTransform that generated them.
1467
Limbo is a temporary directory used to hold new versions of files.
1468
Files are added to limbo by create_file, create_directory, create_symlink,
1469
and their convenience variants (new_*). Files may be removed from limbo
1470
using cancel_creation. Files are renamed from limbo into their final
1471
location as part of TreeTransform.apply
1473
Limbo must be cleaned up, by either calling TreeTransform.apply or
1474
calling TreeTransform.finalize.
1476
Files are placed into limbo inside their parent directories, where
1477
possible. This reduces subsequent renames, and makes operations involving
1478
lots of files faster. This optimization is only possible if the parent
1479
directory is created *before* creating any of its children, so avoid
1480
creating children before parents, where possible.
1484
This temporary directory is used by _FileMover for storing files that are
1485
about to be deleted. In case of rollback, the files will be restored.
1486
FileMover does not delete files until it is sure that a rollback will not
1489
def __init__(self, tree, pb=None):
1490
"""Note: a tree_write lock is taken on the tree.
1492
Use TreeTransform.finalize() to release the lock (can be omitted if
1493
TreeTransform.apply() is called).
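
A minimal usage sketch (illustrative)::

    tt = TreeTransform(tree)
    try:
        tt.new_file('hello.txt', tt.root, ['hello world\n'])
        tt.apply()
    finally:
        tt.finalize()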
1495
tree.lock_tree_write()
1498
limbodir = urlutils.local_path_from_url(
1499
tree._transport.abspath('limbo'))
1503
if e.errno == errno.EEXIST:
1504
raise ExistingLimbo(limbodir)
1505
deletiondir = urlutils.local_path_from_url(
1506
tree._transport.abspath('pending-deletion'))
1508
os.mkdir(deletiondir)
1510
if e.errno == errno.EEXIST:
1511
raise errors.ExistingPendingDeletion(deletiondir)
1516
# Cache of realpath results, to speed up canonical_path
1517
self._realpaths = {}
1518
# Cache of relpath results, to speed up canonical_path
1520
DiskTreeTransform.__init__(self, tree, limbodir, pb,
1521
tree.case_sensitive)
1522
self._deletiondir = deletiondir
1524
def canonical_path(self, path):
1525
"""Get the canonical tree-relative path"""
1526
# don't follow final symlinks
1527
abs = self._tree.abspath(path)
1528
if abs in self._relpaths:
1529
return self._relpaths[abs]
1530
dirname, basename = os.path.split(abs)
1531
if dirname not in self._realpaths:
1532
self._realpaths[dirname] = os.path.realpath(dirname)
1533
dirname = self._realpaths[dirname]
1534
abs = pathjoin(dirname, basename)
1535
if dirname in self._relpaths:
1536
relpath = pathjoin(self._relpaths[dirname], basename)
1537
relpath = relpath.rstrip('/\\')
1539
relpath = self._tree.relpath(abs)
1540
self._relpaths[abs] = relpath
1543
def tree_kind(self, trans_id):
1544
"""Determine the file kind in the working tree.
1546
:returns: The file kind or None if the file does not exist
1548
path = self._tree_id_paths.get(trans_id)
1552
return file_kind(self._tree.abspath(path))
1553
except errors.NoSuchFile:
1556
def _set_mode(self, trans_id, mode_id, typefunc):
1557
"""Set the mode of new file contents.
1558
The mode_id is the existing file to get the mode from (often the same
1559
as trans_id). The operation is only performed if there's a mode match
1560
according to typefunc.
1565
old_path = self._tree_id_paths[mode_id]
1569
mode = os.stat(self._tree.abspath(old_path)).st_mode
1571
if e.errno in (errno.ENOENT, errno.ENOTDIR):
1572
# Either old_path doesn't exist, or the parent of the
1573
# target is not a directory (but will be one eventually)
1574
# Either way, we know it doesn't exist *right now*
1575
# See also bug #248448
1580
os.chmod(self._limbo_name(trans_id), mode)
1582
def iter_tree_children(self, parent_id):
1583
"""Iterate through the entry's tree children, if any"""
1585
path = self._tree_id_paths[parent_id]
1589
children = os.listdir(self._tree.abspath(path))
1591
if not (osutils._is_error_enotdir(e)
1592
or e.errno in (errno.ENOENT, errno.ESRCH)):
1596
for child in children:
1597
childpath = joinpath(path, child)
1598
if self._tree.is_control_filename(childpath):
1600
yield self.trans_id_tree_path(childpath)
1602
def _generate_limbo_path(self, trans_id):
1603
"""Generate a limbo path using the final path if possible.
1605
This optimizes the performance of applying the tree transform by
1606
avoiding renames. These renames can be avoided only when the parent
1607
directory is already scheduled for creation.
1609
If the final path cannot be used, falls back to using the trans_id as
1612
parent = self._new_parent.get(trans_id)
1613
# if the parent directory is already in limbo (e.g. when building a
1614
# tree), choose a limbo name inside the parent, to reduce further
1616
use_direct_path = False
1617
if self._new_contents.get(parent) == 'directory':
1618
filename = self._new_name.get(trans_id)
1619
if filename is not None:
1620
if parent not in self._limbo_children:
1621
self._limbo_children[parent] = set()
1622
self._limbo_children_names[parent] = {}
1623
use_direct_path = True
1624
# the direct path can only be used if no other file has
1625
# already taken this pathname, i.e. if the name is unused, or
1626
# if it is already associated with this trans_id.
1627
elif self._case_sensitive_target:
1628
if (self._limbo_children_names[parent].get(filename)
1629
in (trans_id, None)):
1630
use_direct_path = True
1632
for l_filename, l_trans_id in\
1633
self._limbo_children_names[parent].iteritems():
1634
if l_trans_id == trans_id:
1636
if l_filename.lower() == filename.lower():
1639
use_direct_path = True
1641
if not use_direct_path:
1642
return DiskTreeTransform._generate_limbo_path(self, trans_id)
1644
limbo_name = pathjoin(self._limbo_files[parent], filename)
1645
self._limbo_children[parent].add(trans_id)
1646
self._limbo_children_names[parent][filename] = trans_id
1650
def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
1651
"""Apply all changes to the inventory and filesystem.
1653
If filesystem or inventory conflicts are present, MalformedTransform
1656
If apply succeeds, finalize is not necessary.
1658
:param no_conflicts: if True, the caller guarantees there are no
1659
conflicts, so no check is made.
1660
:param precomputed_delta: An inventory delta to use instead of
1662
:param _mover: Supply an alternate FileMover, for testing
1664
if not no_conflicts:
1665
self._check_malformed()
1666
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1668
if precomputed_delta is None:
1669
child_pb.update('Apply phase', 0, 2)
1670
inventory_delta = self._generate_inventory_delta()
1673
inventory_delta = precomputed_delta
1676
mover = _FileMover()
1680
child_pb.update('Apply phase', 0 + offset, 2 + offset)
1681
self._apply_removals(mover)
1682
child_pb.update('Apply phase', 1 + offset, 2 + offset)
1683
modified_paths = self._apply_insertions(mover)
1688
mover.apply_deletions()
1691
self._tree.apply_inventory_delta(inventory_delta)
1694
return _TransformResults(modified_paths, self.rename_count)
1696
def _generate_inventory_delta(self):
1697
"""Generate an inventory delta for the current transform."""
1698
inventory_delta = []
1699
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1700
new_paths = self._inventory_altered()
1701
total_entries = len(new_paths) + len(self._removed_id)
1703
for num, trans_id in enumerate(self._removed_id):
1705
child_pb.update('removing file', num, total_entries)
1706
if trans_id == self._new_root:
1707
file_id = self._tree.get_root_id()
1709
file_id = self.tree_file_id(trans_id)
1710
# File-id isn't really being deleted, just moved
1711
if file_id in self._r_new_id:
1713
path = self._tree_id_paths[trans_id]
1714
inventory_delta.append((path, None, file_id, None))
1715
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1717
entries = self._tree.iter_entries_by_dir(
1718
new_path_file_ids.values())
1719
old_paths = dict((e.file_id, p) for p, e in entries)
1721
for num, (path, trans_id) in enumerate(new_paths):
1723
child_pb.update('adding file',
1724
num + len(self._removed_id), total_entries)
1725
file_id = new_path_file_ids[trans_id]
1729
kind = self.final_kind(trans_id)
1731
kind = self._tree.stored_kind(file_id)
1732
parent_trans_id = self.final_parent(trans_id)
1733
parent_file_id = new_path_file_ids.get(parent_trans_id)
1734
if parent_file_id is None:
1735
parent_file_id = self.final_file_id(parent_trans_id)
1736
if trans_id in self._new_reference_revision:
1737
new_entry = inventory.TreeReference(
1739
self._new_name[trans_id],
1740
self.final_file_id(self._new_parent[trans_id]),
1741
None, self._new_reference_revision[trans_id])
1743
new_entry = inventory.make_entry(kind,
1744
self.final_name(trans_id),
1745
parent_file_id, file_id)
1746
old_path = old_paths.get(new_entry.file_id)
1747
new_executability = self._new_executability.get(trans_id)
1748
if new_executability is not None:
1749
new_entry.executable = new_executability
1750
inventory_delta.append(
1751
(old_path, path, new_entry.file_id, new_entry))
1754
return inventory_delta
1756
def _apply_removals(self, mover):
1757
"""Perform tree operations that remove directory/inventory names.
1759
That is, delete files that are to be deleted, and put any files that
1760
need renaming into limbo. This must be done in strict child-to-parent
1763
If inventory_delta is None, no inventory delta generation is performed.
1765
tree_paths = list(self._tree_path_ids.iteritems())
1766
tree_paths.sort(reverse=True)
1767
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1769
for num, data in enumerate(tree_paths):
1770
path, trans_id = data
1771
child_pb.update('removing file', num, len(tree_paths))
1772
full_path = self._tree.abspath(path)
1773
if trans_id in self._removed_contents:
1774
delete_path = os.path.join(self._deletiondir, trans_id)
1775
mover.pre_delete(full_path, delete_path)
1776
elif (trans_id in self._new_name
1777
or trans_id in self._new_parent):
1779
mover.rename(full_path, self._limbo_name(trans_id))
1780
except errors.TransformRenameFailed, e:
1781
if e.errno != errno.ENOENT:
1784
self.rename_count += 1

    def _apply_insertions(self, mover):
        """Perform tree operations that insert directory/inventory names.

        That is, create any files that need to be created, and restore from
        limbo any files that needed renaming.  This must be done in strict
        parent-to-child order.

        If inventory_delta is None, no inventory delta is calculated, and
        no list of modified paths is returned.
        """
        new_paths = self.new_paths(filesystem_only=True)
        modified_paths = []
        new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
                                 new_paths)
        child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
        try:
            for num, (path, trans_id) in enumerate(new_paths):
                child_pb.update('adding file', num, len(new_paths))
                full_path = self._tree.abspath(path)
                if trans_id in self._needs_rename:
                    try:
                        mover.rename(self._limbo_name(trans_id), full_path)
                    except errors.TransformRenameFailed, e:
                        # We may be renaming a dangling inventory id
                        if e.errno != errno.ENOENT:
                            raise
                    else:
                        self.rename_count += 1
                if (trans_id in self._new_contents or
                    self.path_changed(trans_id)):
                    if trans_id in self._new_contents:
                        modified_paths.append(full_path)
                if trans_id in self._new_executability:
                    self._set_executability(path, trans_id)
        finally:
            child_pb.finished()
        self._new_contents.clear()
        return modified_paths
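
    # _apply_removals and _apply_insertions are the two halves of apply():
    # the removal pass moves doomed or renamed files out of the way in
    # child-to-parent order (directories are emptied before they move), and
    # the insertion pass builds the new shape in parent-to-child order
    # (directories exist before their children arrive).  A rough sketch of
    # how apply() is expected to drive them (illustrative; the real apply()
    # also checks conflicts and emits the inventory delta):
    #
    #   mover = _FileMover()
    #   try:
    #       self._apply_removals(mover)
    #       modified_paths = self._apply_insertions(mover)
    #   except:
    #       mover.rollback()
    #       raise
    #   else:
    #       mover.apply_deletions()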


class TransformPreview(DiskTreeTransform):
    """A TreeTransform for generating preview trees.

    Unlike TreeTransform, this version works when the input tree is a
    RevisionTree, rather than a WorkingTree.  As a result, it tends to ignore
    unversioned files in the input tree.
    """

    def __init__(self, tree, pb=None, case_sensitive=True):
        tree.lock_read()
        limbodir = osutils.mkdtemp(prefix='bzr-limbo-')
        DiskTreeTransform.__init__(self, tree, limbodir, pb, case_sensitive)

    def canonical_path(self, path):
        return path

    def tree_kind(self, trans_id):
        path = self._tree_id_paths.get(trans_id)
        if path is None:
            return None
        file_id = self._tree.path2id(path)
        try:
            return self._tree.kind(file_id)
        except errors.NoSuchFile:
            return None

    def _set_mode(self, trans_id, mode_id, typefunc):
        """Set the mode of new file contents.
        The mode_id is the existing file to get the mode from (often the same
        as trans_id).  The operation is only performed if there's a mode match
        according to typefunc.
        """
        # is it ok to ignore this? probably
        pass

    def iter_tree_children(self, parent_id):
        """Iterate through the entry's tree children, if any"""
        try:
            path = self._tree_id_paths[parent_id]
        except KeyError:
            return
        file_id = self.tree_file_id(parent_id)
        if file_id is None:
            return
        entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
        children = getattr(entry, 'children', {})
        for child in children:
            childpath = joinpath(path, child)
            yield self.trans_id_tree_path(childpath)

    def new_orphan(self, trans_id, parent_id):
        raise NotImplementedError(self.new_orphan)
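

# Example use of TransformPreview (a sketch): stage a change against a
# read-only basis tree and inspect it without touching any working tree.
# 'branch' is assumed to be an already-open Branch, and get_preview_tree()
# is assumed to return the _PreviewTree defined below.
#
#   basis = branch.basis_tree()
#   preview = TransformPreview(basis)
#   try:
#       preview.new_file('NEWS', preview.root, ['hello\n'], 'news-id')
#       preview_tree = preview.get_preview_tree()
#       print preview_tree.kind('news-id')    # -> 'file'
#   finally:
#       preview.finalize()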


class _PreviewTree(tree.Tree):
    """Partial implementation of Tree to support show_diff_trees"""

    def __init__(self, transform):
        self._transform = transform
        self._final_paths = FinalPaths(transform)
        self.__by_parent = None
        self._parent_ids = []
        self._all_children_cache = {}
        self._path2trans_id_cache = {}
        self._final_name_cache = {}
        self._iter_changes_cache = dict((c[0], c) for c in
                                        self._transform.iter_changes())

    def _content_change(self, file_id):
        """Return True if the content of this file changed"""
        changes = self._iter_changes_cache.get(file_id)
        # changes[2] is true if the file content changed.  See
        # InterTree.iter_changes.
        return (changes is not None and changes[2])

    def _get_repository(self):
        repo = getattr(self._transform._tree, '_repository', None)
        if repo is None:
            repo = self._transform._tree.branch.repository
        return repo

    def _iter_parent_trees(self):
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self._get_repository().revision_tree(revision_id)

    def _get_file_revision(self, file_id, vf, tree_revision):
        parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
                       self._iter_parent_trees()]
        vf.add_lines((file_id, tree_revision), parent_keys,
                     self.get_file_lines(file_id))
        repo = self._get_repository()
        base_vf = repo.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return tree_revision

    def _stat_limbo_file(self, file_id):
        trans_id = self._transform.trans_id_file_id(file_id)
        name = self._transform._limbo_name(trans_id)
        return os.lstat(name)

    @property
    def _by_parent(self):
        if self.__by_parent is None:
            self.__by_parent = self._transform.by_parent()
        return self.__by_parent

    def _comparison_data(self, entry, path):
        kind, size, executable, link_or_sha1 = self.path_content_summary(path)
        if kind == 'missing':
            kind = None
            executable = False
        else:
            file_id = self._transform.final_file_id(self._path2trans_id(path))
            executable = self.is_executable(file_id, path)
        return kind, executable, None

    def is_locked(self):
        return False

    def lock_read(self):
        # Perhaps in theory, this should lock the TreeTransform?
        pass

    def unlock(self):
        pass

    @property
    def inventory(self):
        """This Tree does not use inventory as its backing data."""
        raise NotImplementedError(_PreviewTree.inventory)

    def get_root_id(self):
        return self._transform.final_file_id(self._transform.root)

    def all_file_ids(self):
        tree_ids = set(self._transform._tree.all_file_ids())
        tree_ids.difference_update(self._transform.tree_file_id(t)
                                   for t in self._transform._removed_id)
        tree_ids.update(self._transform._new_id.values())
        return tree_ids

    def __iter__(self):
        return iter(self.all_file_ids())

    def _has_id(self, file_id, fallback_check):
        if file_id in self._transform._r_new_id:
            return True
        elif file_id in set([self._transform.tree_file_id(trans_id) for
            trans_id in self._transform._removed_id]):
            return False
        else:
            return fallback_check(file_id)

    def has_id(self, file_id):
        return self._has_id(file_id, self._transform._tree.has_id)

    def has_or_had_id(self, file_id):
        return self._has_id(file_id, self._transform._tree.has_or_had_id)

    def _path2trans_id(self, path):
        # We must not use None here, because that is a valid value to store.
        trans_id = self._path2trans_id_cache.get(path, object)
        if trans_id is not object:
            return trans_id
        segments = splitpath(path)
        cur_parent = self._transform.root
        for cur_segment in segments:
            for child in self._all_children(cur_parent):
                final_name = self._final_name_cache.get(child)
                if final_name is None:
                    final_name = self._transform.final_name(child)
                    self._final_name_cache[child] = final_name
                if final_name == cur_segment:
                    cur_parent = child
                    break
            else:
                self._path2trans_id_cache[path] = None
                return None
        self._path2trans_id_cache[path] = cur_parent
        return cur_parent

    def path2id(self, path):
        return self._transform.final_file_id(self._path2trans_id(path))
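
    # Path lookup walks the *final* (post-transform) namespace: each segment
    # is matched against _all_children via final_name, so renames and moves
    # staged in the transform are already visible here.  For instance
    # (sketch; 'preview_tree' is a _PreviewTree over a transform that renamed
    # README to README.txt):
    #
    #   preview_tree.path2id('README.txt')   # old file-id at its new name
    #   preview_tree.path2id('README')       # None; the old name is gone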

    def id2path(self, file_id):
        trans_id = self._transform.trans_id_file_id(file_id)
        try:
            return self._final_paths._determine_path(trans_id)
        except NoFinalPath:
            raise errors.NoSuchId(self, file_id)

    def _all_children(self, trans_id):
        children = self._all_children_cache.get(trans_id)
        if children is not None:
            return children
        children = set(self._transform.iter_tree_children(trans_id))
        # children in the _new_parent set are provided by _by_parent.
        children.difference_update(self._transform._new_parent.keys())
        children.update(self._by_parent.get(trans_id, []))
        self._all_children_cache[trans_id] = children
        return children

    def iter_children(self, file_id):
        trans_id = self._transform.trans_id_file_id(file_id)
        for child_trans_id in self._all_children(trans_id):
            yield self._transform.final_file_id(child_trans_id)

    def extras(self):
        possible_extras = set(self._transform.trans_id_tree_path(p) for p
                              in self._transform._tree.extras())
        possible_extras.update(self._transform._new_contents)
        possible_extras.update(self._transform._removed_id)
        for trans_id in possible_extras:
            if self._transform.final_file_id(trans_id) is None:
                yield self._final_paths._determine_path(trans_id)

    def _make_inv_entries(self, ordered_entries, specific_file_ids=None,
                          yield_parents=False):
        for trans_id, parent_file_id in ordered_entries:
            file_id = self._transform.final_file_id(trans_id)
            if file_id is None:
                continue
            if (specific_file_ids is not None
                and file_id not in specific_file_ids):
                continue
            kind = self._transform.final_kind(trans_id)
            if kind is None:
                kind = self._transform._tree.stored_kind(file_id)
            new_entry = inventory.make_entry(
                kind,
                self._transform.final_name(trans_id),
                parent_file_id, file_id)
            yield new_entry, trans_id

    def _list_files_by_dir(self):
        todo = [ROOT_PARENT]
        ordered_ids = []
        while len(todo) > 0:
            parent = todo.pop()
            parent_file_id = self._transform.final_file_id(parent)
            children = list(self._all_children(parent))
            paths = dict(zip(children, self._final_paths.get_paths(children)))
            children.sort(key=paths.get)
            todo.extend(reversed(children))
            for trans_id in children:
                ordered_ids.append((trans_id, parent_file_id))
        return ordered_ids

    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        # This may not be a maximally efficient implementation, but it is
        # reasonably straightforward.  An implementation that grafts the
        # TreeTransform changes onto the tree's iter_entries_by_dir results
        # might be more efficient, but requires tricky inferences about stack
        # position.
        ordered_ids = self._list_files_by_dir()
        for entry, trans_id in self._make_inv_entries(ordered_ids,
                specific_file_ids, yield_parents=yield_parents):
            yield unicode(self._final_paths.get_path(trans_id)), entry

    def _iter_entries_for_dir(self, dir_path):
        """Return path, entry for items in a directory without recursing down."""
        dir_file_id = self.path2id(dir_path)
        ordered_ids = []
        for file_id in self.iter_children(dir_file_id):
            trans_id = self._transform.trans_id_file_id(file_id)
            ordered_ids.append((trans_id, file_id))
        for entry, trans_id in self._make_inv_entries(ordered_ids):
            yield unicode(self._final_paths.get_path(trans_id)), entry

    def list_files(self, include_root=False, from_dir=None, recursive=True):
        """See WorkingTree.list_files."""
        # XXX This should behave like WorkingTree.list_files, but is really
        # more like RevisionTree.list_files.
        if recursive:
            prefix = None
            if from_dir:
                prefix = from_dir + '/'
            entries = self.iter_entries_by_dir()
            for path, entry in entries:
                if entry.name == '' and not include_root:
                    continue
                if prefix:
                    if not path.startswith(prefix):
                        continue
                    path = path[len(prefix):]
                yield path, 'V', entry.kind, entry.file_id, entry
        else:
            if from_dir is None and include_root is True:
                root_entry = inventory.make_entry('directory', '',
                    ROOT_PARENT, self.get_root_id())
                yield '', 'V', 'directory', root_entry.file_id, root_entry
            entries = self._iter_entries_for_dir(from_dir or '')
            for path, entry in entries:
                yield path, 'V', entry.kind, entry.file_id, entry

    def kind(self, file_id):
        trans_id = self._transform.trans_id_file_id(file_id)
        return self._transform.final_kind(trans_id)

    def stored_kind(self, file_id):
        trans_id = self._transform.trans_id_file_id(file_id)
        try:
            return self._transform._new_contents[trans_id]
        except KeyError:
            return self._transform._tree.stored_kind(file_id)

    def get_file_mtime(self, file_id, path=None):
        """See Tree.get_file_mtime"""
        if not self._content_change(file_id):
            return self._transform._tree.get_file_mtime(file_id)
        return self._stat_limbo_file(file_id).st_mtime

    def _file_size(self, entry, stat_value):
        return self.get_file_size(entry.file_id)

    def get_file_size(self, file_id):
        """See Tree.get_file_size"""
        if self.kind(file_id) == 'file':
            return self._transform._tree.get_file_size(file_id)
        else:
            return None

    def get_file_sha1(self, file_id, path=None, stat_value=None):
        trans_id = self._transform.trans_id_file_id(file_id)
        kind = self._transform._new_contents.get(trans_id)
        if kind is None:
            return self._transform._tree.get_file_sha1(file_id)
        if kind == 'file':
            fileobj = self.get_file(file_id)
            try:
                return sha_file(fileobj)
            finally:
                fileobj.close()

    def is_executable(self, file_id, path=None):
        if file_id is None:
            return False
        trans_id = self._transform.trans_id_file_id(file_id)
        try:
            return self._transform._new_executability[trans_id]
        except KeyError:
            try:
                return self._transform._tree.is_executable(file_id, path)
            except OSError, e:
                if e.errno == errno.ENOENT:
                    return False
                raise
            except errors.NoSuchId:
                return False

    def path_content_summary(self, path):
        trans_id = self._path2trans_id(path)
        tt = self._transform
        tree_path = tt._tree_id_paths.get(trans_id)
        kind = tt._new_contents.get(trans_id)
        if kind is None:
            if tree_path is None or trans_id in tt._removed_contents:
                return 'missing', None, None, None
            summary = tt._tree.path_content_summary(tree_path)
            kind, size, executable, link_or_sha1 = summary
        else:
            link_or_sha1 = None
            limbo_name = tt._limbo_name(trans_id)
            if trans_id in tt._new_reference_revision:
                kind = 'tree-reference'
            if kind == 'file':
                statval = os.lstat(limbo_name)
                size = statval.st_size
                if not supports_executable():
                    executable = False
                else:
                    executable = statval.st_mode & S_IEXEC
            else:
                size = None
                executable = None
            if kind == 'symlink':
                link_or_sha1 = os.readlink(limbo_name).decode(osutils._fs_enc)
        executable = tt._new_executability.get(trans_id, executable)
        return kind, size, executable, link_or_sha1

    def iter_changes(self, from_tree, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=None,
                     require_versioned=True, want_unversioned=False):
        """See InterTree.iter_changes.

        This has a fast path that is only used when the from_tree matches
        the transform tree, and no fancy options are supplied.
        """
        if (from_tree is not self._transform._tree or include_unchanged or
            specific_files or want_unversioned):
            return tree.InterTree(from_tree, self).iter_changes(
                include_unchanged=include_unchanged,
                specific_files=specific_files,
                pb=pb,
                extra_trees=extra_trees,
                require_versioned=require_versioned,
                want_unversioned=want_unversioned)
        if want_unversioned:
            raise ValueError('want_unversioned is not supported')
        return self._transform.iter_changes()

    def get_file(self, file_id, path=None):
        """See Tree.get_file"""
        if not self._content_change(file_id):
            return self._transform._tree.get_file(file_id, path)
        trans_id = self._transform.trans_id_file_id(file_id)
        name = self._transform._limbo_name(trans_id)
        return open(name, 'rb')

    def get_file_with_stat(self, file_id, path=None):
        return self.get_file(file_id, path), None

    def annotate_iter(self, file_id,
                      default_revision=_mod_revision.CURRENT_REVISION):
        changes = self._iter_changes_cache.get(file_id)
        if changes is None:
            get_old = True
        else:
            changed_content, versioned, kind = (changes[2], changes[3],
                                                changes[6])
            if kind[1] is None:
                return None
            get_old = (kind[0] == 'file' and versioned[0])
        if get_old:
            old_annotation = self._transform._tree.annotate_iter(file_id,
                default_revision=default_revision)
        else:
            old_annotation = []
        if changes is None:
            return old_annotation
        if not changed_content:
            return old_annotation
        # TODO: This is doing something similar to what WT.annotate_iter is
        #       doing, however it fails slightly because it doesn't know what
        #       the *other* revision_id is, so it doesn't know how to give the
        #       other as the origin for some lines, they all get
        #       'default_revision'
        #       It would be nice to be able to use the new Annotator based
        #       approach, as well.
        return annotate.reannotate([old_annotation],
                                   self.get_file(file_id).readlines(),
                                   default_revision)

    def get_symlink_target(self, file_id):
        """See Tree.get_symlink_target"""
        if not self._content_change(file_id):
            return self._transform._tree.get_symlink_target(file_id)
        trans_id = self._transform.trans_id_file_id(file_id)
        name = self._transform._limbo_name(trans_id)
        return osutils.readlink(name)

    def walkdirs(self, prefix=''):
        pending = [self._transform.root]
        while len(pending) > 0:
            parent_id = pending.pop()
            children = []
            subdirs = []
            prefix = prefix.rstrip('/')
            parent_path = self._final_paths.get_path(parent_id)
            parent_file_id = self._transform.final_file_id(parent_id)
            for child_id in self._all_children(parent_id):
                path_from_root = self._final_paths.get_path(child_id)
                basename = self._transform.final_name(child_id)
                file_id = self._transform.final_file_id(child_id)
                kind = self._transform.final_kind(child_id)
                if kind is not None:
                    versioned_kind = kind
                else:
                    kind = 'unknown'
                    versioned_kind = self._transform._tree.stored_kind(file_id)
                if versioned_kind == 'directory':
                    subdirs.append(child_id)
                children.append((path_from_root, basename, kind, None,
                                 file_id, versioned_kind))
            children.sort()
            if parent_path.startswith(prefix):
                yield (parent_path, parent_file_id), children
            pending.extend(sorted(subdirs, key=self._final_paths.get_path,
                                  reverse=True))

    def get_parent_ids(self):
        return self._parent_ids

    def set_parent_ids(self, parent_ids):
        self._parent_ids = parent_ids

    def get_revision_tree(self, revision_id):
        return self._transform._tree.get_revision_tree(revision_id)


def joinpath(parent, child):
    """Join tree-relative paths, handling the tree root specially"""
    if parent is None or parent == "":
        return child
    else:
        return pathjoin(parent, child)


class FinalPaths(object):
    """Make path calculation cheap by memoizing paths.

    The underlying tree must not be manipulated between calls, or else
    the results will likely be incorrect.
    """
    def __init__(self, transform):
        object.__init__(self)
        self._known_paths = {}
        self.transform = transform

    def _determine_path(self, trans_id):
        if (trans_id == self.transform.root or trans_id == ROOT_PARENT):
            return ""
        name = self.transform.final_name(trans_id)
        parent_id = self.transform.final_parent(trans_id)
        if parent_id == self.transform.root:
            return name
        else:
            return pathjoin(self.get_path(parent_id), name)

    def get_path(self, trans_id):
        """Find the final path associated with a trans_id"""
        if trans_id not in self._known_paths:
            self._known_paths[trans_id] = self._determine_path(trans_id)
        return self._known_paths[trans_id]

    def get_paths(self, trans_ids):
        return [(self.get_path(t), t) for t in trans_ids]
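

# FinalPaths is just a memo table over final_name()/final_parent(): every
# prefix it resolves is cached, so repeated lookups under the same directory
# stay cheap.  Sketch of typical use, mirroring how conflict cooking and
# _list_files_by_dir use it:
#
#   fp = FinalPaths(tt)
#   path_a = fp.get_path(trans_id_a)
#   path_b = fp.get_path(trans_id_b)   # shared parent paths not recomputed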


def topology_sorted_ids(tree):
    """Determine the topological order of the ids in a tree"""
    file_ids = list(tree)
    file_ids.sort(key=tree.id2path)
    return file_ids


def build_tree(tree, wt, accelerator_tree=None, hardlink=False,
               delta_from_tree=False):
    """Create working tree for a branch, using a TreeTransform.

    This function should be used on empty trees, having a tree root at most.
    (see merge and revert functionality for working with existing trees)

    Existing files are handled like so:

    - Existing bzrdirs take precedence over creating new items.  They are
      created as '%s.diverted' % name.
    - Otherwise, if the content on disk matches the content we are building,
      it is silently replaced.
    - Otherwise, conflict resolution will move the old file to 'oldname.moved'.

    :param tree: The tree to convert wt into a copy of
    :param wt: The working tree that files will be placed into
    :param accelerator_tree: A tree which can be used for retrieving file
        contents more quickly than tree itself, i.e. a workingtree.  tree
        will be used for cases where accelerator_tree's content is different.
    :param hardlink: If true, hard-link files to accelerator_tree, where
        possible.  accelerator_tree must implement abspath, i.e. be a
        working tree.
    :param delta_from_tree: If true, build_tree may use the input Tree to
        generate the inventory delta.
    """
    wt.lock_tree_write()
    try:
        tree.lock_read()
        try:
            if accelerator_tree is not None:
                accelerator_tree.lock_read()
            try:
                return _build_tree(tree, wt, accelerator_tree, hardlink,
                                   delta_from_tree)
            finally:
                if accelerator_tree is not None:
                    accelerator_tree.unlock()
        finally:
            tree.unlock()
    finally:
        wt.unlock()
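
# Example (a sketch): populating a freshly created, still-empty working tree
# from the branch's last revision.  'branch' and 'wt' are assumed to already
# exist, with 'wt' containing nothing but a root.
#
#   source = branch.repository.revision_tree(branch.last_revision())
#   build_tree(source, wt, accelerator_tree=None, hardlink=False)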


def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
    """See build_tree."""
    for num, _unused in enumerate(wt.all_file_ids()):
        if num > 0:  # more than just a root
            raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
    file_trans_id = {}
    top_pb = bzrlib.ui.ui_factory.nested_progress_bar()
    pp = ProgressPhase("Build phase", 2, top_pb)
    if tree.inventory.root is not None:
        # This is kind of a hack: we should be altering the root
        # as part of the regular tree shape diff logic.
        # The conditional test here is to avoid doing an
        # expensive operation (flush) every time the root id
        # is set within the tree, nor setting the root and thus
        # marking the tree as dirty, because we use two different
        # idioms here: tree interfaces and inventory interfaces.
        if wt.get_root_id() != tree.get_root_id():
            wt.set_root_id(tree.get_root_id())
            wt.flush()
    tt = TreeTransform(wt)
    divert = set()
    try:
        pp.next_phase()
        file_trans_id[wt.get_root_id()] = \
            tt.trans_id_tree_file_id(wt.get_root_id())
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
        try:
            deferred_contents = []
            num = 0
            total = len(tree.inventory)
            if delta_from_tree:
                precomputed_delta = []
            else:
                precomputed_delta = None
            # Check if tree inventory has content. If so, we populate
            # existing_files with the directory content. If there are no
            # entries we skip populating existing_files as its not used.
            # This improves performance and unncessary work on large
            # directory trees. (#501307)
            if total > 0:
                existing_files = set()
                for dir, files in wt.walkdirs():
                    existing_files.update(f[0] for f in files)
            for num, (tree_path, entry) in \
                enumerate(tree.inventory.iter_entries_by_dir()):
                pb.update("Building tree", num - len(deferred_contents), total)
                if entry.parent_id is None:
                    continue
                reparent = False
                file_id = entry.file_id
                if delta_from_tree:
                    precomputed_delta.append((None, tree_path, file_id, entry))
                if tree_path in existing_files:
                    target_path = wt.abspath(tree_path)
                    kind = file_kind(target_path)
                    if kind == "directory":
                        try:
                            bzrdir.BzrDir.open(target_path)
                        except errors.NotBranchError:
                            pass
                        else:
                            divert.add(file_id)
                    if (file_id not in divert and
                        _content_match(tree, entry, file_id, kind,
                        target_path)):
                        tt.delete_contents(tt.trans_id_tree_path(tree_path))
                        if kind == 'directory':
                            reparent = True
                parent_id = file_trans_id[entry.parent_id]
                if entry.kind == 'file':
                    # We *almost* replicate new_by_entry, so that we can defer
                    # getting the file text, and get them all at once.
                    trans_id = tt.create_path(entry.name, parent_id)
                    file_trans_id[file_id] = trans_id
                    tt.version_file(file_id, trans_id)
                    executable = tree.is_executable(file_id, tree_path)
                    if executable:
                        tt.set_executability(executable, trans_id)
                    trans_data = (trans_id, tree_path)
                    deferred_contents.append((file_id, trans_data))
                else:
                    file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
                                                          tree)
                if reparent:
                    new_trans_id = file_trans_id[file_id]
                    old_parent = tt.trans_id_tree_path(tree_path)
                    _reparent_children(tt, old_parent, new_trans_id)
            offset = num + 1 - len(deferred_contents)
            _create_files(tt, tree, deferred_contents, pb, offset,
                          accelerator_tree, hardlink)
            pp.next_phase()
            divert_trans = set(file_trans_id[f] for f in divert)
            resolver = lambda t, c: resolve_checkout(t, c, divert_trans)
            raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
            if len(raw_conflicts) > 0:
                precomputed_delta = None
            conflicts = cook_conflicts(raw_conflicts, tt)
            for conflict in conflicts:
                warning(conflict)
            try:
                wt.add_conflicts(conflicts)
            except errors.UnsupportedOperation:
                pass
            result = tt.apply(no_conflicts=True,
                              precomputed_delta=precomputed_delta)
        finally:
            pb.finished()
    finally:
        tt.finalize()
        top_pb.finished()
    return result


def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
                  hardlink):
    total = len(desired_files) + offset
    wt = tt._tree
    if accelerator_tree is None:
        new_desired_files = desired_files
    else:
        iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
        unchanged = [(f, p[1]) for (f, p, c, v, d, n, k, e)
                     in iter if not (c or e[0] != e[1])]
        if accelerator_tree.supports_content_filtering():
            unchanged = [(f, p) for (f, p) in unchanged
                         if not accelerator_tree.iter_search_rules([p]).next()]
        unchanged = dict(unchanged)
        new_desired_files = []
        count = 0
        for file_id, (trans_id, tree_path) in desired_files:
            accelerator_path = unchanged.get(file_id)
            if accelerator_path is None:
                new_desired_files.append((file_id, (trans_id, tree_path)))
                continue
            pb.update('Adding file contents', count + offset, total)
            if hardlink:
                tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
                                   trans_id)
            else:
                contents = accelerator_tree.get_file(file_id, accelerator_path)
                if wt.supports_content_filtering():
                    filters = wt._content_filter_stack(tree_path)
                    contents = filtered_output_bytes(contents, filters,
                        ContentFilterContext(tree_path, tree))
                try:
                    tt.create_file(contents, trans_id)
                finally:
                    try:
                        contents.close()
                    except AttributeError:
                        # after filtering, contents may no longer be file-like
                        pass
            count += 1
        offset += count
    for count, ((trans_id, tree_path), contents) in enumerate(
            tree.iter_files_bytes(new_desired_files)):
        if wt.supports_content_filtering():
            filters = wt._content_filter_stack(tree_path)
            contents = filtered_output_bytes(contents, filters,
                ContentFilterContext(tree_path, tree))
        tt.create_file(contents, trans_id)
        pb.update('Adding file contents', count + offset, total)


def _reparent_children(tt, old_parent, new_parent):
    for child in tt.iter_tree_children(old_parent):
        tt.adjust_path(tt.final_name(child), new_parent, child)


def _reparent_transform_children(tt, old_parent, new_parent):
    by_parent = tt.by_parent()
    for child in by_parent[old_parent]:
        tt.adjust_path(tt.final_name(child), new_parent, child)
    return by_parent[old_parent]


def _content_match(tree, entry, file_id, kind, target_path):
    if entry.kind != kind:
        return False
    if entry.kind == "directory":
        return True
    if entry.kind == "file":
        f = file(target_path, 'rb')
        try:
            if tree.get_file_text(file_id) == f.read():
                return True
        finally:
            f.close()
    elif entry.kind == "symlink":
        if tree.get_symlink_target(file_id) == os.readlink(target_path):
            return True
    return False


def resolve_checkout(tt, conflicts, divert):
    new_conflicts = set()
    for c_type, conflict in ((c[0], c) for c in conflicts):
        # Anything but a 'duplicate' would indicate programmer error
        if c_type != 'duplicate':
            raise AssertionError(c_type)
        # Now figure out which is new and which is old
        if tt.new_contents(conflict[1]):
            new_file = conflict[1]
            old_file = conflict[2]
        else:
            new_file = conflict[2]
            old_file = conflict[1]

        # We should only get here if the conflict wasn't completely
        # resolved
        final_parent = tt.final_parent(old_file)
        if new_file in divert:
            new_name = tt.final_name(old_file)+'.diverted'
            tt.adjust_path(new_name, final_parent, new_file)
            new_conflicts.add((c_type, 'Diverted to',
                               new_file, old_file))
        else:
            new_name = tt.final_name(old_file)+'.moved'
            tt.adjust_path(new_name, final_parent, old_file)
            new_conflicts.add((c_type, 'Moved existing file to',
                               old_file, new_file))
    return new_conflicts


def new_by_entry(tt, entry, parent_id, tree):
    """Create a new file according to its inventory entry"""
    name = entry.name
    kind = entry.kind
    if kind == 'file':
        contents = tree.get_file(entry.file_id).readlines()
        executable = tree.is_executable(entry.file_id)
        return tt.new_file(name, parent_id, contents, entry.file_id,
                           executable)
    elif kind in ('directory', 'tree-reference'):
        trans_id = tt.new_directory(name, parent_id, entry.file_id)
        if kind == 'tree-reference':
            tt.set_tree_reference(entry.reference_revision, trans_id)
        return trans_id
    elif kind == 'symlink':
        target = tree.get_symlink_target(entry.file_id)
        return tt.new_symlink(name, parent_id, target, entry.file_id)
    else:
        raise errors.BadFileKindError(name, kind)


def create_from_tree(tt, trans_id, tree, file_id, bytes=None,
                     filter_tree_path=None):
    """Create new file contents according to tree contents.

    :param filter_tree_path: the tree path to use to lookup
      content filters to apply to the bytes output in the working tree.
      This only applies if the working tree supports content filtering.
    """
    kind = tree.kind(file_id)
    if kind == 'directory':
        tt.create_directory(trans_id)
    elif kind == "file":
        if bytes is None:
            tree_file = tree.get_file(file_id)
            try:
                bytes = tree_file.readlines()
            finally:
                tree_file.close()
        wt = tt._tree
        if wt.supports_content_filtering() and filter_tree_path is not None:
            filters = wt._content_filter_stack(filter_tree_path)
            bytes = filtered_output_bytes(bytes, filters,
                ContentFilterContext(filter_tree_path, tree))
        tt.create_file(bytes, trans_id)
    elif kind == "symlink":
        tt.create_symlink(tree.get_symlink_target(file_id), trans_id)
    else:
        raise AssertionError('Unknown kind %r' % kind)
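
# Example (a sketch): copying one versioned file's content from a source
# tree into a transform, letting create_from_tree pick the right create_*
# call for the file's kind.  'tt' and 'source_tree' are assumed to exist:
#
#   trans_id = tt.create_path('COPYING', tt.root)
#   create_from_tree(tt, trans_id, source_tree,
#                    source_tree.path2id('COPYING'))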


def create_entry_executability(tt, entry, trans_id):
    """Set the executability of a trans_id according to an inventory entry"""
    if entry.kind == "file":
        tt.set_executability(entry.executable, trans_id)


@deprecated_function(deprecated_in((2, 3, 0)))
def get_backup_name(entry, by_parent, parent_trans_id, tt):
    return _get_backup_name(entry.name, by_parent, parent_trans_id, tt)


@deprecated_function(deprecated_in((2, 3, 0)))
def _get_backup_name(name, by_parent, parent_trans_id, tt):
    """Produce a backup-style name that appears to be available"""
    def name_gen():
        counter = 1
        while True:
            yield "%s.~%d~" % (name, counter)
            counter += 1
    for new_name in name_gen():
        if not tt.has_named_child(by_parent, parent_trans_id, new_name):
            return new_name


def _entry_changes(file_id, entry, working_tree):
    """Determine in which ways the inventory entry has changed.

    Returns booleans: has_contents, content_mod, meta_mod
    has_contents means there are currently contents, but they differ
    contents_mod means contents need to be modified
    meta_mod means the metadata needs to be modified
    """
    cur_entry = working_tree.inventory[file_id]
    try:
        working_kind = working_tree.kind(file_id)
        has_contents = True
    except NoSuchFile:
        has_contents = False
        contents_mod = True
        meta_mod = False
    if has_contents is True:
        if entry.kind != working_kind:
            contents_mod, meta_mod = True, False
        else:
            cur_entry._read_tree_state(working_tree.id2path(file_id),
                                       working_tree)
            contents_mod, meta_mod = entry.detect_changes(cur_entry)
            cur_entry._forget_tree_state()
    return has_contents, contents_mod, meta_mod


def revert(working_tree, target_tree, filenames, backups=False,
           pb=None, change_reporter=None):
    """Revert a working tree's contents to those of a target tree."""
    target_tree.lock_read()
    pb = ui.ui_factory.nested_progress_bar()
    tt = TreeTransform(working_tree, pb)
    try:
        pp = ProgressPhase("Revert phase", 3, pb)
        conflicts, merge_modified = _prepare_revert_transform(
            working_tree, target_tree, tt, filenames, backups, pp)
        if change_reporter:
            change_reporter = delta._ChangeReporter(
                unversioned_filter=working_tree.is_ignored)
            delta.report_changes(tt.iter_changes(), change_reporter)
        for conflict in conflicts:
            warning(conflict)
        pp.next_phase()
        tt.apply()
        working_tree.set_merge_modified(merge_modified)
    finally:
        target_tree.unlock()
        tt.finalize()
        pb.clear()
    return conflicts
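
# Example (a sketch): reverting two files in a working tree back to its
# basis revision, keeping backups of locally modified content.  'wt' is
# assumed to be an already-open WorkingTree:
#
#   revert(wt, wt.basis_tree(), ['foo.c', 'bar.c'], backups=True)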


def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
                              backups, pp, basis_tree=None,
                              merge_modified=None):
    child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
    try:
        if merge_modified is None:
            merge_modified = working_tree.merge_modified()
        merge_modified = _alter_files(working_tree, target_tree, tt,
                                      child_pb, filenames, backups,
                                      merge_modified, basis_tree)
    finally:
        child_pb.finished()
    child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
    try:
        raw_conflicts = resolve_conflicts(tt, child_pb,
            lambda t, c: conflict_pass(t, c, target_tree))
    finally:
        child_pb.finished()
    conflicts = cook_conflicts(raw_conflicts, tt)
    return conflicts, merge_modified


def _alter_files(working_tree, target_tree, tt, pb, specific_files,
                 backups, merge_modified, basis_tree=None):
    if basis_tree is not None:
        basis_tree.lock_read()
    change_list = target_tree.iter_changes(working_tree,
        specific_files=specific_files, pb=pb)
    if target_tree.get_root_id() is None:
        skip_root = True
    else:
        skip_root = False
    try:
        deferred_files = []
        for id_num, (file_id, path, changed_content, versioned, parent, name,
                kind, executable) in enumerate(change_list):
            if skip_root and file_id[0] is not None and parent[0] is None:
                continue
            trans_id = tt.trans_id_file_id(file_id)
            mode_id = None
            if changed_content:
                keep_content = False
                if kind[0] == 'file' and (backups or kind[1] is None):
                    wt_sha1 = working_tree.get_file_sha1(file_id)
                    if merge_modified.get(file_id) != wt_sha1:
                        # acquire the basis tree lazily to prevent the
                        # expense of accessing it when it's not needed ?
                        # (Guessing, RBC, 200702)
                        if basis_tree is None:
                            basis_tree = working_tree.basis_tree()
                            basis_tree.lock_read()
                        if file_id in basis_tree:
                            if wt_sha1 != basis_tree.get_file_sha1(file_id):
                                keep_content = True
                        elif kind[1] is None and not versioned[1]:
                            keep_content = True
                if kind[0] is not None:
                    if not keep_content:
                        tt.delete_contents(trans_id)
                    elif kind[1] is not None:
                        parent_trans_id = tt.trans_id_file_id(parent[0])
                        backup_name = tt._available_backup_name(
                            name[0], parent_trans_id)
                        tt.adjust_path(backup_name, parent_trans_id, trans_id)
                        new_trans_id = tt.create_path(name[0], parent_trans_id)
                        if versioned == (True, True):
                            tt.unversion_file(trans_id)
                            tt.version_file(file_id, new_trans_id)
                        # New contents should have the same unix perms as old
                        # contents
                        mode_id = trans_id
                        trans_id = new_trans_id
                if kind[1] in ('directory', 'tree-reference'):
                    tt.create_directory(trans_id)
                    if kind[1] == 'tree-reference':
                        revision = target_tree.get_reference_revision(file_id,
                                                                      path[1])
                        tt.set_tree_reference(revision, trans_id)
                elif kind[1] == 'symlink':
                    tt.create_symlink(target_tree.get_symlink_target(file_id),
                                      trans_id)
                elif kind[1] == 'file':
                    deferred_files.append((file_id, (trans_id, mode_id)))
                    if basis_tree is None:
                        basis_tree = working_tree.basis_tree()
                        basis_tree.lock_read()
                    new_sha1 = target_tree.get_file_sha1(file_id)
                    if (file_id in basis_tree and new_sha1 ==
                        basis_tree.get_file_sha1(file_id)):
                        if file_id in merge_modified:
                            del merge_modified[file_id]
                    else:
                        merge_modified[file_id] = new_sha1

                    # preserve the execute bit when backing up
                    if keep_content and executable[0] == executable[1]:
                        tt.set_executability(executable[1], trans_id)
                elif kind[1] is not None:
                    raise AssertionError(kind[1])
            if versioned == (False, True):
                tt.version_file(file_id, trans_id)
            if versioned == (True, False):
                tt.unversion_file(trans_id)
            if (name[1] is not None and
                (name[0] != name[1] or parent[0] != parent[1])):
                if name[1] == '' and parent[1] is None:
                    parent_trans = ROOT_PARENT
                else:
                    parent_trans = tt.trans_id_file_id(parent[1])
                if parent[0] is None and versioned[0]:
                    tt.adjust_root_path(name[1], parent_trans)
                else:
                    tt.adjust_path(name[1], parent_trans, trans_id)
            if executable[0] != executable[1] and kind[1] == "file":
                tt.set_executability(executable[1], trans_id)
        if working_tree.supports_content_filtering():
            for index, ((trans_id, mode_id), bytes) in enumerate(
                target_tree.iter_files_bytes(deferred_files)):
                file_id = deferred_files[index][0]
                # We're reverting a tree to the target tree so using the
                # target tree to find the file path seems the best choice
                # here IMO - Ian C 27/Oct/2009
                filter_tree_path = target_tree.id2path(file_id)
                filters = working_tree._content_filter_stack(filter_tree_path)
                bytes = filtered_output_bytes(bytes, filters,
                    ContentFilterContext(filter_tree_path, working_tree))
                tt.create_file(bytes, trans_id, mode_id)
        else:
            for (trans_id, mode_id), bytes in target_tree.iter_files_bytes(
                deferred_files):
                tt.create_file(bytes, trans_id, mode_id)
        tt.fixup_new_roots()
    finally:
        if basis_tree is not None:
            basis_tree.unlock()
    return merge_modified


def resolve_conflicts(tt, pb=None, pass_func=None):
    """Make many conflict-resolution attempts, but die if they fail"""
    if pass_func is None:
        pass_func = conflict_pass
    new_conflicts = set()
    pb = ui.ui_factory.nested_progress_bar()
    try:
        for n in range(10):
            pb.update('Resolution pass', n+1, 10)
            conflicts = tt.find_conflicts()
            if len(conflicts) == 0:
                return new_conflicts
            new_conflicts.update(pass_func(tt, conflicts))
        raise MalformedTransform(conflicts=conflicts)
    finally:
        pb.finished()
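
# resolve_conflicts() is deliberately iterative: a single pass may itself
# create new conflicts (moving a file aside can collide with another staged
# name), so the loop re-runs find_conflicts() up to ten times and only gives
# up with MalformedTransform if the set never empties.  Sketch of a custom
# pass, mirroring how build_tree wires in its resolver above; 'other_tree'
# is a hypothetical supplemental Tree:
#
#   def my_pass(tt, conflicts):
#       return conflict_pass(tt, conflicts, path_tree=other_tree)
#   resolve_conflicts(tt, pass_func=my_pass)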


def conflict_pass(tt, conflicts, path_tree=None):
    """Resolve some classes of conflicts.

    :param tt: The transform to resolve conflicts in
    :param conflicts: The conflicts to resolve
    :param path_tree: A Tree to get supplemental paths from
    """
    new_conflicts = set()
    for c_type, conflict in ((c[0], c) for c in conflicts):
        if c_type == 'duplicate id':
            tt.unversion_file(conflict[1])
            new_conflicts.add((c_type, 'Unversioned existing file',
                               conflict[1], conflict[2], ))
        elif c_type == 'duplicate':
            # files that were renamed take precedence
            final_parent = tt.final_parent(conflict[1])
            if tt.path_changed(conflict[1]):
                existing_file, new_file = conflict[2], conflict[1]
            else:
                existing_file, new_file = conflict[1], conflict[2]
            new_name = tt.final_name(existing_file)+'.moved'
            tt.adjust_path(new_name, final_parent, existing_file)
            new_conflicts.add((c_type, 'Moved existing file to',
                               existing_file, new_file))
        elif c_type == 'parent loop':
            # break the loop by undoing one of the ops that caused the loop
            cur = conflict[1]
            while not tt.path_changed(cur):
                cur = tt.final_parent(cur)
            new_conflicts.add((c_type, 'Cancelled move', cur,
                               tt.final_parent(cur),))
            tt.adjust_path(tt.final_name(cur), tt.get_tree_parent(cur), cur)
        elif c_type == 'missing parent':
            trans_id = conflict[1]
            if trans_id in tt._removed_contents:
                cancel_deletion = True
                orphans = tt._get_potential_orphans(trans_id)
                if orphans:
                    cancel_deletion = False
                    # All children are orphans
                    for o in orphans:
                        try:
                            tt.new_orphan(o, trans_id)
                        except OrphaningError:
                            # Something bad happened so we cancel the directory
                            # deletion which will leave it in place with a
                            # conflict. The user can deal with it from there.
                            # Note that this also catch the case where we don't
                            # want to create orphans and leave the directory in
                            # place.
                            cancel_deletion = True
                            break
                if cancel_deletion:
                    # Cancel the directory deletion
                    tt.cancel_deletion(trans_id)
                    new_conflicts.add(('deleting parent', 'Not deleting',
                                       trans_id))
            else:
                create = True
                try:
                    tt.final_name(trans_id)
                except NoFinalPath:
                    if path_tree is not None:
                        file_id = tt.final_file_id(trans_id)
                        if file_id is None:
                            file_id = tt.inactive_file_id(trans_id)
                        entry = path_tree.inventory[file_id]
                        # special-case the other tree root (move its
                        # children to current root)
                        if entry.parent_id is None:
                            create = False
                            moved = _reparent_transform_children(
                                tt, trans_id, tt.root)
                            for child in moved:
                                new_conflicts.add((c_type, 'Moved to root',
                                                   child))
                        else:
                            parent_trans_id = tt.trans_id_file_id(
                                entry.parent_id)
                            tt.adjust_path(entry.name, parent_trans_id,
                                           trans_id)
                if create:
                    tt.create_directory(trans_id)
                    new_conflicts.add((c_type, 'Created directory', trans_id))
        elif c_type == 'unversioned parent':
            file_id = tt.inactive_file_id(conflict[1])
            # special-case the other tree root (move its children instead)
            if path_tree and file_id in path_tree:
                if path_tree.inventory[file_id].parent_id is None:
                    continue
            tt.version_file(file_id, conflict[1])
            new_conflicts.add((c_type, 'Versioned directory', conflict[1]))
        elif c_type == 'non-directory parent':
            parent_id = conflict[1]
            parent_parent = tt.final_parent(parent_id)
            parent_name = tt.final_name(parent_id)
            parent_file_id = tt.final_file_id(parent_id)
            new_parent_id = tt.new_directory(parent_name + '.new',
                parent_parent, parent_file_id)
            _reparent_transform_children(tt, parent_id, new_parent_id)
            if parent_file_id is not None:
                tt.unversion_file(parent_id)
            new_conflicts.add((c_type, 'Created directory', new_parent_id))
        elif c_type == 'versioning no contents':
            tt.cancel_versioning(conflict[1])
    return new_conflicts


def cook_conflicts(raw_conflicts, tt):
    """Generate a list of cooked conflicts, sorted by file path"""
    from bzrlib.conflicts import Conflict
    conflict_iter = iter_cook_conflicts(raw_conflicts, tt)
    return sorted(conflict_iter, key=Conflict.sort_key)


def iter_cook_conflicts(raw_conflicts, tt):
    from bzrlib.conflicts import Conflict
    fp = FinalPaths(tt)
    for conflict in raw_conflicts:
        c_type = conflict[0]
        action = conflict[1]
        modified_path = fp.get_path(conflict[2])
        modified_id = tt.final_file_id(conflict[2])
        if len(conflict) == 3:
            yield Conflict.factory(c_type, action=action, path=modified_path,
                                   file_id=modified_id)
        else:
            conflicting_path = fp.get_path(conflict[3])
            conflicting_id = tt.final_file_id(conflict[3])
            yield Conflict.factory(c_type, action=action, path=modified_path,
                                   file_id=modified_id,
                                   conflict_path=conflicting_path,
                                   conflict_file_id=conflicting_id)


class _FileMover(object):
    """Moves and deletes files for TreeTransform, tracking operations"""

    def __init__(self):
        self.past_renames = []
        self.pending_deletions = []

    def rename(self, from_, to):
        """Rename a file from one path to another."""
        try:
            os.rename(from_, to)
        except OSError, e:
            if e.errno in (errno.EEXIST, errno.ENOTEMPTY):
                raise errors.FileExists(to, str(e))
            # normal OSError doesn't include filenames so it's hard to see where
            # the problem is, see https://bugs.launchpad.net/bzr/+bug/491763
            raise errors.TransformRenameFailed(from_, to, str(e), e.errno)
        self.past_renames.append((from_, to))

    def pre_delete(self, from_, to):
        """Rename a file out of the way and mark it for deletion.

        Unlike os.unlink, this works equally well for files and directories.
        :param from_: The current file path
        :param to: A temporary path for the file
        """
        self.rename(from_, to)
        self.pending_deletions.append(to)

    def rollback(self):
        """Reverse all renames that have been performed"""
        for from_, to in reversed(self.past_renames):
            try:
                os.rename(to, from_)
            except OSError, e:
                raise errors.TransformRenameFailed(to, from_, str(e), e.errno)
        # after rollback, don't reuse _FileMover
        past_renames = None
        pending_deletions = None

    def apply_deletions(self):
        """Apply all marked deletions"""
        for path in self.pending_deletions:
            delete_any(path)
        # after apply_deletions, don't reuse _FileMover
        past_renames = None
        pending_deletions = None
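

# Example use of _FileMover (a sketch): every rename is recorded so a failed
# sequence can be undone, and deletions are deferred until the caller commits
# to them.
#
#   mover = _FileMover()
#   try:
#       mover.rename('a', 'a.tmp')
#       mover.pre_delete('b', 'b.doomed')
#   except errors.TransformRenameFailed:
#       mover.rollback()           # puts 'a' and 'b' back where they were
#       raise
#   else:
#       mover.apply_deletions()    # actually removes 'b.doomed'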