# Copyright (C) 2006-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import errno
import os
import time
from stat import S_ISREG, S_IEXEC

from bzrlib import errors, lazy_import, registry, trace
lazy_import.lazy_import(globals(), """
from bzrlib import (
    bencode,
    commit,
    inventory,
    multiparent,
    osutils,
    revision as _mod_revision,
    ui,
    urlutils,
    )
""")
from bzrlib.errors import (DuplicateKey, MalformedTransform, NoSuchFile,
                           ReusingTransform, CantMoveRoot,
                           ExistingLimbo, ImmortalLimbo, NoFinalPath,
                           UnableCreateSymlink)
from bzrlib.filters import filtered_output_bytes, ContentFilterContext
from bzrlib.osutils import delete_any, file_kind, pathjoin, supports_executable
from bzrlib.progress import ProgressPhase
from bzrlib.symbol_versioning import deprecated_in, deprecated_method


ROOT_PARENT = "root-parent"
70
def unique_add(map, key, value):
    if key in map:
        raise DuplicateKey(key=key)
    map[key] = value


class _TransformResults(object):
78
def __init__(self, modified_paths, rename_count):
80
self.modified_paths = modified_paths
81
self.rename_count = rename_count
84
class TreeTransformBase(object):
85
"""The base class for TreeTransform and its kin."""
87
def __init__(self, tree, pb=None,
91
:param tree: The tree that will be transformed, but not necessarily
94
:param case_sensitive: If True, the target of the transform is
95
case sensitive, not just case preserving.
100
# mapping of trans_id -> new basename
102
# mapping of trans_id -> new parent trans_id
103
self._new_parent = {}
104
# mapping of trans_id with new contents -> new file_kind
105
self._new_contents = {}
106
# mapping of trans_id => (sha1 of content, stat_value)
107
self._observed_sha1s = {}
108
# Set of trans_ids whose contents will be removed
109
self._removed_contents = set()
110
# Mapping of trans_id -> new execute-bit value
111
self._new_executability = {}
112
# Mapping of trans_id -> new tree-reference value
113
self._new_reference_revision = {}
114
# Mapping of trans_id -> new file_id
116
# Mapping of old file-id -> trans_id
117
self._non_present_ids = {}
118
# Mapping of new file_id -> trans_id
120
# Set of trans_ids that will be removed
121
self._removed_id = set()
122
# Mapping of path in old tree -> trans_id
123
self._tree_path_ids = {}
124
# Mapping trans_id -> path in old tree
125
self._tree_id_paths = {}
126
# The trans_id that will be used as the tree root
127
root_id = tree.get_root_id()
128
if root_id is not None:
129
self._new_root = self.trans_id_tree_file_id(root_id)
        else:
            self._new_root = None
132
# Indicator of whether the transform has been applied
136
# Whether the target is case sensitive
137
self._case_sensitive_target = case_sensitive
138
# A counter of how many files have been renamed
139
self.rename_count = 0
142
"""Support Context Manager API."""
145
def __exit__(self, exc_type, exc_val, exc_tb):
146
"""Support Context Manager API."""
150
"""Release the working tree lock, if held.
152
This is required if apply has not been invoked, but can be invoked
155
if self._tree is None:
160
def __get_root(self):
161
return self._new_root
163
root = property(__get_root)
165
    def _assign_id(self):
        """Produce a new transform id"""
        new_id = "new-%s" % self._id_number
        self._id_number += 1
        return new_id

def create_path(self, name, parent):
172
"""Assign a transaction id to a new path"""
173
trans_id = self._assign_id()
174
unique_add(self._new_name, trans_id, name)
175
unique_add(self._new_parent, trans_id, parent)
178
def adjust_path(self, name, parent, trans_id):
179
"""Change the path that is assigned to a transaction id."""
181
raise ValueError("Parent trans-id may not be None")
182
if trans_id == self._new_root:
184
self._new_name[trans_id] = name
185
self._new_parent[trans_id] = parent
187
def adjust_root_path(self, name, parent):
188
"""Emulate moving the root by moving all children, instead.
190
We do this by undoing the association of root's transaction id with the
191
current tree. This allows us to create a new directory with that
192
transaction id. We unversion the root directory and version the
193
physically new directory, and hope someone versions the tree root
196
old_root = self._new_root
197
old_root_file_id = self.final_file_id(old_root)
198
# force moving all children of root
199
for child_id in self.iter_tree_children(old_root):
200
if child_id != parent:
201
self.adjust_path(self.final_name(child_id),
202
self.final_parent(child_id), child_id)
203
file_id = self.final_file_id(child_id)
204
if file_id is not None:
205
self.unversion_file(child_id)
206
self.version_file(file_id, child_id)
208
# the physical root needs a new transaction id
209
self._tree_path_ids.pop("")
210
self._tree_id_paths.pop(old_root)
211
self._new_root = self.trans_id_tree_file_id(self._tree.get_root_id())
212
if parent == old_root:
213
parent = self._new_root
214
self.adjust_path(name, parent, old_root)
215
self.create_directory(old_root)
216
self.version_file(old_root_file_id, old_root)
217
self.unversion_file(self._new_root)
219
def fixup_new_roots(self):
220
"""Reinterpret requests to change the root directory
222
Instead of creating a root directory, or moving an existing directory,
223
all the attributes and children of the new root are applied to the
224
existing root directory.
226
This means that the old root trans-id becomes obsolete, so it is
227
recommended only to invoke this after the root trans-id has become
231
new_roots = [k for k, v in self._new_parent.iteritems() if v is
233
if len(new_roots) < 1:
234
if self.final_kind(self.root) is None:
235
self.cancel_deletion(self.root)
236
if self.final_file_id(self.root) is None:
237
self.version_file(self.tree_file_id(self.root),
240
if len(new_roots) != 1:
241
raise ValueError('A tree cannot have two roots!')
242
if self._new_root is None:
243
self._new_root = new_roots[0]
245
old_new_root = new_roots[0]
246
# unversion the new root's directory.
247
if self.final_kind(self._new_root) is None:
248
file_id = self.final_file_id(old_new_root)
250
file_id = self.final_file_id(self._new_root)
251
if old_new_root in self._new_id:
252
self.cancel_versioning(old_new_root)
254
self.unversion_file(old_new_root)
255
# if, at this stage, root still has an old file_id, zap it so we can
256
# stick a new one in.
257
if (self.tree_file_id(self._new_root) is not None and
258
self._new_root not in self._removed_id):
259
self.unversion_file(self._new_root)
260
if file_id is not None:
261
self.version_file(file_id, self._new_root)
263
# Now move children of new root into old root directory.
264
# Ensure all children are registered with the transaction, but don't
265
# use directly-- some tree children have new parents
266
list(self.iter_tree_children(old_new_root))
267
# Move all children of new root into old root directory.
268
for child in self.by_parent().get(old_new_root, []):
269
self.adjust_path(self.final_name(child), self._new_root, child)
271
# Ensure old_new_root has no directory.
272
if old_new_root in self._new_contents:
273
self.cancel_creation(old_new_root)
275
self.delete_contents(old_new_root)
277
# prevent deletion of root directory.
278
if self._new_root in self._removed_contents:
279
self.cancel_deletion(self._new_root)
281
# destroy path info for old_new_root.
282
del self._new_parent[old_new_root]
283
del self._new_name[old_new_root]
285
def trans_id_tree_file_id(self, inventory_id):
286
"""Determine the transaction id of a working tree file.
288
This reflects only files that already exist, not ones that will be
289
added by transactions.
291
if inventory_id is None:
292
raise ValueError('None is not a valid file id')
293
path = self._tree.id2path(inventory_id)
294
return self.trans_id_tree_path(path)
296
def trans_id_file_id(self, file_id):
297
"""Determine or set the transaction id associated with a file ID.
298
A new id is only created for file_ids that were never present. If
299
a transaction has been unversioned, it is deliberately still returned.
300
(this will likely lead to an unversioned parent conflict.)
303
raise ValueError('None is not a valid file id')
304
if file_id in self._r_new_id and self._r_new_id[file_id] is not None:
305
return self._r_new_id[file_id]
308
self._tree.iter_entries_by_dir([file_id]).next()
309
except StopIteration:
310
if file_id in self._non_present_ids:
311
return self._non_present_ids[file_id]
313
trans_id = self._assign_id()
314
self._non_present_ids[file_id] = trans_id
317
return self.trans_id_tree_file_id(file_id)
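    # Illustrative sketch (not part of the original module): the three ways
    # trans_id_file_id can resolve a file id.  The ids below are hypothetical.
    #
    #   tt.trans_id_file_id('id-already-in-tree')    # trans_id of the existing path
    #   tt.trans_id_file_id('id-added-by-version_file')    # looked up in _r_new_id
    #   tt.trans_id_file_id('id-never-present')    # fresh trans_id, cached in _non_present_ids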
319
def trans_id_tree_path(self, path):
320
"""Determine (and maybe set) the transaction ID for a tree path."""
321
path = self.canonical_path(path)
322
if path not in self._tree_path_ids:
323
self._tree_path_ids[path] = self._assign_id()
324
self._tree_id_paths[self._tree_path_ids[path]] = path
325
return self._tree_path_ids[path]
327
def get_tree_parent(self, trans_id):
328
"""Determine id of the parent in the tree."""
329
path = self._tree_id_paths[trans_id]
332
return self.trans_id_tree_path(os.path.dirname(path))
334
def delete_contents(self, trans_id):
335
"""Schedule the contents of a path entry for deletion"""
336
kind = self.tree_kind(trans_id)
338
self._removed_contents.add(trans_id)
340
def cancel_deletion(self, trans_id):
341
"""Cancel a scheduled deletion"""
342
self._removed_contents.remove(trans_id)
344
def unversion_file(self, trans_id):
345
"""Schedule a path entry to become unversioned"""
346
self._removed_id.add(trans_id)
348
def delete_versioned(self, trans_id):
349
"""Delete and unversion a versioned file"""
350
self.delete_contents(trans_id)
351
self.unversion_file(trans_id)
353
def set_executability(self, executability, trans_id):
354
"""Schedule setting of the 'execute' bit
355
To unschedule, set to None
357
if executability is None:
358
del self._new_executability[trans_id]
360
unique_add(self._new_executability, trans_id, executability)
362
def set_tree_reference(self, revision_id, trans_id):
363
"""Set the reference associated with a directory"""
364
unique_add(self._new_reference_revision, trans_id, revision_id)
366
def version_file(self, file_id, trans_id):
367
"""Schedule a file to become versioned."""
370
unique_add(self._new_id, trans_id, file_id)
371
unique_add(self._r_new_id, file_id, trans_id)
373
def cancel_versioning(self, trans_id):
374
"""Undo a previous versioning of a file"""
375
file_id = self._new_id[trans_id]
376
del self._new_id[trans_id]
377
del self._r_new_id[file_id]
379
def new_paths(self, filesystem_only=False):
380
"""Determine the paths of all new and changed files.
382
:param filesystem_only: if True, only calculate values for files
383
that require renames or execute bit changes.
387
stale_ids = self._needs_rename.difference(self._new_name)
388
stale_ids.difference_update(self._new_parent)
389
stale_ids.difference_update(self._new_contents)
390
stale_ids.difference_update(self._new_id)
391
needs_rename = self._needs_rename.difference(stale_ids)
392
id_sets = (needs_rename, self._new_executability)
394
id_sets = (self._new_name, self._new_parent, self._new_contents,
395
self._new_id, self._new_executability)
396
for id_set in id_sets:
397
new_ids.update(id_set)
398
return sorted(FinalPaths(self).get_paths(new_ids))
400
def _inventory_altered(self):
401
"""Determine which trans_ids need new Inventory entries.
        A new entry is needed when anything that would be reflected by an
inventory entry changes, including file name, file_id, parent file_id,
405
file kind, and the execute bit.
407
Some care is taken to return entries with real changes, not cases
408
where the value is deleted and then restored to its original value,
409
but some actually unchanged values may be returned.
411
:returns: A list of (path, trans_id) for all items requiring an
412
inventory change. Ordered by path.
415
# Find entries whose file_ids are new (or changed).
416
new_file_id = set(t for t in self._new_id
417
if self._new_id[t] != self.tree_file_id(t))
418
for id_set in [self._new_name, self._new_parent, new_file_id,
419
self._new_executability]:
420
changed_ids.update(id_set)
421
# removing implies a kind change
422
changed_kind = set(self._removed_contents)
424
changed_kind.intersection_update(self._new_contents)
425
# Ignore entries that are already known to have changed.
426
changed_kind.difference_update(changed_ids)
427
# to keep only the truly changed ones
428
changed_kind = (t for t in changed_kind
429
if self.tree_kind(t) != self.final_kind(t))
430
# all kind changes will alter the inventory
431
changed_ids.update(changed_kind)
432
# To find entries with changed parent_ids, find parents which existed,
433
# but changed file_id.
434
changed_file_id = set(t for t in new_file_id if t in self._removed_id)
435
# Now add all their children to the set.
436
for parent_trans_id in new_file_id:
437
changed_ids.update(self.iter_tree_children(parent_trans_id))
438
return sorted(FinalPaths(self).get_paths(changed_ids))
440
def final_kind(self, trans_id):
441
"""Determine the final file kind, after any changes applied.
443
:return: None if the file does not exist/has no contents. (It is
444
conceivable that a path would be created without the corresponding
445
contents insertion command)
447
if trans_id in self._new_contents:
448
return self._new_contents[trans_id]
449
elif trans_id in self._removed_contents:
452
return self.tree_kind(trans_id)
454
def tree_file_id(self, trans_id):
455
"""Determine the file id associated with the trans_id in the tree"""
457
path = self._tree_id_paths[trans_id]
459
# the file is a new, unversioned file, or invalid trans_id
461
# the file is old; the old id is still valid
462
if self._new_root == trans_id:
463
return self._tree.get_root_id()
464
return self._tree.path2id(path)
466
def final_file_id(self, trans_id):
467
"""Determine the file id after any changes are applied, or None.
469
None indicates that the file will not be versioned after changes are
473
return self._new_id[trans_id]
475
if trans_id in self._removed_id:
477
return self.tree_file_id(trans_id)
479
def inactive_file_id(self, trans_id):
480
"""Return the inactive file_id associated with a transaction id.
481
That is, the one in the tree or in non_present_ids.
482
The file_id may actually be active, too.
484
file_id = self.tree_file_id(trans_id)
485
if file_id is not None:
487
for key, value in self._non_present_ids.iteritems():
488
if value == trans_id:
491
def final_parent(self, trans_id):
492
"""Determine the parent file_id, after any changes are applied.
494
ROOT_PARENT is returned for the tree root.
497
return self._new_parent[trans_id]
499
return self.get_tree_parent(trans_id)
501
def final_name(self, trans_id):
502
"""Determine the final filename, after all changes are applied."""
504
return self._new_name[trans_id]
507
return os.path.basename(self._tree_id_paths[trans_id])
509
raise NoFinalPath(trans_id, self)
512
"""Return a map of parent: children for known parents.
514
Only new paths and parents of tree files with assigned ids are used.
517
items = list(self._new_parent.iteritems())
518
items.extend((t, self.final_parent(t)) for t in
519
self._tree_id_paths.keys())
520
for trans_id, parent_id in items:
521
if parent_id not in by_parent:
522
by_parent[parent_id] = set()
523
by_parent[parent_id].add(trans_id)
526
def path_changed(self, trans_id):
527
"""Return True if a trans_id's path has changed."""
528
return (trans_id in self._new_name) or (trans_id in self._new_parent)
530
def new_contents(self, trans_id):
531
return (trans_id in self._new_contents)
533
def find_conflicts(self):
534
"""Find any violations of inventory or filesystem invariants"""
535
if self._done is True:
536
raise ReusingTransform()
538
# ensure all children of all existent parents are known
539
# all children of non-existent parents are known, by definition.
540
self._add_tree_children()
541
by_parent = self.by_parent()
542
conflicts.extend(self._unversioned_parents(by_parent))
543
conflicts.extend(self._parent_loops())
544
conflicts.extend(self._duplicate_entries(by_parent))
545
conflicts.extend(self._duplicate_ids())
546
conflicts.extend(self._parent_type_conflicts(by_parent))
547
conflicts.extend(self._improper_versioning())
548
conflicts.extend(self._executability_conflicts())
549
conflicts.extend(self._overwrite_conflicts())
552
def _check_malformed(self):
553
conflicts = self.find_conflicts()
554
if len(conflicts) != 0:
555
raise MalformedTransform(conflicts=conflicts)
557
def _add_tree_children(self):
558
"""Add all the children of all active parents to the known paths.
560
Active parents are those which gain children, and those which are
561
removed. This is a necessary first step in detecting conflicts.
563
parents = self.by_parent().keys()
564
parents.extend([t for t in self._removed_contents if
565
self.tree_kind(t) == 'directory'])
566
for trans_id in self._removed_id:
567
file_id = self.tree_file_id(trans_id)
568
if file_id is not None:
569
# XXX: This seems like something that should go via a different
571
if self._tree.inventory[file_id].kind == 'directory':
572
parents.append(trans_id)
573
elif self.tree_kind(trans_id) == 'directory':
574
parents.append(trans_id)
576
for parent_id in parents:
577
# ensure that all children are registered with the transaction
578
list(self.iter_tree_children(parent_id))
580
@deprecated_method(deprecated_in((2, 3, 0)))
581
def has_named_child(self, by_parent, parent_id, name):
582
return self._has_named_child(
583
name, parent_id, known_children=by_parent.get(parent_id, []))
585
def _has_named_child(self, name, parent_id, known_children):
        """Does a parent already have a named child.

:param name: The searched for name.
590
:param parent_id: The parent for which the check is made.
592
:param known_children: The already known children. This should have
593
been recently obtained from `self.by_parent.get(parent_id)`
594
(or will be if None is passed).
596
if known_children is None:
597
known_children = self.by_parent().get(parent_id, [])
598
for child in known_children:
599
if self.final_name(child) == name:
601
parent_path = self._tree_id_paths.get(parent_id, None)
602
if parent_path is None:
603
# No parent... no children
605
child_path = joinpath(parent_path, name)
606
child_id = self._tree_path_ids.get(child_path, None)
608
# Not known by the tree transform yet, check the filesystem
609
return osutils.lexists(self._tree.abspath(child_path))
611
raise AssertionError('child_id is missing: %s, %s, %s'
612
% (name, parent_id, child_id))
614
def _available_backup_name(self, name, target_id):
615
"""Find an available backup name.
617
:param name: The basename of the file.
619
:param target_id: The directory trans_id where the backup should
622
known_children = self.by_parent().get(target_id, [])
623
return osutils.available_backup_name(
625
lambda base: self._has_named_child(
626
base, target_id, known_children))
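    # Illustrative sketch (not part of the original module): if 'foo' already
    # exists in the target directory, osutils.available_backup_name is expected
    # to fall back to a suffixed variant such as 'foo.~1~' (the exact suffix is
    # an assumption about osutils, shown only for illustration).
    #
    #   backup = tt._available_backup_name('foo', tt.root)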
628
def _parent_loops(self):
629
"""No entry should be its own ancestor"""
631
for trans_id in self._new_parent:
634
while parent_id is not ROOT_PARENT:
637
parent_id = self.final_parent(parent_id)
640
if parent_id == trans_id:
641
conflicts.append(('parent loop', trans_id))
642
if parent_id in seen:
646
def _unversioned_parents(self, by_parent):
647
"""If parent directories are versioned, children must be versioned."""
649
for parent_id, children in by_parent.iteritems():
650
if parent_id is ROOT_PARENT:
652
if self.final_file_id(parent_id) is not None:
654
for child_id in children:
655
if self.final_file_id(child_id) is not None:
656
conflicts.append(('unversioned parent', parent_id))
660
def _improper_versioning(self):
661
"""Cannot version a file with no contents, or a bad type.
663
However, existing entries with no contents are okay.
666
for trans_id in self._new_id.iterkeys():
667
kind = self.final_kind(trans_id)
669
conflicts.append(('versioning no contents', trans_id))
671
if not inventory.InventoryEntry.versionable_kind(kind):
672
conflicts.append(('versioning bad kind', trans_id, kind))
675
def _executability_conflicts(self):
676
"""Check for bad executability changes.
678
Only versioned files may have their executability set, because
679
1. only versioned entries can have executability under windows
680
2. only files can be executable. (The execute bit on a directory
681
does not indicate searchability)
684
for trans_id in self._new_executability:
685
if self.final_file_id(trans_id) is None:
686
conflicts.append(('unversioned executability', trans_id))
688
if self.final_kind(trans_id) != "file":
689
conflicts.append(('non-file executability', trans_id))
692
def _overwrite_conflicts(self):
693
"""Check for overwrites (not permitted on Win32)"""
695
for trans_id in self._new_contents:
696
if self.tree_kind(trans_id) is None:
698
if trans_id not in self._removed_contents:
699
conflicts.append(('overwrite', trans_id,
700
self.final_name(trans_id)))
703
def _duplicate_entries(self, by_parent):
704
"""No directory may have two entries with the same name."""
706
if (self._new_name, self._new_parent) == ({}, {}):
708
for children in by_parent.itervalues():
710
for child_tid in children:
711
name = self.final_name(child_tid)
713
# Keep children only if they still exist in the end
714
if not self._case_sensitive_target:
716
name_ids.append((name, child_tid))
720
for name, trans_id in name_ids:
721
kind = self.final_kind(trans_id)
722
file_id = self.final_file_id(trans_id)
723
if kind is None and file_id is None:
725
if name == last_name:
726
conflicts.append(('duplicate', last_trans_id, trans_id,
729
last_trans_id = trans_id
732
def _duplicate_ids(self):
733
"""Each inventory id may only be used once"""
735
removed_tree_ids = set((self.tree_file_id(trans_id) for trans_id in
737
all_ids = self._tree.all_file_ids()
738
active_tree_ids = all_ids.difference(removed_tree_ids)
739
for trans_id, file_id in self._new_id.iteritems():
740
if file_id in active_tree_ids:
741
old_trans_id = self.trans_id_tree_file_id(file_id)
742
conflicts.append(('duplicate id', old_trans_id, trans_id))
745
def _parent_type_conflicts(self, by_parent):
746
"""Children must have a directory parent"""
748
for parent_id, children in by_parent.iteritems():
749
if parent_id is ROOT_PARENT:
752
for child_id in children:
753
if self.final_kind(child_id) is not None:
758
# There is at least a child, so we need an existing directory to
760
kind = self.final_kind(parent_id)
762
# The directory will be deleted
763
conflicts.append(('missing parent', parent_id))
764
elif kind != "directory":
765
# Meh, we need a *directory* to put something in it
766
conflicts.append(('non-directory parent', parent_id))
769
def _set_executability(self, path, trans_id):
770
"""Set the executability of versioned files """
771
if supports_executable():
772
new_executability = self._new_executability[trans_id]
773
abspath = self._tree.abspath(path)
774
current_mode = os.stat(abspath).st_mode
775
            if new_executability:
                umask = os.umask(0)
                os.umask(umask)
to_mode = current_mode | (0100 & ~umask)
779
# Enable x-bit for others only if they can read it.
780
if current_mode & 0004:
781
to_mode |= 0001 & ~umask
782
if current_mode & 0040:
783
to_mode |= 0010 & ~umask
            else:
                to_mode = current_mode & ~0111
786
osutils.chmod_if_possible(abspath, to_mode)
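    # Worked example (not part of the original module): with a current mode of
    # 0644 and a umask of 022, enabling executability above yields
    #   0644 | (0100 & ~022) -> 0744    # owner execute
    #   0744 | (0001 & ~022) -> 0745    # other read bit set, so other execute
    #   0745 | (0010 & ~022) -> 0755    # group read bit set, so group execute
    # i.e. a 0644 file becomes 0755; clearing executability masks with ~0111.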
788
def _new_entry(self, name, parent_id, file_id):
789
"""Helper function to create a new filesystem entry."""
790
trans_id = self.create_path(name, parent_id)
791
if file_id is not None:
792
self.version_file(file_id, trans_id)
795
def new_file(self, name, parent_id, contents, file_id=None,
796
executable=None, sha1=None):
797
"""Convenience method to create files.
799
name is the name of the file to create.
800
parent_id is the transaction id of the parent directory of the file.
801
contents is an iterator of bytestrings, which will be used to produce
803
:param file_id: The inventory ID of the file, if it is to be versioned.
804
:param executable: Only valid when a file_id has been supplied.
806
trans_id = self._new_entry(name, parent_id, file_id)
807
# TODO: rather than scheduling a set_executable call,
808
# have create_file create the file with the right mode.
809
self.create_file(contents, trans_id, sha1=sha1)
810
if executable is not None:
811
            self.set_executability(executable, trans_id)
        return trans_id

def new_directory(self, name, parent_id, file_id=None):
815
"""Convenience method to create directories.
817
name is the name of the directory to create.
818
parent_id is the transaction id of the parent directory of the
820
file_id is the inventory ID of the directory, if it is to be versioned.
822
trans_id = self._new_entry(name, parent_id, file_id)
823
        self.create_directory(trans_id)
        return trans_id

def new_symlink(self, name, parent_id, target, file_id=None):
827
"""Convenience method to create symbolic link.
829
name is the name of the symlink to create.
830
parent_id is the transaction id of the parent directory of the symlink.
831
target is a bytestring of the target of the symlink.
832
file_id is the inventory ID of the file, if it is to be versioned.
834
trans_id = self._new_entry(name, parent_id, file_id)
835
self.create_symlink(target, trans_id)
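    # Illustrative sketch (not part of the original module): building a small
    # subtree with the convenience methods, assuming ``tt`` is a TreeTransform
    # and the names/file ids are hypothetical.
    #
    #   dir_id = tt.new_directory('doc', tt.root, 'doc-id')
    #   tt.new_file('index.txt', dir_id, ['hello\n'], file_id='index-id')
    #   tt.new_symlink('latest', dir_id, 'index.txt')
    #   tt.apply()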
838
def new_orphan(self, trans_id, parent_id):
839
"""Schedule an item to be orphaned.
841
When a directory is about to be removed, its children, if they are not
842
        versioned, are moved out of the way: they don't have a parent anymore.
844
:param trans_id: The trans_id of the existing item.
845
:param parent_id: The parent trans_id of the item.
847
raise NotImplementedError(self.new_orphan)
849
def _get_potential_orphans(self, dir_id):
850
"""Find the potential orphans in a directory.
852
A directory can't be safely deleted if there are versioned files in it.
853
If all the contained files are unversioned then they can be orphaned.
855
The 'None' return value means that the directory contains at least one
856
versioned file and should not be deleted.
858
:param dir_id: The directory trans id.
860
:return: A list of the orphan trans ids or None if at least one
861
versioned file is present.
864
# Find the potential orphans, stop if one item should be kept
865
for child_tid in self.by_parent()[dir_id]:
866
if child_tid in self._removed_contents:
867
# The child is removed as part of the transform. Since it was
868
# versioned before, it's not an orphan
870
elif self.final_file_id(child_tid) is None:
871
# The child is not versioned
872
orphans.append(child_tid)
874
# We have a versioned file here, searching for orphans is
880
def _affected_ids(self):
881
"""Return the set of transform ids affected by the transform"""
882
trans_ids = set(self._removed_id)
883
trans_ids.update(self._new_id.keys())
884
trans_ids.update(self._removed_contents)
885
trans_ids.update(self._new_contents.keys())
886
trans_ids.update(self._new_executability.keys())
887
trans_ids.update(self._new_name.keys())
888
trans_ids.update(self._new_parent.keys())
891
def _get_file_id_maps(self):
892
"""Return mapping of file_ids to trans_ids in the to and from states"""
893
trans_ids = self._affected_ids()
896
# Build up two dicts: trans_ids associated with file ids in the
897
# FROM state, vs the TO state.
898
for trans_id in trans_ids:
899
from_file_id = self.tree_file_id(trans_id)
900
if from_file_id is not None:
901
from_trans_ids[from_file_id] = trans_id
902
to_file_id = self.final_file_id(trans_id)
903
if to_file_id is not None:
904
to_trans_ids[to_file_id] = trans_id
905
return from_trans_ids, to_trans_ids
907
def _from_file_data(self, from_trans_id, from_versioned, file_id):
908
"""Get data about a file in the from (tree) state
910
Return a (name, parent, kind, executable) tuple
912
from_path = self._tree_id_paths.get(from_trans_id)
914
# get data from working tree if versioned
915
from_entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
916
from_name = from_entry.name
917
from_parent = from_entry.parent_id
920
            if from_path is None:
                # File does not exist in FROM state
                from_name = None
                from_parent = None
            else:
                # File exists, but is not versioned. Have to use path-
                # splitting to recover the name and parent.
from_name = os.path.basename(from_path)
928
tree_parent = self.get_tree_parent(from_trans_id)
929
from_parent = self.tree_file_id(tree_parent)
930
if from_path is not None:
931
from_kind, from_executable, from_stats = \
932
self._tree._comparison_data(from_entry, from_path)
        else:
            from_kind = None
            from_executable = False
936
return from_name, from_parent, from_kind, from_executable
938
def _to_file_data(self, to_trans_id, from_trans_id, from_executable):
939
"""Get data about a file in the to (target) state
941
Return a (name, parent, kind, executable) tuple
943
to_name = self.final_name(to_trans_id)
944
to_kind = self.final_kind(to_trans_id)
945
to_parent = self.final_file_id(self.final_parent(to_trans_id))
946
if to_trans_id in self._new_executability:
947
to_executable = self._new_executability[to_trans_id]
948
elif to_trans_id == from_trans_id:
949
to_executable = from_executable
951
to_executable = False
952
return to_name, to_parent, to_kind, to_executable
954
def iter_changes(self):
955
"""Produce output in the same format as Tree.iter_changes.
957
Will produce nonsensical results if invoked while inventory/filesystem
958
conflicts (as reported by TreeTransform.find_conflicts()) are present.
960
This reads the Transform, but only reproduces changes involving a
961
file_id. Files that are not versioned in either of the FROM or TO
962
states are not reflected.
964
final_paths = FinalPaths(self)
965
from_trans_ids, to_trans_ids = self._get_file_id_maps()
967
# Now iterate through all active file_ids
968
for file_id in set(from_trans_ids.keys() + to_trans_ids.keys()):
970
from_trans_id = from_trans_ids.get(file_id)
971
# find file ids, and determine versioning state
972
if from_trans_id is None:
973
from_versioned = False
974
from_trans_id = to_trans_ids[file_id]
976
from_versioned = True
977
to_trans_id = to_trans_ids.get(file_id)
978
if to_trans_id is None:
980
to_trans_id = from_trans_id
984
from_name, from_parent, from_kind, from_executable = \
985
self._from_file_data(from_trans_id, from_versioned, file_id)
987
to_name, to_parent, to_kind, to_executable = \
988
self._to_file_data(to_trans_id, from_trans_id, from_executable)
990
if not from_versioned:
993
from_path = self._tree_id_paths.get(from_trans_id)
997
to_path = final_paths.get_path(to_trans_id)
998
if from_kind != to_kind:
1000
elif to_kind in ('file', 'symlink') and (
1001
to_trans_id != from_trans_id or
1002
to_trans_id in self._new_contents):
1004
if (not modified and from_versioned == to_versioned and
1005
from_parent==to_parent and from_name == to_name and
1006
from_executable == to_executable):
1008
results.append((file_id, (from_path, to_path), modified,
1009
(from_versioned, to_versioned),
1010
(from_parent, to_parent),
1011
(from_name, to_name),
1012
(from_kind, to_kind),
1013
(from_executable, to_executable)))
1014
return iter(sorted(results, key=lambda x:x[1]))
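    # Illustrative sketch (not part of the original module): one element of the
    # iter_changes() result for a renamed, otherwise unmodified file, mirroring
    # the tuple layout built above (all ids and names are hypothetical).
    #
    #   ('file-id', ('old.txt', 'new.txt'), False, (True, True),
    #    ('root-id', 'root-id'), ('old.txt', 'new.txt'),
    #    ('file', 'file'), (False, False))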
1016
def get_preview_tree(self):
1017
"""Return a tree representing the result of the transform.
1019
The tree is a snapshot, and altering the TreeTransform will invalidate
1022
return _PreviewTree(self)
1024
def commit(self, branch, message, merge_parents=None, strict=False,
1025
timestamp=None, timezone=None, committer=None, authors=None,
1026
revprops=None, revision_id=None):
1027
"""Commit the result of this TreeTransform to a branch.
1029
:param branch: The branch to commit to.
1030
:param message: The message to attach to the commit.
1031
:param merge_parents: Additional parent revision-ids specified by
1033
:param strict: If True, abort the commit if there are unversioned
1035
:param timestamp: if not None, seconds-since-epoch for the time and
1036
date. (May be a float.)
1037
:param timezone: Optional timezone for timestamp, as an offset in
1039
:param committer: Optional committer in email-id format.
1040
(e.g. "J Random Hacker <jrandom@example.com>")
1041
:param authors: Optional list of authors in email-id format.
1042
:param revprops: Optional dictionary of revision properties.
1043
:param revision_id: Optional revision id. (Specifying a revision-id
1044
may reduce performance for some non-native formats.)
1045
:return: The revision_id of the revision committed.
1047
self._check_malformed()
1049
unversioned = set(self._new_contents).difference(set(self._new_id))
1050
for trans_id in unversioned:
1051
if self.final_file_id(trans_id) is None:
1052
raise errors.StrictCommitFailed()
1054
revno, last_rev_id = branch.last_revision_info()
1055
if last_rev_id == _mod_revision.NULL_REVISION:
1056
if merge_parents is not None:
1057
raise ValueError('Cannot supply merge parents for first'
1061
parent_ids = [last_rev_id]
1062
if merge_parents is not None:
1063
parent_ids.extend(merge_parents)
1064
if self._tree.get_revision_id() != last_rev_id:
1065
raise ValueError('TreeTransform not based on branch basis: %s' %
1066
self._tree.get_revision_id())
1067
revprops = commit.Commit.update_revprops(revprops, branch, authors)
1068
builder = branch.get_commit_builder(parent_ids,
1069
timestamp=timestamp,
1071
committer=committer,
1073
revision_id=revision_id)
1074
preview = self.get_preview_tree()
1075
list(builder.record_iter_changes(preview, last_rev_id,
1076
self.iter_changes()))
1077
builder.finish_inventory()
1078
revision_id = builder.commit(message)
1079
        branch.set_last_revision_info(revno + 1, revision_id)
        return revision_id

def _text_parent(self, trans_id):
1083
file_id = self.tree_file_id(trans_id)
1085
if file_id is None or self._tree.kind(file_id) != 'file':
1087
except errors.NoSuchFile:
1091
def _get_parents_texts(self, trans_id):
1092
"""Get texts for compression parents of this file."""
1093
file_id = self._text_parent(trans_id)
1096
return (self._tree.get_file_text(file_id),)
1098
def _get_parents_lines(self, trans_id):
1099
"""Get lines for compression parents of this file."""
1100
file_id = self._text_parent(trans_id)
1103
return (self._tree.get_file_lines(file_id),)
1105
def serialize(self, serializer):
1106
"""Serialize this TreeTransform.
1108
:param serializer: A Serialiser like pack.ContainerSerializer.
1110
new_name = dict((k, v.encode('utf-8')) for k, v in
1111
self._new_name.items())
1112
new_executability = dict((k, int(v)) for k, v in
1113
self._new_executability.items())
1114
tree_path_ids = dict((k.encode('utf-8'), v)
1115
for k, v in self._tree_path_ids.items())
1117
'_id_number': self._id_number,
1118
'_new_name': new_name,
1119
'_new_parent': self._new_parent,
1120
'_new_executability': new_executability,
1121
'_new_id': self._new_id,
1122
'_tree_path_ids': tree_path_ids,
1123
'_removed_id': list(self._removed_id),
1124
'_removed_contents': list(self._removed_contents),
1125
'_non_present_ids': self._non_present_ids,
1127
yield serializer.bytes_record(bencode.bencode(attribs),
1129
for trans_id, kind in self._new_contents.items():
1131
lines = osutils.chunks_to_lines(
1132
self._read_file_chunks(trans_id))
1133
parents = self._get_parents_lines(trans_id)
1134
mpdiff = multiparent.MultiParent.from_lines(lines, parents)
1135
content = ''.join(mpdiff.to_patch())
1136
if kind == 'directory':
1138
if kind == 'symlink':
1139
content = self._read_symlink_target(trans_id)
1140
yield serializer.bytes_record(content, ((trans_id, kind),))
1142
def deserialize(self, records):
1143
"""Deserialize a stored TreeTransform.
1145
:param records: An iterable of (names, content) tuples, as per
1146
pack.ContainerPushParser.
1148
names, content = records.next()
1149
attribs = bencode.bdecode(content)
1150
self._id_number = attribs['_id_number']
1151
self._new_name = dict((k, v.decode('utf-8'))
1152
for k, v in attribs['_new_name'].items())
1153
self._new_parent = attribs['_new_parent']
1154
self._new_executability = dict((k, bool(v)) for k, v in
1155
attribs['_new_executability'].items())
1156
self._new_id = attribs['_new_id']
1157
self._r_new_id = dict((v, k) for k, v in self._new_id.items())
1158
self._tree_path_ids = {}
1159
self._tree_id_paths = {}
1160
for bytepath, trans_id in attribs['_tree_path_ids'].items():
1161
path = bytepath.decode('utf-8')
1162
self._tree_path_ids[path] = trans_id
1163
self._tree_id_paths[trans_id] = path
1164
self._removed_id = set(attribs['_removed_id'])
1165
self._removed_contents = set(attribs['_removed_contents'])
1166
self._non_present_ids = attribs['_non_present_ids']
1167
for ((trans_id, kind),), content in records:
1169
mpdiff = multiparent.MultiParent.from_patch(content)
1170
lines = mpdiff.to_lines(self._get_parents_texts(trans_id))
1171
self.create_file(lines, trans_id)
1172
if kind == 'directory':
1173
self.create_directory(trans_id)
1174
if kind == 'symlink':
1175
self.create_symlink(content.decode('utf-8'), trans_id)
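    # Illustrative sketch (not part of the original module): a serialize /
    # deserialize round trip.  The docstrings above name pack.ContainerSerializer
    # and pack.ContainerPushParser; the plumbing that moves the byte records
    # between them is elided and only assumed here.
    #
    #   records = tt.serialize(pack.ContainerSerializer())   # byte records out
    #   ...store or transmit the records...
    #   other_tt.deserialize(iter(parsed_records))           # rebuild the transform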
1178
class DiskTreeTransform(TreeTransformBase):
1179
"""Tree transform storing its contents on disk."""
1181
def __init__(self, tree, limbodir, pb=None,
1182
case_sensitive=True):
1184
:param tree: The tree that will be transformed, but not necessarily
1186
:param limbodir: A directory where new files can be stored until
1187
they are installed in their proper places
1189
:param case_sensitive: If True, the target of the transform is
1190
case sensitive, not just case preserving.
1192
TreeTransformBase.__init__(self, tree, pb, case_sensitive)
1193
self._limbodir = limbodir
1194
self._deletiondir = None
1195
# A mapping of transform ids to their limbo filename
1196
self._limbo_files = {}
1197
self._possibly_stale_limbo_files = set()
1198
# A mapping of transform ids to a set of the transform ids of children
1199
# that their limbo directory has
1200
self._limbo_children = {}
1201
# Map transform ids to maps of child filename to child transform id
1202
self._limbo_children_names = {}
1203
# List of transform ids that need to be renamed from limbo into place
1204
self._needs_rename = set()
1205
self._creation_mtime = None
1208
"""Release the working tree lock, if held, clean up limbo dir.
1210
This is required if apply has not been invoked, but can be invoked
1213
if self._tree is None:
1216
limbo_paths = self._limbo_files.values() + list(
1217
self._possibly_stale_limbo_files)
1218
limbo_paths = sorted(limbo_paths, reverse=True)
1219
for path in limbo_paths:
1223
if e.errno != errno.ENOENT:
1225
# XXX: warn? perhaps we just got interrupted at an
1226
# inconvenient moment, but perhaps files are disappearing
1229
delete_any(self._limbodir)
1231
# We don't especially care *why* the dir is immortal.
1232
raise ImmortalLimbo(self._limbodir)
1234
if self._deletiondir is not None:
1235
delete_any(self._deletiondir)
1237
raise errors.ImmortalPendingDeletion(self._deletiondir)
1239
TreeTransformBase.finalize(self)
1241
def _limbo_name(self, trans_id):
1242
"""Generate the limbo name of a file"""
1243
limbo_name = self._limbo_files.get(trans_id)
1244
if limbo_name is None:
1245
limbo_name = self._generate_limbo_path(trans_id)
1246
self._limbo_files[trans_id] = limbo_name
1249
def _generate_limbo_path(self, trans_id):
1250
"""Generate a limbo path using the trans_id as the relative path.
1252
This is suitable as a fallback, and when the transform should not be
1253
sensitive to the path encoding of the limbo directory.
1255
self._needs_rename.add(trans_id)
1256
return pathjoin(self._limbodir, trans_id)
1258
def adjust_path(self, name, parent, trans_id):
1259
previous_parent = self._new_parent.get(trans_id)
1260
previous_name = self._new_name.get(trans_id)
1261
TreeTransformBase.adjust_path(self, name, parent, trans_id)
1262
if (trans_id in self._limbo_files and
1263
trans_id not in self._needs_rename):
1264
self._rename_in_limbo([trans_id])
1265
if previous_parent != parent:
1266
self._limbo_children[previous_parent].remove(trans_id)
1267
if previous_parent != parent or previous_name != name:
1268
del self._limbo_children_names[previous_parent][previous_name]
1270
def _rename_in_limbo(self, trans_ids):
1271
"""Fix limbo names so that the right final path is produced.
1273
This means we outsmarted ourselves-- we tried to avoid renaming
1274
these files later by creating them with their final names in their
1275
final parents. But now the previous name or parent is no longer
1276
suitable, so we have to rename them.
1278
Even for trans_ids that have no new contents, we must remove their
1279
entries from _limbo_files, because they are now stale.
1281
for trans_id in trans_ids:
1282
old_path = self._limbo_files[trans_id]
1283
self._possibly_stale_limbo_files.add(old_path)
1284
del self._limbo_files[trans_id]
1285
if trans_id not in self._new_contents:
1287
new_path = self._limbo_name(trans_id)
1288
os.rename(old_path, new_path)
1289
self._possibly_stale_limbo_files.remove(old_path)
1290
for descendant in self._limbo_descendants(trans_id):
1291
desc_path = self._limbo_files[descendant]
1292
desc_path = new_path + desc_path[len(old_path):]
1293
self._limbo_files[descendant] = desc_path
1295
def _limbo_descendants(self, trans_id):
1296
"""Return the set of trans_ids whose limbo paths descend from this."""
1297
descendants = set(self._limbo_children.get(trans_id, []))
1298
for descendant in list(descendants):
1299
descendants.update(self._limbo_descendants(descendant))
1302
def create_file(self, contents, trans_id, mode_id=None, sha1=None):
1303
"""Schedule creation of a new file.
1307
:param contents: an iterator of strings, all of which will be written
1308
to the target destination.
1309
:param trans_id: TreeTransform handle
1310
:param mode_id: If not None, force the mode of the target file to match
1311
the mode of the object referenced by mode_id.
1312
Otherwise, we will try to preserve mode bits of an existing file.
1313
:param sha1: If the sha1 of this content is already known, pass it in.
1314
We can use it to prevent future sha1 computations.
1316
name = self._limbo_name(trans_id)
1317
f = open(name, 'wb')
1319
unique_add(self._new_contents, trans_id, 'file')
1320
f.writelines(contents)
1323
self._set_mtime(name)
1324
self._set_mode(trans_id, mode_id, S_ISREG)
1325
# It is unfortunate we have to use lstat instead of fstat, but we just
1326
# used utime and chmod on the file, so we need the accurate final
1328
if sha1 is not None:
1329
self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
1331
def _read_file_chunks(self, trans_id):
1332
cur_file = open(self._limbo_name(trans_id), 'rb')
1334
return cur_file.readlines()
1338
def _read_symlink_target(self, trans_id):
1339
return os.readlink(self._limbo_name(trans_id))
1341
def _set_mtime(self, path):
1342
"""All files that are created get the same mtime.
1344
This time is set by the first object to be created.
1346
if self._creation_mtime is None:
1347
self._creation_mtime = time.time()
1348
os.utime(path, (self._creation_mtime, self._creation_mtime))
1350
def create_hardlink(self, path, trans_id):
1351
"""Schedule creation of a hard link"""
1352
name = self._limbo_name(trans_id)
1356
if e.errno != errno.EPERM:
1358
raise errors.HardLinkNotSupported(path)
1360
unique_add(self._new_contents, trans_id, 'file')
1362
# Clean up the file, it never got registered so
1363
# TreeTransform.finalize() won't clean it up.
1367
def create_directory(self, trans_id):
1368
"""Schedule creation of a new directory.
1370
See also new_directory.
1372
os.mkdir(self._limbo_name(trans_id))
1373
unique_add(self._new_contents, trans_id, 'directory')
1375
def create_symlink(self, target, trans_id):
1376
"""Schedule creation of a new symbolic link.
1378
target is a bytestring.
1379
See also new_symlink.
1382
os.symlink(target, self._limbo_name(trans_id))
1383
unique_add(self._new_contents, trans_id, 'symlink')
1386
path = FinalPaths(self).get_path(trans_id)
1389
raise UnableCreateSymlink(path=path)
1391
def cancel_creation(self, trans_id):
1392
"""Cancel the creation of new file contents."""
1393
del self._new_contents[trans_id]
1394
if trans_id in self._observed_sha1s:
1395
del self._observed_sha1s[trans_id]
1396
children = self._limbo_children.get(trans_id)
1397
# if this is a limbo directory with children, move them before removing
1399
if children is not None:
1400
self._rename_in_limbo(children)
1401
del self._limbo_children[trans_id]
1402
del self._limbo_children_names[trans_id]
1403
delete_any(self._limbo_name(trans_id))
1405
def new_orphan(self, trans_id, parent_id):
1406
# FIXME: There is no tree config, so we use the branch one (it's weird
1407
# to define it this way as orphaning can only occur in a working tree,
1408
# but that's all we have (for now). It will find the option in
1409
# locations.conf or bazaar.conf though) -- vila 20100916
1410
conf = self._tree.branch.get_config()
1411
conf_var_name = 'bzr.transform.orphan_policy'
1412
orphan_policy = conf.get_user_option(conf_var_name)
1413
default_policy = orphaning_registry.default_key
1414
if orphan_policy is None:
1415
orphan_policy = default_policy
1416
if orphan_policy not in orphaning_registry:
1417
trace.warning('%s (from %s) is not a known policy, defaulting '
1418
'to %s' % (orphan_policy, conf_var_name, default_policy))
1419
orphan_policy = default_policy
1420
handle_orphan = orphaning_registry.get(orphan_policy)
1421
handle_orphan(self, trans_id, parent_id)
1424
class OrphaningError(errors.BzrError):
1426
# Only bugs could lead to such exception being seen by the user
1427
internal_error = True
1428
    _fmt = "Error while orphaning %(orphan)r in %(parent)r directory"
1430
def __init__(self, orphan, parent):
1431
errors.BzrError.__init__(self)
1432
self.orphan = orphan
1433
self.parent = parent
1436
class OrphaningForbidden(OrphaningError):
1438
    _fmt = "Policy: %(policy)s doesn't allow creating orphans."
1440
def __init__(self, policy):
1441
errors.BzrError.__init__(self)
1442
self.policy = policy
1445
def move_orphan(tt, orphan_id, parent_id):
1446
"""See TreeTransformBase.new_orphan.
1448
    This creates a new orphan in the `bzr-orphans` dir at the root of the
    working tree.

:param tt: The TreeTransform orphaning `trans_id`.
1453
:param orphan_id: The trans id that should be orphaned.
1455
    :param parent_id: The orphan parent trans id.
    """
# Add the orphan dir if it doesn't exist
1458
orphan_dir_basename = 'bzr-orphans'
1459
od_id = tt.trans_id_tree_path(orphan_dir_basename)
1460
if tt.final_kind(od_id) is None:
1461
tt.create_directory(od_id)
1462
parent_path = tt._tree_id_paths[parent_id]
1463
# Find a name that doesn't exist yet in the orphan dir
1464
actual_name = tt.final_name(orphan_id)
1465
new_name = tt._available_backup_name(actual_name, od_id)
1466
tt.adjust_path(new_name, od_id, orphan_id)
1467
trace.warning('%s has been orphaned in %s'
1468
% (joinpath(parent_path, actual_name), orphan_dir_basename))
1471
def refuse_orphan(tt, orphan_id, parent_id):
1472
"""See TreeTransformBase.new_orphan.

    This refuses to create an orphan, letting the caller handle the conflict.
    """
raise OrphaningForbidden('never')
1479
orphaning_registry = registry.Registry()
1480
orphaning_registry.register(
1481
'conflict', refuse_orphan,
1482
'Leave orphans in place and create a conflict on the directory.')
1483
orphaning_registry.register(
1484
'move', move_orphan,
1485
'Move orphans into the bzr-orphans directory.')
1486
orphaning_registry._set_default_key('conflict')
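# Illustrative note (not part of the original module): the policy consulted by
# DiskTreeTransform.new_orphan is the branch option 'bzr.transform.orphan_policy';
# with the registrations above the recognised values are 'conflict' (the
# default) and 'move'.  A sketch of switching a branch to the 'move' policy,
# assuming ``wt`` is a WorkingTree:
#
#   wt.branch.get_config().set_user_option('bzr.transform.orphan_policy', 'move')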
1489
class TreeTransform(DiskTreeTransform):
1490
"""Represent a tree transformation.
1492
This object is designed to support incremental generation of the transform,
1495
However, it gives optimum performance when parent directories are created
1496
before their contents. The transform is then able to put child files
1497
directly in their parent directory, avoiding later renames.
1499
It is easy to produce malformed transforms, but they are generally
1500
harmless. Attempting to apply a malformed transform will cause an
1501
exception to be raised before any modifications are made to the tree.
1503
Many kinds of malformed transforms can be corrected with the
1504
resolve_conflicts function. The remaining ones indicate programming error,
1505
such as trying to create a file with no path.
1507
    Two sets of file creation methods are supplied. Convenience methods are:
     * new_file
     * new_directory
     * new_symlink

    These are composed of the low-level methods:
     * create_path
     * create_file or create_directory or create_symlink
     * version_file
     * set_executability

Transform/Transaction ids
1519
-------------------------
1520
trans_ids are temporary ids assigned to all files involved in a transform.
1521
It's possible, even common, that not all files in the Tree have trans_ids.
1523
trans_ids are used because filenames and file_ids are not good enough
1524
identifiers; filenames change, and not all files have file_ids. File-ids
1525
are also associated with trans-ids, so that moving a file moves its
1528
trans_ids are only valid for the TreeTransform that generated them.
    Limbo is a temporary directory used to hold new versions of files.
Files are added to limbo by create_file, create_directory, create_symlink,
1534
and their convenience variants (new_*). Files may be removed from limbo
1535
using cancel_creation. Files are renamed from limbo into their final
1536
location as part of TreeTransform.apply
1538
Limbo must be cleaned up, by either calling TreeTransform.apply or
1539
calling TreeTransform.finalize.
1541
Files are placed into limbo inside their parent directories, where
1542
possible. This reduces subsequent renames, and makes operations involving
1543
lots of files faster. This optimization is only possible if the parent
1544
directory is created *before* creating any of its children, so avoid
1545
creating children before parents, where possible.
1549
This temporary directory is used by _FileMover for storing files that are
1550
about to be deleted. In case of rollback, the files will be restored.
1551
    FileMover does not delete files until it is sure that a rollback will not
    happen.
    """

def __init__(self, tree, pb=None):
1555
"""Note: a tree_write lock is taken on the tree.
1557
Use TreeTransform.finalize() to release the lock (can be omitted if
1558
        TreeTransform.apply() called).
        """
tree.lock_tree_write()
1563
limbodir = urlutils.local_path_from_url(
1564
tree._transport.abspath('limbo'))
1565
osutils.ensure_empty_directory_exists(
1567
errors.ExistingLimbo)
1568
deletiondir = urlutils.local_path_from_url(
1569
tree._transport.abspath('pending-deletion'))
1570
osutils.ensure_empty_directory_exists(
1572
errors.ExistingPendingDeletion)
1577
# Cache of realpath results, to speed up canonical_path
1578
self._realpaths = {}
1579
# Cache of relpath results, to speed up canonical_path
1581
DiskTreeTransform.__init__(self, tree, limbodir, pb,
1582
tree.case_sensitive)
1583
self._deletiondir = deletiondir
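    # Illustrative sketch (not part of the original module): the typical
    # lifecycle described in the class docstring, assuming ``wt`` is a
    # WorkingTree and the file name/id are hypothetical.
    #
    #   tt = TreeTransform(wt)
    #   try:
    #       tt.new_file('hello.txt', tt.root, ['hello world\n'], 'hello-id')
    #       tt.apply()      # renames limbo contents into place, updates inventory
    #   finally:
    #       tt.finalize()   # always releases the lock and cleans up limbo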
1585
def canonical_path(self, path):
1586
"""Get the canonical tree-relative path"""
1587
# don't follow final symlinks
1588
abs = self._tree.abspath(path)
1589
if abs in self._relpaths:
1590
return self._relpaths[abs]
1591
dirname, basename = os.path.split(abs)
1592
if dirname not in self._realpaths:
1593
self._realpaths[dirname] = os.path.realpath(dirname)
1594
dirname = self._realpaths[dirname]
1595
abs = pathjoin(dirname, basename)
1596
if dirname in self._relpaths:
1597
relpath = pathjoin(self._relpaths[dirname], basename)
1598
            relpath = relpath.rstrip('/\\')
        else:
            relpath = self._tree.relpath(abs)
1601
self._relpaths[abs] = relpath
1604
def tree_kind(self, trans_id):
1605
"""Determine the file kind in the working tree.
1607
:returns: The file kind or None if the file does not exist
1609
path = self._tree_id_paths.get(trans_id)
1613
return file_kind(self._tree.abspath(path))
1614
except errors.NoSuchFile:
1617
def _set_mode(self, trans_id, mode_id, typefunc):
1618
"""Set the mode of new file contents.
1619
The mode_id is the existing file to get the mode from (often the same
1620
as trans_id). The operation is only performed if there's a mode match
1621
according to typefunc.
1626
old_path = self._tree_id_paths[mode_id]
1630
mode = os.stat(self._tree.abspath(old_path)).st_mode
1632
if e.errno in (errno.ENOENT, errno.ENOTDIR):
1633
# Either old_path doesn't exist, or the parent of the
1634
# target is not a directory (but will be one eventually)
1635
# Either way, we know it doesn't exist *right now*
1636
# See also bug #248448
1641
osutils.chmod_if_possible(self._limbo_name(trans_id), mode)
1643
def iter_tree_children(self, parent_id):
1644
"""Iterate through the entry's tree children, if any"""
1646
path = self._tree_id_paths[parent_id]
1650
children = os.listdir(self._tree.abspath(path))
1652
if not (osutils._is_error_enotdir(e)
1653
or e.errno in (errno.ENOENT, errno.ESRCH)):
1657
for child in children:
1658
childpath = joinpath(path, child)
1659
if self._tree.is_control_filename(childpath):
1661
yield self.trans_id_tree_path(childpath)
1663
def _generate_limbo_path(self, trans_id):
1664
"""Generate a limbo path using the final path if possible.
1666
This optimizes the performance of applying the tree transform by
1667
avoiding renames. These renames can be avoided only when the parent
1668
directory is already scheduled for creation.
1670
If the final path cannot be used, falls back to using the trans_id as
1673
parent = self._new_parent.get(trans_id)
1674
# if the parent directory is already in limbo (e.g. when building a
1675
# tree), choose a limbo name inside the parent, to reduce further
1677
use_direct_path = False
1678
if self._new_contents.get(parent) == 'directory':
1679
filename = self._new_name.get(trans_id)
1680
if filename is not None:
1681
if parent not in self._limbo_children:
1682
self._limbo_children[parent] = set()
1683
self._limbo_children_names[parent] = {}
1684
use_direct_path = True
1685
# the direct path can only be used if no other file has
1686
# already taken this pathname, i.e. if the name is unused, or
1687
# if it is already associated with this trans_id.
1688
elif self._case_sensitive_target:
1689
if (self._limbo_children_names[parent].get(filename)
1690
in (trans_id, None)):
1691
use_direct_path = True
1693
for l_filename, l_trans_id in\
1694
self._limbo_children_names[parent].iteritems():
1695
if l_trans_id == trans_id:
1697
if l_filename.lower() == filename.lower():
1700
use_direct_path = True
1702
if not use_direct_path:
1703
return DiskTreeTransform._generate_limbo_path(self, trans_id)
1705
limbo_name = pathjoin(self._limbo_files[parent], filename)
1706
self._limbo_children[parent].add(trans_id)
1707
        self._limbo_children_names[parent][filename] = trans_id
        return limbo_name

def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
1712
"""Apply all changes to the inventory and filesystem.
1714
If filesystem or inventory conflicts are present, MalformedTransform
1717
If apply succeeds, finalize is not necessary.
1719
:param no_conflicts: if True, the caller guarantees there are no
1720
conflicts, so no check is made.
1721
:param precomputed_delta: An inventory delta to use instead of
1723
:param _mover: Supply an alternate FileMover, for testing
1725
if not no_conflicts:
1726
self._check_malformed()
1727
child_pb = ui.ui_factory.nested_progress_bar()
1729
if precomputed_delta is None:
1730
child_pb.update('Apply phase', 0, 2)
1731
inventory_delta = self._generate_inventory_delta()
1734
inventory_delta = precomputed_delta
1737
mover = _FileMover()
1741
child_pb.update('Apply phase', 0 + offset, 2 + offset)
1742
self._apply_removals(mover)
1743
child_pb.update('Apply phase', 1 + offset, 2 + offset)
1744
modified_paths = self._apply_insertions(mover)
1749
mover.apply_deletions()
1752
self._tree.apply_inventory_delta(inventory_delta)
1753
self._apply_observed_sha1s()
1756
return _TransformResults(modified_paths, self.rename_count)
1758
    def _generate_inventory_delta(self):
        """Generate an inventory delta for the current transform."""
        inventory_delta = []
        child_pb = ui.ui_factory.nested_progress_bar()
        new_paths = self._inventory_altered()
        total_entries = len(new_paths) + len(self._removed_id)
        try:
            for num, trans_id in enumerate(self._removed_id):
                if (num % 10) == 0:
                    child_pb.update('removing file', num, total_entries)
                if trans_id == self._new_root:
                    file_id = self._tree.get_root_id()
                else:
                    file_id = self.tree_file_id(trans_id)
                # File-id isn't really being deleted, just moved
                if file_id in self._r_new_id:
                    continue
                path = self._tree_id_paths[trans_id]
                inventory_delta.append((path, None, file_id, None))
            new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
                                     new_paths)
            entries = self._tree.iter_entries_by_dir(
                new_path_file_ids.values())
            old_paths = dict((e.file_id, p) for p, e in entries)
            for num, (path, trans_id) in enumerate(new_paths):
                if (num % 10) == 0:
                    child_pb.update('adding file',
                                    num + len(self._removed_id), total_entries)
                file_id = new_path_file_ids[trans_id]
                if file_id is None:
                    continue
                kind = self.final_kind(trans_id)
                if kind is None:
                    kind = self._tree.stored_kind(file_id)
                parent_trans_id = self.final_parent(trans_id)
                parent_file_id = new_path_file_ids.get(parent_trans_id)
                if parent_file_id is None:
                    parent_file_id = self.final_file_id(parent_trans_id)
                if trans_id in self._new_reference_revision:
                    new_entry = inventory.TreeReference(
                        file_id,
                        self._new_name[trans_id],
                        self.final_file_id(self._new_parent[trans_id]),
                        None, self._new_reference_revision[trans_id])
                else:
                    new_entry = inventory.make_entry(kind,
                        self.final_name(trans_id),
                        parent_file_id, file_id)
                old_path = old_paths.get(new_entry.file_id)
                new_executability = self._new_executability.get(trans_id)
                if new_executability is not None:
                    new_entry.executable = new_executability
                inventory_delta.append(
                    (old_path, path, new_entry.file_id, new_entry))
        finally:
            child_pb.finished()
        return inventory_delta

    def _apply_removals(self, mover):
        """Perform tree operations that remove directory/inventory names.

        That is, delete files that are to be deleted, and put any files that
        need renaming into limbo. This must be done in strict child-to-parent
        order.

        If inventory_delta is None, no inventory delta generation is performed.
        """
        tree_paths = list(self._tree_path_ids.iteritems())
        tree_paths.sort(reverse=True)
        child_pb = ui.ui_factory.nested_progress_bar()
        try:
            for num, (path, trans_id) in enumerate(tree_paths):
                # do not attempt to move root into a subdirectory of itself.
                if path == '':
                    continue
                child_pb.update('removing file', num, len(tree_paths))
                full_path = self._tree.abspath(path)
                if trans_id in self._removed_contents:
                    delete_path = os.path.join(self._deletiondir, trans_id)
                    mover.pre_delete(full_path, delete_path)
                elif (trans_id in self._new_name
                      or trans_id in self._new_parent):
                    try:
                        mover.rename(full_path, self._limbo_name(trans_id))
                    except errors.TransformRenameFailed, e:
                        if e.errno != errno.ENOENT:
                            raise
                    else:
                        self.rename_count += 1
        finally:
            child_pb.finished()

    def _apply_insertions(self, mover):
        """Perform tree operations that insert directory/inventory names.

        That is, create any files that need to be created, and restore from
        limbo any files that needed renaming. This must be done in strict
        parent-to-child order.

        If inventory_delta is None, no inventory delta is calculated, and
        no list of modified paths is returned.
        """
        new_paths = self.new_paths(filesystem_only=True)
        modified_paths = []
        new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
                                 new_paths)
        child_pb = ui.ui_factory.nested_progress_bar()
        try:
            for num, (path, trans_id) in enumerate(new_paths):
                if (num % 10) == 0:
                    child_pb.update('adding file', num, len(new_paths))
                full_path = self._tree.abspath(path)
                if trans_id in self._needs_rename:
                    try:
                        mover.rename(self._limbo_name(trans_id), full_path)
                    except errors.TransformRenameFailed, e:
                        # We may be renaming a dangling inventory id
                        if e.errno != errno.ENOENT:
                            raise
                    else:
                        self.rename_count += 1
                    # TODO: if trans_id in self._observed_sha1s, we should
                    #       re-stat the final target, since ctime will be
                    #       updated by the change.
                if (trans_id in self._new_contents or
                    self.path_changed(trans_id)):
                    if trans_id in self._new_contents:
                        modified_paths.append(full_path)
                if trans_id in self._new_executability:
                    self._set_executability(path, trans_id)
                if trans_id in self._observed_sha1s:
                    o_sha1, o_st_val = self._observed_sha1s[trans_id]
                    st = osutils.lstat(full_path)
                    self._observed_sha1s[trans_id] = (o_sha1, st)
        finally:
            child_pb.finished()
        for path, trans_id in new_paths:
            # new_paths includes stuff like workingtree conflicts. Only the
            # stuff in new_contents actually comes from limbo.
            if trans_id in self._limbo_files:
                del self._limbo_files[trans_id]
        self._new_contents.clear()
        return modified_paths

    def _apply_observed_sha1s(self):
        """After we have finished renaming everything, update observed sha1s

        This has to be done after self._tree.apply_inventory_delta, otherwise
        it doesn't know anything about the files we are updating. Also, we want
        to do this as late as possible, so that most entries end up cached.
        """
        # TODO: this doesn't update the stat information for directories. So
        #       the first 'bzr status' will still need to rewrite
        #       .bzr/checkout/dirstate. However, we at least don't need to
        #       re-read all of the files.
        # TODO: If the operation took a while, we could do a time.sleep(3) here
        #       to allow the clock to tick over and ensure we won't have any
        #       problems. (we could observe start time, and finish time, and if
        #       it is less than eg 10% overhead, add a sleep call.)
        paths = FinalPaths(self)
        for trans_id, observed in self._observed_sha1s.iteritems():
            path = paths.get_path(trans_id)
            # We could get the file_id, but dirstate prefers to use the path
            # anyway, and it is 'cheaper' to determine.
            # file_id = self._new_id[trans_id]
            self._tree._observed_sha1(None, path, observed)


class TransformPreview(DiskTreeTransform):
    """A TreeTransform for generating preview trees.

    Unlike TreeTransform, this version works when the input tree is a
    RevisionTree, rather than a WorkingTree. As a result, it tends to ignore
    unversioned files in the input tree.
    """

    def __init__(self, tree, pb=None, case_sensitive=True):
        tree.lock_read()
        limbodir = osutils.mkdtemp(prefix='bzr-limbo-')
        DiskTreeTransform.__init__(self, tree, limbodir, pb, case_sensitive)

    def canonical_path(self, path):
        return path

    def tree_kind(self, trans_id):
        path = self._tree_id_paths.get(trans_id)
        if path is None:
            return None
        kind = self._tree.path_content_summary(path)[0]
        if kind == 'missing':
            kind = None
        return kind

    def _set_mode(self, trans_id, mode_id, typefunc):
        """Set the mode of new file contents.
        The mode_id is the existing file to get the mode from (often the same
        as trans_id). The operation is only performed if there's a mode match
        according to typefunc.
        """
        # is it ok to ignore this? probably
        pass

    def iter_tree_children(self, parent_id):
        """Iterate through the entry's tree children, if any"""
        try:
            path = self._tree_id_paths[parent_id]
        except KeyError:
            return
        file_id = self.tree_file_id(parent_id)
        if file_id is None:
            return
        entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
        children = getattr(entry, 'children', {})
        for child in children:
            childpath = joinpath(path, child)
            yield self.trans_id_tree_path(childpath)

    def new_orphan(self, trans_id, parent_id):
        raise NotImplementedError(self.new_orphan)


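# Illustrative sketch (assuming the get_preview_tree() helper from the base
# transform class): a TransformPreview never touches the working tree, so it
# can wrap a RevisionTree and expose the result through a _PreviewTree.
#
#   preview = TransformPreview(branch.basis_tree())
#   try:
#       preview.new_file('todo.txt', preview.root, ['stub\n'])
#       preview_tree = preview.get_preview_tree()
#       # ... read from preview_tree ...
#   finally:
#       preview.finalize()    # unlocks the tree and removes the limbo dir
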
class _PreviewTree(tree.InventoryTree):
    """Partial implementation of Tree to support show_diff_trees"""

    def __init__(self, transform):
        self._transform = transform
        self._final_paths = FinalPaths(transform)
        self.__by_parent = None
        self._parent_ids = []
        self._all_children_cache = {}
        self._path2trans_id_cache = {}
        self._final_name_cache = {}
        self._iter_changes_cache = dict((c[0], c) for c in
                                        self._transform.iter_changes())

    def _content_change(self, file_id):
        """Return True if the content of this file changed"""
        changes = self._iter_changes_cache.get(file_id)
        # changes[2] is true if the file content changed. See
        # InterTree.iter_changes.
        return (changes is not None and changes[2])

    def _get_repository(self):
        repo = getattr(self._transform._tree, '_repository', None)
        if repo is None:
            repo = self._transform._tree.branch.repository
        return repo

    def _iter_parent_trees(self):
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self._get_repository().revision_tree(revision_id)

    def _get_file_revision(self, file_id, vf, tree_revision):
        parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
                       self._iter_parent_trees()]
        vf.add_lines((file_id, tree_revision), parent_keys,
                     self.get_file_lines(file_id))
        repo = self._get_repository()
        base_vf = repo.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return tree_revision

    def _stat_limbo_file(self, file_id=None, trans_id=None):
        if trans_id is None:
            trans_id = self._transform.trans_id_file_id(file_id)
        name = self._transform._limbo_name(trans_id)
        return os.lstat(name)

    @property
    def _by_parent(self):
        if self.__by_parent is None:
            self.__by_parent = self._transform.by_parent()
        return self.__by_parent
2038
def _comparison_data(self, entry, path):
2039
kind, size, executable, link_or_sha1 = self.path_content_summary(path)
2040
if kind == 'missing':
2044
file_id = self._transform.final_file_id(self._path2trans_id(path))
2045
executable = self.is_executable(file_id, path)
2046
return kind, executable, None
2048
def is_locked(self):
2051
def lock_read(self):
2052
# Perhaps in theory, this should lock the TreeTransform?
2059
def inventory(self):
2060
"""This Tree does not use inventory as its backing data."""
2061
raise NotImplementedError(_PreviewTree.inventory)
2063
def get_root_id(self):
2064
return self._transform.final_file_id(self._transform.root)
2066
def all_file_ids(self):
2067
tree_ids = set(self._transform._tree.all_file_ids())
2068
tree_ids.difference_update(self._transform.tree_file_id(t)
2069
for t in self._transform._removed_id)
2070
tree_ids.update(self._transform._new_id.values())
2074
return iter(self.all_file_ids())
2076
def _has_id(self, file_id, fallback_check):
2077
if file_id in self._transform._r_new_id:
2079
elif file_id in set([self._transform.tree_file_id(trans_id) for
2080
trans_id in self._transform._removed_id]):
2083
return fallback_check(file_id)
2085
def has_id(self, file_id):
2086
return self._has_id(file_id, self._transform._tree.has_id)
2088
def has_or_had_id(self, file_id):
2089
return self._has_id(file_id, self._transform._tree.has_or_had_id)
2091
    def _path2trans_id(self, path):
        # We must not use None here, because that is a valid value to store.
        trans_id = self._path2trans_id_cache.get(path, object)
        if trans_id is not object:
            return trans_id
        segments = splitpath(path)
        cur_parent = self._transform.root
        for cur_segment in segments:
            for child in self._all_children(cur_parent):
                final_name = self._final_name_cache.get(child)
                if final_name is None:
                    final_name = self._transform.final_name(child)
                    self._final_name_cache[child] = final_name
                if final_name == cur_segment:
                    cur_parent = child
                    break
            else:
                self._path2trans_id_cache[path] = None
                return None
        self._path2trans_id_cache[path] = cur_parent
        return cur_parent

    def path2id(self, path):
        return self._transform.final_file_id(self._path2trans_id(path))

    def id2path(self, file_id):
        trans_id = self._transform.trans_id_file_id(file_id)
        try:
            return self._final_paths._determine_path(trans_id)
        except NoFinalPath:
            raise errors.NoSuchId(self, file_id)

    def _all_children(self, trans_id):
        children = self._all_children_cache.get(trans_id)
        if children is not None:
            return children
        children = set(self._transform.iter_tree_children(trans_id))
        # children in the _new_parent set are provided by _by_parent.
        children.difference_update(self._transform._new_parent.keys())
        children.update(self._by_parent.get(trans_id, []))
        self._all_children_cache[trans_id] = children
        return children

    def iter_children(self, file_id):
        trans_id = self._transform.trans_id_file_id(file_id)
        for child_trans_id in self._all_children(trans_id):
            yield self._transform.final_file_id(child_trans_id)
2140
possible_extras = set(self._transform.trans_id_tree_path(p) for p
2141
in self._transform._tree.extras())
2142
possible_extras.update(self._transform._new_contents)
2143
possible_extras.update(self._transform._removed_id)
2144
for trans_id in possible_extras:
2145
if self._transform.final_file_id(trans_id) is None:
2146
yield self._final_paths._determine_path(trans_id)
2148
def _make_inv_entries(self, ordered_entries, specific_file_ids=None,
2149
yield_parents=False):
2150
for trans_id, parent_file_id in ordered_entries:
2151
file_id = self._transform.final_file_id(trans_id)
2154
if (specific_file_ids is not None
2155
and file_id not in specific_file_ids):
2157
kind = self._transform.final_kind(trans_id)
2159
kind = self._transform._tree.stored_kind(file_id)
2160
new_entry = inventory.make_entry(
2162
self._transform.final_name(trans_id),
2163
parent_file_id, file_id)
2164
yield new_entry, trans_id
2166
def _list_files_by_dir(self):
2167
todo = [ROOT_PARENT]
2169
while len(todo) > 0:
2171
parent_file_id = self._transform.final_file_id(parent)
2172
children = list(self._all_children(parent))
2173
paths = dict(zip(children, self._final_paths.get_paths(children)))
2174
children.sort(key=paths.get)
2175
todo.extend(reversed(children))
2176
for trans_id in children:
2177
ordered_ids.append((trans_id, parent_file_id))
2180
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
2181
# This may not be a maximally efficient implementation, but it is
2182
# reasonably straightforward. An implementation that grafts the
2183
# TreeTransform changes onto the tree's iter_entries_by_dir results
2184
# might be more efficient, but requires tricky inferences about stack
2186
ordered_ids = self._list_files_by_dir()
2187
for entry, trans_id in self._make_inv_entries(ordered_ids,
2188
specific_file_ids, yield_parents=yield_parents):
2189
yield unicode(self._final_paths.get_path(trans_id)), entry
2191
def _iter_entries_for_dir(self, dir_path):
2192
"""Return path, entry for items in a directory without recursing down."""
2193
dir_file_id = self.path2id(dir_path)
2195
for file_id in self.iter_children(dir_file_id):
2196
trans_id = self._transform.trans_id_file_id(file_id)
2197
ordered_ids.append((trans_id, file_id))
2198
for entry, trans_id in self._make_inv_entries(ordered_ids):
2199
yield unicode(self._final_paths.get_path(trans_id)), entry
2201
def list_files(self, include_root=False, from_dir=None, recursive=True):
2202
"""See WorkingTree.list_files."""
2203
# XXX This should behave like WorkingTree.list_files, but is really
2204
# more like RevisionTree.list_files.
2208
prefix = from_dir + '/'
2209
entries = self.iter_entries_by_dir()
2210
for path, entry in entries:
2211
if entry.name == '' and not include_root:
2214
if not path.startswith(prefix):
2216
path = path[len(prefix):]
2217
yield path, 'V', entry.kind, entry.file_id, entry
2219
if from_dir is None and include_root is True:
2220
root_entry = inventory.make_entry('directory', '',
2221
ROOT_PARENT, self.get_root_id())
2222
yield '', 'V', 'directory', root_entry.file_id, root_entry
2223
entries = self._iter_entries_for_dir(from_dir or '')
2224
for path, entry in entries:
2225
yield path, 'V', entry.kind, entry.file_id, entry
2227
def kind(self, file_id):
2228
trans_id = self._transform.trans_id_file_id(file_id)
2229
return self._transform.final_kind(trans_id)
2231
def stored_kind(self, file_id):
2232
trans_id = self._transform.trans_id_file_id(file_id)
2234
return self._transform._new_contents[trans_id]
2236
return self._transform._tree.stored_kind(file_id)
2238
def get_file_mtime(self, file_id, path=None):
2239
"""See Tree.get_file_mtime"""
2240
if not self._content_change(file_id):
2241
return self._transform._tree.get_file_mtime(file_id)
2242
return self._stat_limbo_file(file_id).st_mtime
2244
def _file_size(self, entry, stat_value):
2245
return self.get_file_size(entry.file_id)
2247
def get_file_size(self, file_id):
2248
"""See Tree.get_file_size"""
2249
trans_id = self._transform.trans_id_file_id(file_id)
2250
kind = self._transform.final_kind(trans_id)
2253
if trans_id in self._transform._new_contents:
2254
return self._stat_limbo_file(trans_id=trans_id).st_size
2255
if self.kind(file_id) == 'file':
2256
return self._transform._tree.get_file_size(file_id)
2260
def get_file_sha1(self, file_id, path=None, stat_value=None):
2261
trans_id = self._transform.trans_id_file_id(file_id)
2262
kind = self._transform._new_contents.get(trans_id)
2264
return self._transform._tree.get_file_sha1(file_id)
2266
fileobj = self.get_file(file_id)
2268
return sha_file(fileobj)
2272
def is_executable(self, file_id, path=None):
2275
trans_id = self._transform.trans_id_file_id(file_id)
2277
return self._transform._new_executability[trans_id]
2280
return self._transform._tree.is_executable(file_id, path)
2282
if e.errno == errno.ENOENT:
2285
except errors.NoSuchId:
2288
def has_filename(self, path):
2289
trans_id = self._path2trans_id(path)
2290
if trans_id in self._transform._new_contents:
2292
elif trans_id in self._transform._removed_contents:
2295
return self._transform._tree.has_filename(path)
2297
def path_content_summary(self, path):
2298
trans_id = self._path2trans_id(path)
2299
tt = self._transform
2300
tree_path = tt._tree_id_paths.get(trans_id)
2301
kind = tt._new_contents.get(trans_id)
2303
if tree_path is None or trans_id in tt._removed_contents:
2304
return 'missing', None, None, None
2305
summary = tt._tree.path_content_summary(tree_path)
2306
kind, size, executable, link_or_sha1 = summary
2309
limbo_name = tt._limbo_name(trans_id)
2310
if trans_id in tt._new_reference_revision:
2311
kind = 'tree-reference'
2313
statval = os.lstat(limbo_name)
2314
size = statval.st_size
2315
if not supports_executable():
2318
executable = statval.st_mode & S_IEXEC
2322
if kind == 'symlink':
2323
link_or_sha1 = os.readlink(limbo_name).decode(osutils._fs_enc)
2324
executable = tt._new_executability.get(trans_id, executable)
2325
return kind, size, executable, link_or_sha1
2327
def iter_changes(self, from_tree, include_unchanged=False,
2328
specific_files=None, pb=None, extra_trees=None,
2329
require_versioned=True, want_unversioned=False):
2330
"""See InterTree.iter_changes.
2332
This has a fast path that is only used when the from_tree matches
2333
the transform tree, and no fancy options are supplied.
2335
if (from_tree is not self._transform._tree or include_unchanged or
2336
specific_files or want_unversioned):
2337
return tree.InterTree(from_tree, self).iter_changes(
2338
include_unchanged=include_unchanged,
2339
specific_files=specific_files,
2341
extra_trees=extra_trees,
2342
require_versioned=require_versioned,
2343
want_unversioned=want_unversioned)
2344
if want_unversioned:
2345
raise ValueError('want_unversioned is not supported')
2346
return self._transform.iter_changes()
2348
    def get_file(self, file_id, path=None):
        """See Tree.get_file"""
        if not self._content_change(file_id):
            return self._transform._tree.get_file(file_id, path)
        trans_id = self._transform.trans_id_file_id(file_id)
        name = self._transform._limbo_name(trans_id)
        return open(name, 'rb')

    def get_file_with_stat(self, file_id, path=None):
        return self.get_file(file_id, path), None
2359
def annotate_iter(self, file_id,
2360
default_revision=_mod_revision.CURRENT_REVISION):
2361
changes = self._iter_changes_cache.get(file_id)
2365
changed_content, versioned, kind = (changes[2], changes[3],
2369
get_old = (kind[0] == 'file' and versioned[0])
2371
old_annotation = self._transform._tree.annotate_iter(file_id,
2372
default_revision=default_revision)
2376
return old_annotation
2377
if not changed_content:
2378
return old_annotation
2379
# TODO: This is doing something similar to what WT.annotate_iter is
2380
# doing, however it fails slightly because it doesn't know what
2381
# the *other* revision_id is, so it doesn't know how to give the
2382
# other as the origin for some lines, they all get
2383
# 'default_revision'
2384
# It would be nice to be able to use the new Annotator based
2385
# approach, as well.
2386
return annotate.reannotate([old_annotation],
2387
self.get_file(file_id).readlines(),
2390
    def get_symlink_target(self, file_id, path=None):
        """See Tree.get_symlink_target"""
        if not self._content_change(file_id):
            return self._transform._tree.get_symlink_target(file_id)
        trans_id = self._transform.trans_id_file_id(file_id)
        name = self._transform._limbo_name(trans_id)
        return osutils.readlink(name)
2398
def walkdirs(self, prefix=''):
2399
pending = [self._transform.root]
2400
while len(pending) > 0:
2401
parent_id = pending.pop()
2404
prefix = prefix.rstrip('/')
2405
parent_path = self._final_paths.get_path(parent_id)
2406
parent_file_id = self._transform.final_file_id(parent_id)
2407
for child_id in self._all_children(parent_id):
2408
path_from_root = self._final_paths.get_path(child_id)
2409
basename = self._transform.final_name(child_id)
2410
file_id = self._transform.final_file_id(child_id)
2411
kind = self._transform.final_kind(child_id)
2412
if kind is not None:
2413
versioned_kind = kind
2416
versioned_kind = self._transform._tree.stored_kind(file_id)
2417
if versioned_kind == 'directory':
2418
subdirs.append(child_id)
2419
children.append((path_from_root, basename, kind, None,
2420
file_id, versioned_kind))
2422
if parent_path.startswith(prefix):
2423
yield (parent_path, parent_file_id), children
2424
pending.extend(sorted(subdirs, key=self._final_paths.get_path,
2427
def get_parent_ids(self):
2428
return self._parent_ids
2430
def set_parent_ids(self, parent_ids):
2431
self._parent_ids = parent_ids
2433
def get_revision_tree(self, revision_id):
2434
return self._transform._tree.get_revision_tree(revision_id)
2437
def joinpath(parent, child):
    """Join tree-relative paths, handling the tree root specially"""
    if parent is None or parent == "":
        return child
    else:
        return pathjoin(parent, child)


class FinalPaths(object):
    """Make path calculation cheap by memoizing paths.

    The underlying tree must not be manipulated between calls, or else
    the results will likely be incorrect.
    """
    def __init__(self, transform):
        object.__init__(self)
        self._known_paths = {}
        self.transform = transform

    def _determine_path(self, trans_id):
        if (trans_id == self.transform.root or trans_id == ROOT_PARENT):
            return u""
        name = self.transform.final_name(trans_id)
        parent_id = self.transform.final_parent(trans_id)
        if parent_id == self.transform.root:
            return name
        else:
            return pathjoin(self.get_path(parent_id), name)

    def get_path(self, trans_id):
        """Find the final path associated with a trans_id"""
        if trans_id not in self._known_paths:
            self._known_paths[trans_id] = self._determine_path(trans_id)
        return self._known_paths[trans_id]

    def get_paths(self, trans_ids):
        return [(self.get_path(t), t) for t in trans_ids]

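# Illustrative sketch: FinalPaths is how the rest of this module maps
# trans_ids back to tree-relative paths (see iter_cook_conflicts() below).
#
#   fp = FinalPaths(tt)
#   fp.get_path(trans_id)          # memoized, so repeated lookups are cheap
#   fp.get_paths([id_a, id_b])     # -> [(path_a, id_a), (path_b, id_b)]

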
def topology_sorted_ids(tree):
    """Determine the topological order of the ids in a tree"""
    file_ids = list(tree)
    file_ids.sort(key=tree.id2path)
    return file_ids


def build_tree(tree, wt, accelerator_tree=None, hardlink=False,
               delta_from_tree=False):
    """Create working tree for a branch, using a TreeTransform.

    This function should be used on empty trees, having a tree root at most.
    (see merge and revert functionality for working with existing trees)

    Existing files are handled like so:

    - Existing bzrdirs take precedence over creating new items. They are
      created as '%s.diverted' % name.
    - Otherwise, if the content on disk matches the content we are building,
      it is silently replaced.
    - Otherwise, conflict resolution will move the old file to 'oldname.moved'.

    :param tree: The tree to convert wt into a copy of
    :param wt: The working tree that files will be placed into
    :param accelerator_tree: A tree which can be used for retrieving file
        contents more quickly than tree itself, i.e. a workingtree. tree
        will be used for cases where accelerator_tree's content is different.
    :param hardlink: If true, hard-link files to accelerator_tree, where
        possible. accelerator_tree must implement abspath, i.e. be a
        working tree.
    :param delta_from_tree: If true, build_tree may use the input Tree to
        generate the inventory delta.
    """
    wt.lock_tree_write()
    try:
        tree.lock_read()
        try:
            if accelerator_tree is not None:
                accelerator_tree.lock_read()
            try:
                return _build_tree(tree, wt, accelerator_tree, hardlink,
                                   delta_from_tree)
            finally:
                if accelerator_tree is not None:
                    accelerator_tree.unlock()
        finally:
            tree.unlock()
    finally:
        wt.unlock()

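# Illustrative call (a sketch): populate an empty checkout from a branch's
# basis revision, relying on the lock ordering handled above.
#
#   build_tree(branch.basis_tree(), wt)
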
2528
def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
2529
"""See build_tree."""
2530
for num, _unused in enumerate(wt.all_file_ids()):
2531
if num > 0: # more than just a root
2532
raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
2534
top_pb = ui.ui_factory.nested_progress_bar()
2535
pp = ProgressPhase("Build phase", 2, top_pb)
2536
if tree.inventory.root is not None:
2537
# This is kind of a hack: we should be altering the root
2538
# as part of the regular tree shape diff logic.
2539
# The conditional test here is to avoid doing an
2540
# expensive operation (flush) every time the root id
2541
# is set within the tree, nor setting the root and thus
2542
# marking the tree as dirty, because we use two different
2543
# idioms here: tree interfaces and inventory interfaces.
2544
if wt.get_root_id() != tree.get_root_id():
2545
wt.set_root_id(tree.get_root_id())
2547
tt = TreeTransform(wt)
2551
file_trans_id[wt.get_root_id()] = \
2552
tt.trans_id_tree_file_id(wt.get_root_id())
2553
pb = ui.ui_factory.nested_progress_bar()
2555
deferred_contents = []
2557
total = len(tree.inventory)
2559
precomputed_delta = []
2561
precomputed_delta = None
2562
# Check if tree inventory has content. If so, we populate
2563
# existing_files with the directory content. If there are no
2564
# entries we skip populating existing_files as its not used.
2565
# This improves performance and unncessary work on large
2566
# directory trees. (#501307)
2568
existing_files = set()
2569
for dir, files in wt.walkdirs():
2570
existing_files.update(f[0] for f in files)
2571
for num, (tree_path, entry) in \
2572
enumerate(tree.inventory.iter_entries_by_dir()):
2573
pb.update("Building tree", num - len(deferred_contents), total)
2574
if entry.parent_id is None:
2577
file_id = entry.file_id
2579
precomputed_delta.append((None, tree_path, file_id, entry))
2580
if tree_path in existing_files:
2581
target_path = wt.abspath(tree_path)
2582
kind = file_kind(target_path)
2583
if kind == "directory":
2585
bzrdir.BzrDir.open(target_path)
2586
except errors.NotBranchError:
2590
if (file_id not in divert and
2591
_content_match(tree, entry, file_id, kind,
2593
tt.delete_contents(tt.trans_id_tree_path(tree_path))
2594
if kind == 'directory':
2596
parent_id = file_trans_id[entry.parent_id]
2597
if entry.kind == 'file':
2598
# We *almost* replicate new_by_entry, so that we can defer
2599
# getting the file text, and get them all at once.
2600
trans_id = tt.create_path(entry.name, parent_id)
2601
file_trans_id[file_id] = trans_id
2602
tt.version_file(file_id, trans_id)
2603
executable = tree.is_executable(file_id, tree_path)
2605
tt.set_executability(executable, trans_id)
2606
trans_data = (trans_id, tree_path, entry.text_sha1)
2607
deferred_contents.append((file_id, trans_data))
2609
file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2612
new_trans_id = file_trans_id[file_id]
2613
old_parent = tt.trans_id_tree_path(tree_path)
2614
_reparent_children(tt, old_parent, new_trans_id)
2615
offset = num + 1 - len(deferred_contents)
2616
_create_files(tt, tree, deferred_contents, pb, offset,
2617
accelerator_tree, hardlink)
2621
divert_trans = set(file_trans_id[f] for f in divert)
2622
resolver = lambda t, c: resolve_checkout(t, c, divert_trans)
2623
raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
2624
if len(raw_conflicts) > 0:
2625
precomputed_delta = None
2626
conflicts = cook_conflicts(raw_conflicts, tt)
2627
for conflict in conflicts:
2628
trace.warning(unicode(conflict))
2630
wt.add_conflicts(conflicts)
2631
except errors.UnsupportedOperation:
2633
result = tt.apply(no_conflicts=True,
2634
precomputed_delta=precomputed_delta)
2641
def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
2643
total = len(desired_files) + offset
2645
if accelerator_tree is None:
2646
new_desired_files = desired_files
2648
iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
2649
unchanged = [(f, p[1]) for (f, p, c, v, d, n, k, e)
2650
in iter if not (c or e[0] != e[1])]
2651
if accelerator_tree.supports_content_filtering():
2652
unchanged = [(f, p) for (f, p) in unchanged
2653
if not accelerator_tree.iter_search_rules([p]).next()]
2654
unchanged = dict(unchanged)
2655
new_desired_files = []
2657
for file_id, (trans_id, tree_path, text_sha1) in desired_files:
2658
accelerator_path = unchanged.get(file_id)
2659
if accelerator_path is None:
2660
new_desired_files.append((file_id,
2661
(trans_id, tree_path, text_sha1)))
2663
pb.update('Adding file contents', count + offset, total)
2665
tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
2668
contents = accelerator_tree.get_file(file_id, accelerator_path)
2669
if wt.supports_content_filtering():
2670
filters = wt._content_filter_stack(tree_path)
2671
contents = filtered_output_bytes(contents, filters,
2672
ContentFilterContext(tree_path, tree))
2674
tt.create_file(contents, trans_id, sha1=text_sha1)
2678
except AttributeError:
2679
# after filtering, contents may no longer be file-like
2683
for count, ((trans_id, tree_path, text_sha1), contents) in enumerate(
2684
tree.iter_files_bytes(new_desired_files)):
2685
if wt.supports_content_filtering():
2686
filters = wt._content_filter_stack(tree_path)
2687
contents = filtered_output_bytes(contents, filters,
2688
ContentFilterContext(tree_path, tree))
2689
tt.create_file(contents, trans_id, sha1=text_sha1)
2690
pb.update('Adding file contents', count + offset, total)
2693
def _reparent_children(tt, old_parent, new_parent):
    for child in tt.iter_tree_children(old_parent):
        tt.adjust_path(tt.final_name(child), new_parent, child)


def _reparent_transform_children(tt, old_parent, new_parent):
    by_parent = tt.by_parent()
    for child in by_parent[old_parent]:
        tt.adjust_path(tt.final_name(child), new_parent, child)
    return by_parent[old_parent]


def _content_match(tree, entry, file_id, kind, target_path):
    if entry.kind != kind:
        return False
    if entry.kind == "directory":
        return True
    if entry.kind == "file":
        f = file(target_path, 'rb')
        try:
            if tree.get_file_text(file_id) == f.read():
                return True
        finally:
            f.close()
    elif entry.kind == "symlink":
        if tree.get_symlink_target(file_id) == os.readlink(target_path):
            return True
    return False


def resolve_checkout(tt, conflicts, divert):
    new_conflicts = set()
    for c_type, conflict in ((c[0], c) for c in conflicts):
        # Anything but a 'duplicate' would indicate programmer error
        if c_type != 'duplicate':
            raise AssertionError(c_type)
        # Now figure out which is new and which is old
        if tt.new_contents(conflict[1]):
            new_file = conflict[1]
            old_file = conflict[2]
        else:
            new_file = conflict[2]
            old_file = conflict[1]

        # We should only get here if the conflict wasn't completely
        # resolved
        final_parent = tt.final_parent(old_file)
        if new_file in divert:
            new_name = tt.final_name(old_file)+'.diverted'
            tt.adjust_path(new_name, final_parent, new_file)
            new_conflicts.add((c_type, 'Diverted to',
                               new_file, old_file))
        else:
            new_name = tt.final_name(old_file)+'.moved'
            tt.adjust_path(new_name, final_parent, old_file)
            new_conflicts.add((c_type, 'Moved existing file to',
                               old_file, new_file))
    return new_conflicts


def new_by_entry(tt, entry, parent_id, tree):
    """Create a new file according to its inventory entry"""
    name = entry.name
    kind = entry.kind
    if kind == 'file':
        contents = tree.get_file(entry.file_id).readlines()
        executable = tree.is_executable(entry.file_id)
        return tt.new_file(name, parent_id, contents, entry.file_id,
                           executable)
    elif kind in ('directory', 'tree-reference'):
        trans_id = tt.new_directory(name, parent_id, entry.file_id)
        if kind == 'tree-reference':
            tt.set_tree_reference(entry.reference_revision, trans_id)
        return trans_id
    elif kind == 'symlink':
        target = tree.get_symlink_target(entry.file_id)
        return tt.new_symlink(name, parent_id, target, entry.file_id)
    else:
        raise errors.BadFileKindError(name, kind)


def create_from_tree(tt, trans_id, tree, file_id, bytes=None,
    filter_tree_path=None):
    """Create new file contents according to tree contents.

    :param filter_tree_path: the tree path to use to lookup
      content filters to apply to the bytes output in the working tree.
      This only applies if the working tree supports content filtering.
    """
    kind = tree.kind(file_id)
    if kind == 'directory':
        tt.create_directory(trans_id)
    elif kind == "file":
        if bytes is None:
            tree_file = tree.get_file(file_id)
            try:
                bytes = tree_file.readlines()
            finally:
                tree_file.close()
        wt = tt._tree
        if wt.supports_content_filtering() and filter_tree_path is not None:
            filters = wt._content_filter_stack(filter_tree_path)
            bytes = filtered_output_bytes(bytes, filters,
                ContentFilterContext(filter_tree_path, tree))
        tt.create_file(bytes, trans_id)
    elif kind == "symlink":
        tt.create_symlink(tree.get_symlink_target(file_id), trans_id)
    else:
        raise AssertionError('Unknown kind %r' % kind)


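# Illustrative sketch: create_from_tree() plus create_entry_executability()
# (below) reproduce a single versioned entry inside a transform.
#
#   trans_id = tt.create_path(entry.name, parent_trans_id)
#   create_from_tree(tt, trans_id, source_tree, entry.file_id)
#   create_entry_executability(tt, entry, trans_id)
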
def create_entry_executability(tt, entry, trans_id):
    """Set the executability of a trans_id according to an inventory entry"""
    if entry.kind == "file":
        tt.set_executability(entry.executable, trans_id)


@deprecated_function(deprecated_in((2, 3, 0)))
def get_backup_name(entry, by_parent, parent_trans_id, tt):
    return _get_backup_name(entry.name, by_parent, parent_trans_id, tt)


@deprecated_function(deprecated_in((2, 3, 0)))
def _get_backup_name(name, by_parent, parent_trans_id, tt):
    """Produce a backup-style name that appears to be available"""
    def name_gen():
        counter = 1
        while True:
            yield "%s.~%d~" % (name, counter)
            counter += 1
    for new_name in name_gen():
        if not tt.has_named_child(by_parent, parent_trans_id, new_name):
            return new_name


def _entry_changes(file_id, entry, working_tree):
    """Determine in which ways the inventory entry has changed.

    Returns booleans: has_contents, content_mod, meta_mod
    has_contents means there are currently contents, but they differ
    contents_mod means contents need to be modified
    meta_mod means the metadata needs to be modified
    """
    cur_entry = working_tree.inventory[file_id]
    try:
        working_kind = working_tree.kind(file_id)
        has_contents = True
    except NoSuchFile:
        has_contents = False
        contents_mod = True
        meta_mod = False
    if has_contents is True:
        if entry.kind != working_kind:
            contents_mod, meta_mod = True, False
        else:
            cur_entry._read_tree_state(working_tree.id2path(file_id),
                                       working_tree)
            contents_mod, meta_mod = entry.detect_changes(cur_entry)
            cur_entry._forget_tree_state()
    return has_contents, contents_mod, meta_mod


def revert(working_tree, target_tree, filenames, backups=False,
           pb=None, change_reporter=None):
    """Revert a working tree's contents to those of a target tree."""
    target_tree.lock_read()
    pb = ui.ui_factory.nested_progress_bar()
    tt = TreeTransform(working_tree, pb)
    try:
        pp = ProgressPhase("Revert phase", 3, pb)
        conflicts, merge_modified = _prepare_revert_transform(
            working_tree, target_tree, tt, filenames, backups, pp)
        if change_reporter:
            change_reporter = delta._ChangeReporter(
                unversioned_filter=working_tree.is_ignored)
            delta.report_changes(tt.iter_changes(), change_reporter)
        for conflict in conflicts:
            trace.warning(unicode(conflict))
        pp.next_phase()
        tt.apply()
        working_tree.set_merge_modified(merge_modified)
    finally:
        target_tree.unlock()
        tt.finalize()
        pb.clear()
    return conflicts

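# Illustrative call (a sketch): revert the whole tree to its basis, roughly
# what 'bzr revert' with no file arguments does.
#
#   revert(wt, wt.basis_tree(), None)

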
def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
                              backups, pp, basis_tree=None,
                              merge_modified=None):
    child_pb = ui.ui_factory.nested_progress_bar()
    try:
        if merge_modified is None:
            merge_modified = working_tree.merge_modified()
        merge_modified = _alter_files(working_tree, target_tree, tt,
                                      child_pb, filenames, backups,
                                      merge_modified, basis_tree)
    finally:
        child_pb.finished()
    child_pb = ui.ui_factory.nested_progress_bar()
    try:
        raw_conflicts = resolve_conflicts(tt, child_pb,
            lambda t, c: conflict_pass(t, c, target_tree))
    finally:
        child_pb.finished()
    conflicts = cook_conflicts(raw_conflicts, tt)
    return conflicts, merge_modified

2903
def _alter_files(working_tree, target_tree, tt, pb, specific_files,
2904
backups, merge_modified, basis_tree=None):
2905
if basis_tree is not None:
2906
basis_tree.lock_read()
2907
# We ask the working_tree for its changes relative to the target, rather
2908
# than the target changes relative to the working tree. Because WT4 has an
2909
# optimizer to compare itself to a target, but no optimizer for the
2911
change_list = working_tree.iter_changes(target_tree,
2912
specific_files=specific_files, pb=pb)
2913
if target_tree.get_root_id() is None:
2919
for id_num, (file_id, path, changed_content, versioned, parent, name,
2920
kind, executable) in enumerate(change_list):
2921
target_path, wt_path = path
2922
target_versioned, wt_versioned = versioned
2923
target_parent, wt_parent = parent
2924
target_name, wt_name = name
2925
target_kind, wt_kind = kind
2926
target_executable, wt_executable = executable
2927
if skip_root and wt_parent is None:
2929
trans_id = tt.trans_id_file_id(file_id)
2932
keep_content = False
2933
if wt_kind == 'file' and (backups or target_kind is None):
2934
wt_sha1 = working_tree.get_file_sha1(file_id)
2935
if merge_modified.get(file_id) != wt_sha1:
2936
# acquire the basis tree lazily to prevent the
2937
# expense of accessing it when it's not needed ?
2938
# (Guessing, RBC, 200702)
2939
if basis_tree is None:
2940
basis_tree = working_tree.basis_tree()
2941
basis_tree.lock_read()
2942
if basis_tree.has_id(file_id):
2943
if wt_sha1 != basis_tree.get_file_sha1(file_id):
2945
elif target_kind is None and not target_versioned:
2947
if wt_kind is not None:
2948
if not keep_content:
2949
tt.delete_contents(trans_id)
2950
elif target_kind is not None:
2951
parent_trans_id = tt.trans_id_file_id(wt_parent)
2952
backup_name = tt._available_backup_name(
2953
wt_name, parent_trans_id)
2954
tt.adjust_path(backup_name, parent_trans_id, trans_id)
2955
new_trans_id = tt.create_path(wt_name, parent_trans_id)
2956
if wt_versioned and target_versioned:
2957
tt.unversion_file(trans_id)
2958
tt.version_file(file_id, new_trans_id)
2959
# New contents should have the same unix perms as old
2962
trans_id = new_trans_id
2963
if target_kind in ('directory', 'tree-reference'):
2964
tt.create_directory(trans_id)
2965
if target_kind == 'tree-reference':
2966
revision = target_tree.get_reference_revision(file_id,
2968
tt.set_tree_reference(revision, trans_id)
2969
elif target_kind == 'symlink':
2970
tt.create_symlink(target_tree.get_symlink_target(file_id),
2972
elif target_kind == 'file':
2973
deferred_files.append((file_id, (trans_id, mode_id)))
2974
if basis_tree is None:
2975
basis_tree = working_tree.basis_tree()
2976
basis_tree.lock_read()
2977
new_sha1 = target_tree.get_file_sha1(file_id)
2978
if (basis_tree.has_id(file_id) and
2979
new_sha1 == basis_tree.get_file_sha1(file_id)):
2980
if file_id in merge_modified:
2981
del merge_modified[file_id]
2983
merge_modified[file_id] = new_sha1
2985
# preserve the execute bit when backing up
2986
if keep_content and wt_executable == target_executable:
2987
tt.set_executability(target_executable, trans_id)
2988
elif target_kind is not None:
2989
raise AssertionError(target_kind)
2990
if not wt_versioned and target_versioned:
2991
tt.version_file(file_id, trans_id)
2992
if wt_versioned and not target_versioned:
2993
tt.unversion_file(trans_id)
2994
if (target_name is not None and
2995
(wt_name != target_name or wt_parent != target_parent)):
2996
if target_name == '' and target_parent is None:
2997
parent_trans = ROOT_PARENT
2999
parent_trans = tt.trans_id_file_id(target_parent)
3000
if wt_parent is None and wt_versioned:
3001
tt.adjust_root_path(target_name, parent_trans)
3003
tt.adjust_path(target_name, parent_trans, trans_id)
3004
if wt_executable != target_executable and target_kind == "file":
3005
tt.set_executability(target_executable, trans_id)
3006
if working_tree.supports_content_filtering():
3007
for index, ((trans_id, mode_id), bytes) in enumerate(
3008
target_tree.iter_files_bytes(deferred_files)):
3009
file_id = deferred_files[index][0]
3010
# We're reverting a tree to the target tree so using the
3011
# target tree to find the file path seems the best choice
3012
# here IMO - Ian C 27/Oct/2009
3013
filter_tree_path = target_tree.id2path(file_id)
3014
filters = working_tree._content_filter_stack(filter_tree_path)
3015
bytes = filtered_output_bytes(bytes, filters,
3016
ContentFilterContext(filter_tree_path, working_tree))
3017
tt.create_file(bytes, trans_id, mode_id)
3019
for (trans_id, mode_id), bytes in target_tree.iter_files_bytes(
3021
tt.create_file(bytes, trans_id, mode_id)
3022
tt.fixup_new_roots()
3024
if basis_tree is not None:
3026
return merge_modified
3029
def resolve_conflicts(tt, pb=None, pass_func=None):
    """Make many conflict-resolution attempts, but die if they fail"""
    if pass_func is None:
        pass_func = conflict_pass
    new_conflicts = set()
    pb = ui.ui_factory.nested_progress_bar()
    try:
        for n in range(10):
            pb.update('Resolution pass', n+1, 10)
            conflicts = tt.find_conflicts()
            if len(conflicts) == 0:
                return new_conflicts
            new_conflicts.update(pass_func(tt, conflicts))
        raise MalformedTransform(conflicts=conflicts)
    finally:
        pb.finished()

3047
def conflict_pass(tt, conflicts, path_tree=None):
3048
"""Resolve some classes of conflicts.
3050
:param tt: The transform to resolve conflicts in
3051
:param conflicts: The conflicts to resolve
3052
:param path_tree: A Tree to get supplemental paths from
3054
new_conflicts = set()
3055
for c_type, conflict in ((c[0], c) for c in conflicts):
3056
if c_type == 'duplicate id':
3057
tt.unversion_file(conflict[1])
3058
new_conflicts.add((c_type, 'Unversioned existing file',
3059
conflict[1], conflict[2], ))
3060
elif c_type == 'duplicate':
3061
# files that were renamed take precedence
3062
final_parent = tt.final_parent(conflict[1])
3063
if tt.path_changed(conflict[1]):
3064
existing_file, new_file = conflict[2], conflict[1]
3066
existing_file, new_file = conflict[1], conflict[2]
3067
new_name = tt.final_name(existing_file) + '.moved'
3068
tt.adjust_path(new_name, final_parent, existing_file)
3069
new_conflicts.add((c_type, 'Moved existing file to',
3070
existing_file, new_file))
3071
elif c_type == 'parent loop':
3072
# break the loop by undoing one of the ops that caused the loop
3074
while not tt.path_changed(cur):
3075
cur = tt.final_parent(cur)
3076
new_conflicts.add((c_type, 'Cancelled move', cur,
3077
tt.final_parent(cur),))
3078
tt.adjust_path(tt.final_name(cur), tt.get_tree_parent(cur), cur)
3080
elif c_type == 'missing parent':
3081
trans_id = conflict[1]
3082
if trans_id in tt._removed_contents:
3083
cancel_deletion = True
3084
orphans = tt._get_potential_orphans(trans_id)
3086
cancel_deletion = False
3087
# All children are orphans
3090
tt.new_orphan(o, trans_id)
3091
except OrphaningError:
3092
# Something bad happened so we cancel the directory
3093
# deletion which will leave it in place with a
3094
# conflict. The user can deal with it from there.
3095
# Note that this also catch the case where we don't
3096
# want to create orphans and leave the directory in
3098
cancel_deletion = True
3101
# Cancel the directory deletion
3102
tt.cancel_deletion(trans_id)
3103
new_conflicts.add(('deleting parent', 'Not deleting',
3108
tt.final_name(trans_id)
3110
if path_tree is not None:
3111
file_id = tt.final_file_id(trans_id)
3113
file_id = tt.inactive_file_id(trans_id)
3114
_, entry = path_tree.iter_entries_by_dir(
3116
# special-case the other tree root (move its
3117
# children to current root)
3118
if entry.parent_id is None:
3120
moved = _reparent_transform_children(
3121
tt, trans_id, tt.root)
3123
new_conflicts.add((c_type, 'Moved to root',
3126
parent_trans_id = tt.trans_id_file_id(
3128
tt.adjust_path(entry.name, parent_trans_id,
3131
tt.create_directory(trans_id)
3132
new_conflicts.add((c_type, 'Created directory', trans_id))
3133
elif c_type == 'unversioned parent':
3134
file_id = tt.inactive_file_id(conflict[1])
3135
# special-case the other tree root (move its children instead)
3136
if path_tree and path_tree.has_id(file_id):
3137
if path_tree.path2id('') == file_id:
3138
# This is the root entry, skip it
3140
tt.version_file(file_id, conflict[1])
3141
new_conflicts.add((c_type, 'Versioned directory', conflict[1]))
3142
elif c_type == 'non-directory parent':
3143
parent_id = conflict[1]
3144
parent_parent = tt.final_parent(parent_id)
3145
parent_name = tt.final_name(parent_id)
3146
parent_file_id = tt.final_file_id(parent_id)
3147
new_parent_id = tt.new_directory(parent_name + '.new',
3148
parent_parent, parent_file_id)
3149
_reparent_transform_children(tt, parent_id, new_parent_id)
3150
if parent_file_id is not None:
3151
tt.unversion_file(parent_id)
3152
new_conflicts.add((c_type, 'Created directory', new_parent_id))
3153
elif c_type == 'versioning no contents':
3154
tt.cancel_versioning(conflict[1])
3155
return new_conflicts
3158
def cook_conflicts(raw_conflicts, tt):
    """Generate a list of cooked conflicts, sorted by file path"""
    conflict_iter = iter_cook_conflicts(raw_conflicts, tt)
    return sorted(conflict_iter, key=conflicts.Conflict.sort_key)


def iter_cook_conflicts(raw_conflicts, tt):
    fp = FinalPaths(tt)
    for conflict in raw_conflicts:
        c_type = conflict[0]
        action = conflict[1]
        modified_path = fp.get_path(conflict[2])
        modified_id = tt.final_file_id(conflict[2])
        if len(conflict) == 3:
            yield conflicts.Conflict.factory(
                c_type, action=action, path=modified_path, file_id=modified_id)
        else:
            conflicting_path = fp.get_path(conflict[3])
            conflicting_id = tt.final_file_id(conflict[3])
            yield conflicts.Conflict.factory(
                c_type, action=action, path=modified_path,
                file_id=modified_id,
                conflict_path=conflicting_path,
                conflict_file_id=conflicting_id)


class _FileMover(object):
    """Moves and deletes files for TreeTransform, tracking operations"""

    def __init__(self):
        self.past_renames = []
        self.pending_deletions = []

    def rename(self, from_, to):
        """Rename a file from one path to another."""
        try:
            os.rename(from_, to)
        except OSError, e:
            if e.errno in (errno.EEXIST, errno.ENOTEMPTY):
                raise errors.FileExists(to, str(e))
            # normal OSError doesn't include filenames so it's hard to see where
            # the problem is, see https://bugs.launchpad.net/bzr/+bug/491763
            raise errors.TransformRenameFailed(from_, to, str(e), e.errno)
        self.past_renames.append((from_, to))

    def pre_delete(self, from_, to):
        """Rename a file out of the way and mark it for deletion.

        Unlike os.unlink, this works equally well for files and directories.
        :param from_: The current file path
        :param to: A temporary path for the file
        """
        self.rename(from_, to)
        self.pending_deletions.append(to)

    def rollback(self):
        """Reverse all renames that have been performed"""
        for from_, to in reversed(self.past_renames):
            try:
                os.rename(to, from_)
            except OSError, e:
                raise errors.TransformRenameFailed(to, from_, str(e), e.errno)
        # after rollback, don't reuse _FileMover
        self.past_renames = None
        self.pending_deletions = None

    def apply_deletions(self):
        """Apply all marked deletions"""
        for path in self.pending_deletions:
            delete_any(path)
        # after apply_deletions, don't reuse _FileMover
        self.past_renames = None
        self.pending_deletions = None
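

# Illustrative sketch of the two-phase protocol apply() drives on _FileMover:
# every rename (including pre_delete, which is just a rename into the
# deletion staging area) happens first, and only after all renames succeed
# are the queued deletions applied; any failure triggers rollback().
#
#   mover = _FileMover()
#   try:
#       mover.pre_delete(old_path, staged_path)
#       mover.rename(limbo_path, final_path)
#   except:
#       mover.rollback()
#       raise
#   else:
#       mover.apply_deletions()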