# Copyright (C) 2006-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
from stat import S_ISREG, S_IEXEC
29
lazy_import.lazy_import(globals(), """
41
revision as _mod_revision,
46
from bzrlib.errors import (DuplicateKey, MalformedTransform, NoSuchFile,
47
ReusingTransform, CantMoveRoot,
48
ExistingLimbo, ImmortalLimbo, NoFinalPath,
50
from bzrlib.filters import filtered_output_bytes, ContentFilterContext
51
from bzrlib.osutils import (
60
from bzrlib.progress import ProgressPhase
61
from bzrlib.symbol_versioning import (
68
# Sentinel used as the parent of the tree root; it is not a real trans_id,
# and code elsewhere compares against it with ``is`` (see _parent_loops).
ROOT_PARENT = "root-parent"
70
def unique_add(map, key, value):
72
raise DuplicateKey(key=key)
77
class _TransformResults(object):
78
def __init__(self, modified_paths, rename_count):
80
self.modified_paths = modified_paths
81
self.rename_count = rename_count
84
class TreeTransformBase(object):
85
"""The base class for TreeTransform and its kin."""
87
def __init__(self, tree, pb=None,
91
:param tree: The tree that will be transformed, but not necessarily
94
:param case_sensitive: If True, the target of the transform is
95
case sensitive, not just case preserving.
100
# mapping of trans_id -> new basename
102
# mapping of trans_id -> new parent trans_id
103
self._new_parent = {}
104
# mapping of trans_id with new contents -> new file_kind
105
self._new_contents = {}
106
# mapping of trans_id => (sha1 of content, stat_value)
107
self._observed_sha1s = {}
108
# Set of trans_ids whose contents will be removed
109
self._removed_contents = set()
110
# Mapping of trans_id -> new execute-bit value
111
self._new_executability = {}
112
# Mapping of trans_id -> new tree-reference value
113
self._new_reference_revision = {}
114
# Mapping of trans_id -> new file_id
116
# Mapping of old file-id -> trans_id
117
self._non_present_ids = {}
118
# Mapping of new file_id -> trans_id
120
# Set of trans_ids that will be removed
121
self._removed_id = set()
122
# Mapping of path in old tree -> trans_id
123
self._tree_path_ids = {}
124
# Mapping trans_id -> path in old tree
125
self._tree_id_paths = {}
126
# The trans_id that will be used as the tree root
127
root_id = tree.get_root_id()
128
if root_id is not None:
129
self._new_root = self.trans_id_tree_file_id(root_id)
131
self._new_root = None
132
# Indicator of whether the transform has been applied
136
# Whether the target is case sensitive
137
self._case_sensitive_target = case_sensitive
138
# A counter of how many files have been renamed
139
self.rename_count = 0
142
"""Support Context Manager API."""
145
def __exit__(self, exc_type, exc_val, exc_tb):
146
"""Support Context Manager API."""
150
"""Release the working tree lock, if held.
152
This is required if apply has not been invoked, but can be invoked
155
if self._tree is None:
160
@property
def root(self):
    """The trans_id that is used as the tree root (read-only)."""
    return self._new_root
165
def _assign_id(self):
166
"""Produce a new tranform id"""
167
new_id = "new-%s" % self._id_number
171
def create_path(self, name, parent):
172
"""Assign a transaction id to a new path"""
173
trans_id = self._assign_id()
174
unique_add(self._new_name, trans_id, name)
175
unique_add(self._new_parent, trans_id, parent)
178
def adjust_path(self, name, parent, trans_id):
179
"""Change the path that is assigned to a transaction id."""
181
raise ValueError("Parent trans-id may not be None")
182
if trans_id == self._new_root:
184
self._new_name[trans_id] = name
185
self._new_parent[trans_id] = parent
187
def adjust_root_path(self, name, parent):
188
"""Emulate moving the root by moving all children, instead.
190
We do this by undoing the association of root's transaction id with the
191
current tree. This allows us to create a new directory with that
192
transaction id. We unversion the root directory and version the
193
physically new directory, and hope someone versions the tree root
196
old_root = self._new_root
197
old_root_file_id = self.final_file_id(old_root)
198
# force moving all children of root
199
for child_id in self.iter_tree_children(old_root):
200
if child_id != parent:
201
self.adjust_path(self.final_name(child_id),
202
self.final_parent(child_id), child_id)
203
file_id = self.final_file_id(child_id)
204
if file_id is not None:
205
self.unversion_file(child_id)
206
self.version_file(file_id, child_id)
208
# the physical root needs a new transaction id
209
self._tree_path_ids.pop("")
210
self._tree_id_paths.pop(old_root)
211
self._new_root = self.trans_id_tree_file_id(self._tree.get_root_id())
212
if parent == old_root:
213
parent = self._new_root
214
self.adjust_path(name, parent, old_root)
215
self.create_directory(old_root)
216
self.version_file(old_root_file_id, old_root)
217
self.unversion_file(self._new_root)
219
def fixup_new_roots(self):
220
"""Reinterpret requests to change the root directory
222
Instead of creating a root directory, or moving an existing directory,
223
all the attributes and children of the new root are applied to the
224
existing root directory.
226
This means that the old root trans-id becomes obsolete, so it is
227
recommended only to invoke this after the root trans-id has become
231
new_roots = [k for k, v in self._new_parent.iteritems() if v is
233
if len(new_roots) < 1:
234
if self.final_kind(self.root) is None:
235
self.cancel_deletion(self.root)
236
if self.final_file_id(self.root) is None:
237
self.version_file(self.tree_file_id(self.root),
240
if len(new_roots) != 1:
241
raise ValueError('A tree cannot have two roots!')
242
if self._new_root is None:
243
self._new_root = new_roots[0]
245
old_new_root = new_roots[0]
246
# unversion the new root's directory.
247
if self.final_kind(self._new_root) is None:
248
file_id = self.final_file_id(old_new_root)
250
file_id = self.final_file_id(self._new_root)
251
if old_new_root in self._new_id:
252
self.cancel_versioning(old_new_root)
254
self.unversion_file(old_new_root)
255
# if, at this stage, root still has an old file_id, zap it so we can
256
# stick a new one in.
257
if (self.tree_file_id(self._new_root) is not None and
258
self._new_root not in self._removed_id):
259
self.unversion_file(self._new_root)
260
if file_id is not None:
261
self.version_file(file_id, self._new_root)
263
# Now move children of new root into old root directory.
264
# Ensure all children are registered with the transaction, but don't
265
# use directly-- some tree children have new parents
266
list(self.iter_tree_children(old_new_root))
267
# Move all children of new root into old root directory.
268
for child in self.by_parent().get(old_new_root, []):
269
self.adjust_path(self.final_name(child), self._new_root, child)
271
# Ensure old_new_root has no directory.
272
if old_new_root in self._new_contents:
273
self.cancel_creation(old_new_root)
275
self.delete_contents(old_new_root)
277
# prevent deletion of root directory.
278
if self._new_root in self._removed_contents:
279
self.cancel_deletion(self._new_root)
281
# destroy path info for old_new_root.
282
del self._new_parent[old_new_root]
283
del self._new_name[old_new_root]
285
def trans_id_tree_file_id(self, inventory_id):
286
"""Determine the transaction id of a working tree file.
288
This reflects only files that already exist, not ones that will be
289
added by transactions.
291
if inventory_id is None:
292
raise ValueError('None is not a valid file id')
293
path = self._tree.id2path(inventory_id)
294
return self.trans_id_tree_path(path)
296
def trans_id_file_id(self, file_id):
297
"""Determine or set the transaction id associated with a file ID.
298
A new id is only created for file_ids that were never present. If
299
a transaction has been unversioned, it is deliberately still returned.
300
(this will likely lead to an unversioned parent conflict.)
303
raise ValueError('None is not a valid file id')
304
if file_id in self._r_new_id and self._r_new_id[file_id] is not None:
305
return self._r_new_id[file_id]
308
self._tree.iter_entries_by_dir([file_id]).next()
309
except StopIteration:
310
if file_id in self._non_present_ids:
311
return self._non_present_ids[file_id]
313
trans_id = self._assign_id()
314
self._non_present_ids[file_id] = trans_id
317
return self.trans_id_tree_file_id(file_id)
319
def trans_id_tree_path(self, path):
    """Determine (and maybe set) the transaction ID for a tree path.

    The path is canonicalized first; a fresh trans_id is assigned and
    recorded in both path<->id maps the first time a path is seen.
    """
    canon = self.canonical_path(path)
    trans_id = self._tree_path_ids.get(canon)
    if trans_id is None:
        # First time we see this path: allocate an id and record the
        # mapping in both directions.
        trans_id = self._assign_id()
        self._tree_path_ids[canon] = trans_id
        self._tree_id_paths[trans_id] = canon
    return trans_id
327
def get_tree_parent(self, trans_id):
328
"""Determine id of the parent in the tree."""
329
path = self._tree_id_paths[trans_id]
332
return self.trans_id_tree_path(os.path.dirname(path))
334
def delete_contents(self, trans_id):
335
"""Schedule the contents of a path entry for deletion"""
336
kind = self.tree_kind(trans_id)
338
self._removed_contents.add(trans_id)
340
def cancel_deletion(self, trans_id):
    """Cancel a scheduled deletion.

    :raises KeyError: if no deletion was scheduled for trans_id
        (``set.remove`` semantics).
    """
    self._removed_contents.remove(trans_id)
344
def unversion_file(self, trans_id):
    """Schedule a path entry to become unversioned.

    Only records the request; nothing is changed until the transform is
    applied.
    """
    self._removed_id.add(trans_id)
348
def delete_versioned(self, trans_id):
    """Delete and unversion a versioned file.

    Convenience wrapper scheduling both the content removal and the
    unversioning of trans_id.
    """
    self.delete_contents(trans_id)
    self.unversion_file(trans_id)
353
def set_executability(self, executability, trans_id):
354
"""Schedule setting of the 'execute' bit
355
To unschedule, set to None
357
if executability is None:
358
del self._new_executability[trans_id]
360
unique_add(self._new_executability, trans_id, executability)
362
def set_tree_reference(self, revision_id, trans_id):
    """Set the reference associated with a directory.

    May raise DuplicateKey if a reference revision was already set for
    trans_id (``unique_add`` semantics).
    """
    unique_add(self._new_reference_revision, trans_id, revision_id)
366
def version_file(self, file_id, trans_id):
367
"""Schedule a file to become versioned."""
370
unique_add(self._new_id, trans_id, file_id)
371
unique_add(self._r_new_id, file_id, trans_id)
373
def cancel_versioning(self, trans_id):
    """Undo a previous versioning of a file."""
    # Pop the forward mapping and use the popped file_id to clear the
    # reverse mapping, keeping the two dicts consistent.
    file_id = self._new_id.pop(trans_id)
    del self._r_new_id[file_id]
379
def new_paths(self, filesystem_only=False):
380
"""Determine the paths of all new and changed files.
382
:param filesystem_only: if True, only calculate values for files
383
that require renames or execute bit changes.
387
stale_ids = self._needs_rename.difference(self._new_name)
388
stale_ids.difference_update(self._new_parent)
389
stale_ids.difference_update(self._new_contents)
390
stale_ids.difference_update(self._new_id)
391
needs_rename = self._needs_rename.difference(stale_ids)
392
id_sets = (needs_rename, self._new_executability)
394
id_sets = (self._new_name, self._new_parent, self._new_contents,
395
self._new_id, self._new_executability)
396
for id_set in id_sets:
397
new_ids.update(id_set)
398
return sorted(FinalPaths(self).get_paths(new_ids))
400
def _inventory_altered(self):
401
"""Determine which trans_ids need new Inventory entries.
403
An new entry is needed when anything that would be reflected by an
404
inventory entry changes, including file name, file_id, parent file_id,
405
file kind, and the execute bit.
407
Some care is taken to return entries with real changes, not cases
408
where the value is deleted and then restored to its original value,
409
but some actually unchanged values may be returned.
411
:returns: A list of (path, trans_id) for all items requiring an
412
inventory change. Ordered by path.
415
# Find entries whose file_ids are new (or changed).
416
new_file_id = set(t for t in self._new_id
417
if self._new_id[t] != self.tree_file_id(t))
418
for id_set in [self._new_name, self._new_parent, new_file_id,
419
self._new_executability]:
420
changed_ids.update(id_set)
421
# removing implies a kind change
422
changed_kind = set(self._removed_contents)
424
changed_kind.intersection_update(self._new_contents)
425
# Ignore entries that are already known to have changed.
426
changed_kind.difference_update(changed_ids)
427
# to keep only the truly changed ones
428
changed_kind = (t for t in changed_kind
429
if self.tree_kind(t) != self.final_kind(t))
430
# all kind changes will alter the inventory
431
changed_ids.update(changed_kind)
432
# To find entries with changed parent_ids, find parents which existed,
433
# but changed file_id.
434
changed_file_id = set(t for t in new_file_id if t in self._removed_id)
435
# Now add all their children to the set.
436
for parent_trans_id in new_file_id:
437
changed_ids.update(self.iter_tree_children(parent_trans_id))
438
return sorted(FinalPaths(self).get_paths(changed_ids))
440
def final_kind(self, trans_id):
441
"""Determine the final file kind, after any changes applied.
443
:return: None if the file does not exist/has no contents. (It is
444
conceivable that a path would be created without the corresponding
445
contents insertion command)
447
if trans_id in self._new_contents:
448
return self._new_contents[trans_id]
449
elif trans_id in self._removed_contents:
452
return self.tree_kind(trans_id)
454
def tree_file_id(self, trans_id):
455
"""Determine the file id associated with the trans_id in the tree"""
457
path = self._tree_id_paths[trans_id]
459
# the file is a new, unversioned file, or invalid trans_id
461
# the file is old; the old id is still valid
462
if self._new_root == trans_id:
463
return self._tree.get_root_id()
464
return self._tree.path2id(path)
466
def final_file_id(self, trans_id):
467
"""Determine the file id after any changes are applied, or None.
469
None indicates that the file will not be versioned after changes are
473
return self._new_id[trans_id]
475
if trans_id in self._removed_id:
477
return self.tree_file_id(trans_id)
479
def inactive_file_id(self, trans_id):
480
"""Return the inactive file_id associated with a transaction id.
481
That is, the one in the tree or in non_present_ids.
482
The file_id may actually be active, too.
484
file_id = self.tree_file_id(trans_id)
485
if file_id is not None:
487
for key, value in self._non_present_ids.iteritems():
488
if value == trans_id:
491
def final_parent(self, trans_id):
492
"""Determine the parent file_id, after any changes are applied.
494
ROOT_PARENT is returned for the tree root.
497
return self._new_parent[trans_id]
499
return self.get_tree_parent(trans_id)
501
def final_name(self, trans_id):
502
"""Determine the final filename, after all changes are applied."""
504
return self._new_name[trans_id]
507
return os.path.basename(self._tree_id_paths[trans_id])
509
raise NoFinalPath(trans_id, self)
512
"""Return a map of parent: children for known parents.
514
Only new paths and parents of tree files with assigned ids are used.
517
items = list(self._new_parent.iteritems())
518
items.extend((t, self.final_parent(t)) for t in
519
self._tree_id_paths.keys())
520
for trans_id, parent_id in items:
521
if parent_id not in by_parent:
522
by_parent[parent_id] = set()
523
by_parent[parent_id].add(trans_id)
526
def path_changed(self, trans_id):
    """Return True if a trans_id's path has changed."""
    renamed = trans_id in self._new_name
    reparented = trans_id in self._new_parent
    return renamed or reparented
530
def new_contents(self, trans_id):
    """Return True if new contents have been scheduled for trans_id."""
    return (trans_id in self._new_contents)
533
def find_conflicts(self):
534
"""Find any violations of inventory or filesystem invariants"""
535
if self._done is True:
536
raise ReusingTransform()
538
# ensure all children of all existent parents are known
539
# all children of non-existent parents are known, by definition.
540
self._add_tree_children()
541
by_parent = self.by_parent()
542
conflicts.extend(self._unversioned_parents(by_parent))
543
conflicts.extend(self._parent_loops())
544
conflicts.extend(self._duplicate_entries(by_parent))
545
conflicts.extend(self._duplicate_ids())
546
conflicts.extend(self._parent_type_conflicts(by_parent))
547
conflicts.extend(self._improper_versioning())
548
conflicts.extend(self._executability_conflicts())
549
conflicts.extend(self._overwrite_conflicts())
552
def _check_malformed(self):
    """Raise MalformedTransform if find_conflicts reports any conflicts."""
    conflicts = self.find_conflicts()
    # An empty list is falsy, so this matches the len(...) != 0 check.
    if conflicts:
        raise MalformedTransform(conflicts=conflicts)
557
def _add_tree_children(self):
558
"""Add all the children of all active parents to the known paths.
560
Active parents are those which gain children, and those which are
561
removed. This is a necessary first step in detecting conflicts.
563
parents = self.by_parent().keys()
564
parents.extend([t for t in self._removed_contents if
565
self.tree_kind(t) == 'directory'])
566
for trans_id in self._removed_id:
567
file_id = self.tree_file_id(trans_id)
568
if file_id is not None:
569
# XXX: This seems like something that should go via a different
571
if self._tree.inventory[file_id].kind == 'directory':
572
parents.append(trans_id)
573
elif self.tree_kind(trans_id) == 'directory':
574
parents.append(trans_id)
576
for parent_id in parents:
577
# ensure that all children are registered with the transaction
578
list(self.iter_tree_children(parent_id))
580
@deprecated_method(deprecated_in((2, 3, 0)))
def has_named_child(self, by_parent, parent_id, name):
    """Deprecated since 2.3.0: check whether parent_id has a child ``name``.

    Delegates to _has_named_child, passing the children recorded for
    parent_id in the supplied by_parent mapping.
    """
    return self._has_named_child(
        name, parent_id, known_children=by_parent.get(parent_id, []))
585
def _has_named_child(self, name, parent_id, known_children):
586
"""Does a parent already have a name child.
588
:param name: The searched for name.
590
:param parent_id: The parent for which the check is made.
592
:param known_children: The already known children. This should have
593
been recently obtained from `self.by_parent.get(parent_id)`
594
(or will be if None is passed).
596
if known_children is None:
597
known_children = self.by_parent().get(parent_id, [])
598
for child in known_children:
599
if self.final_name(child) == name:
601
parent_path = self._tree_id_paths.get(parent_id, None)
602
if parent_path is None:
603
# No parent... no children
605
child_path = joinpath(parent_path, name)
606
child_id = self._tree_path_ids.get(child_path, None)
608
# Not known by the tree transform yet, check the filesystem
609
return osutils.lexists(self._tree.abspath(child_path))
611
raise AssertionError('child_id is missing: %s, %s, %s'
612
% (name, parent_id, child_id))
614
def _available_backup_name(self, name, target_id):
615
"""Find an available backup name.
617
:param name: The basename of the file.
619
:param target_id: The directory trans_id where the backup should
622
known_children = self.by_parent().get(target_id, [])
623
return osutils.available_backup_name(
625
lambda base: self._has_named_child(
626
base, target_id, known_children))
628
def _parent_loops(self):
629
"""No entry should be its own ancestor"""
631
for trans_id in self._new_parent:
634
while parent_id is not ROOT_PARENT:
637
parent_id = self.final_parent(parent_id)
640
if parent_id == trans_id:
641
conflicts.append(('parent loop', trans_id))
642
if parent_id in seen:
646
def _unversioned_parents(self, by_parent):
647
"""If parent directories are versioned, children must be versioned."""
649
for parent_id, children in by_parent.iteritems():
650
if parent_id is ROOT_PARENT:
652
if self.final_file_id(parent_id) is not None:
654
for child_id in children:
655
if self.final_file_id(child_id) is not None:
656
conflicts.append(('unversioned parent', parent_id))
660
def _improper_versioning(self):
661
"""Cannot version a file with no contents, or a bad type.
663
However, existing entries with no contents are okay.
666
for trans_id in self._new_id.iterkeys():
667
kind = self.final_kind(trans_id)
669
conflicts.append(('versioning no contents', trans_id))
671
if not inventory.InventoryEntry.versionable_kind(kind):
672
conflicts.append(('versioning bad kind', trans_id, kind))
675
def _executability_conflicts(self):
676
"""Check for bad executability changes.
678
Only versioned files may have their executability set, because
679
1. only versioned entries can have executability under windows
680
2. only files can be executable. (The execute bit on a directory
681
does not indicate searchability)
684
for trans_id in self._new_executability:
685
if self.final_file_id(trans_id) is None:
686
conflicts.append(('unversioned executability', trans_id))
688
if self.final_kind(trans_id) != "file":
689
conflicts.append(('non-file executability', trans_id))
692
def _overwrite_conflicts(self):
693
"""Check for overwrites (not permitted on Win32)"""
695
for trans_id in self._new_contents:
696
if self.tree_kind(trans_id) is None:
698
if trans_id not in self._removed_contents:
699
conflicts.append(('overwrite', trans_id,
700
self.final_name(trans_id)))
703
def _duplicate_entries(self, by_parent):
704
"""No directory may have two entries with the same name."""
706
if (self._new_name, self._new_parent) == ({}, {}):
708
for children in by_parent.itervalues():
710
for child_tid in children:
711
name = self.final_name(child_tid)
713
# Keep children only if they still exist in the end
714
if not self._case_sensitive_target:
716
name_ids.append((name, child_tid))
720
for name, trans_id in name_ids:
721
kind = self.final_kind(trans_id)
722
file_id = self.final_file_id(trans_id)
723
if kind is None and file_id is None:
725
if name == last_name:
726
conflicts.append(('duplicate', last_trans_id, trans_id,
729
last_trans_id = trans_id
732
def _duplicate_ids(self):
733
"""Each inventory id may only be used once"""
735
removed_tree_ids = set((self.tree_file_id(trans_id) for trans_id in
737
all_ids = self._tree.all_file_ids()
738
active_tree_ids = all_ids.difference(removed_tree_ids)
739
for trans_id, file_id in self._new_id.iteritems():
740
if file_id in active_tree_ids:
741
old_trans_id = self.trans_id_tree_file_id(file_id)
742
conflicts.append(('duplicate id', old_trans_id, trans_id))
745
def _parent_type_conflicts(self, by_parent):
746
"""Children must have a directory parent"""
748
for parent_id, children in by_parent.iteritems():
749
if parent_id is ROOT_PARENT:
752
for child_id in children:
753
if self.final_kind(child_id) is not None:
758
# There is at least a child, so we need an existing directory to
760
kind = self.final_kind(parent_id)
762
# The directory will be deleted
763
conflicts.append(('missing parent', parent_id))
764
elif kind != "directory":
765
# Meh, we need a *directory* to put something in it
766
conflicts.append(('non-directory parent', parent_id))
769
def _set_executability(self, path, trans_id):
770
"""Set the executability of versioned files """
771
if supports_executable():
772
new_executability = self._new_executability[trans_id]
773
abspath = self._tree.abspath(path)
774
current_mode = os.stat(abspath).st_mode
775
if new_executability:
778
to_mode = current_mode | (0100 & ~umask)
779
# Enable x-bit for others only if they can read it.
780
if current_mode & 0004:
781
to_mode |= 0001 & ~umask
782
if current_mode & 0040:
783
to_mode |= 0010 & ~umask
785
to_mode = current_mode & ~0111
786
os.chmod(abspath, to_mode)
788
def _new_entry(self, name, parent_id, file_id):
789
"""Helper function to create a new filesystem entry."""
790
trans_id = self.create_path(name, parent_id)
791
if file_id is not None:
792
self.version_file(file_id, trans_id)
795
def new_file(self, name, parent_id, contents, file_id=None,
796
executable=None, sha1=None):
797
"""Convenience method to create files.
799
name is the name of the file to create.
800
parent_id is the transaction id of the parent directory of the file.
801
contents is an iterator of bytestrings, which will be used to produce
803
:param file_id: The inventory ID of the file, if it is to be versioned.
804
:param executable: Only valid when a file_id has been supplied.
806
trans_id = self._new_entry(name, parent_id, file_id)
807
# TODO: rather than scheduling a set_executable call,
808
# have create_file create the file with the right mode.
809
self.create_file(contents, trans_id, sha1=sha1)
810
if executable is not None:
811
self.set_executability(executable, trans_id)
814
def new_directory(self, name, parent_id, file_id=None):
815
"""Convenience method to create directories.
817
name is the name of the directory to create.
818
parent_id is the transaction id of the parent directory of the
820
file_id is the inventory ID of the directory, if it is to be versioned.
822
trans_id = self._new_entry(name, parent_id, file_id)
823
self.create_directory(trans_id)
826
def new_symlink(self, name, parent_id, target, file_id=None):
827
"""Convenience method to create symbolic link.
829
name is the name of the symlink to create.
830
parent_id is the transaction id of the parent directory of the symlink.
831
target is a bytestring of the target of the symlink.
832
file_id is the inventory ID of the file, if it is to be versioned.
834
trans_id = self._new_entry(name, parent_id, file_id)
835
self.create_symlink(target, trans_id)
838
def new_orphan(self, trans_id, parent_id):
839
"""Schedule an item to be orphaned.
841
When a directory is about to be removed, its children, if they are not
842
versioned are moved out of the way: they don't have a parent anymore.
844
:param trans_id: The trans_id of the existing item.
845
:param parent_id: The parent trans_id of the item.
847
raise NotImplementedError(self.new_orphan)
849
def _get_potential_orphans(self, dir_id):
850
"""Find the potential orphans in a directory.
852
A directory can't be safely deleted if there are versioned files in it.
853
If all the contained files are unversioned then they can be orphaned.
855
The 'None' return value means that the directory contains at least one
856
versioned file and should not be deleted.
858
:param dir_id: The directory trans id.
860
:return: A list of the orphan trans ids or None if at least one
861
versioned file is present.
864
# Find the potential orphans, stop if one item should be kept
865
for child_tid in self.by_parent()[dir_id]:
866
if child_tid in self._removed_contents:
867
# The child is removed as part of the transform. Since it was
868
# versioned before, it's not an orphan
870
elif self.final_file_id(child_tid) is None:
871
# The child is not versioned
872
orphans.append(child_tid)
874
# We have a versioned file here, searching for orphans is
880
def _affected_ids(self):
881
"""Return the set of transform ids affected by the transform"""
882
trans_ids = set(self._removed_id)
883
trans_ids.update(self._new_id.keys())
884
trans_ids.update(self._removed_contents)
885
trans_ids.update(self._new_contents.keys())
886
trans_ids.update(self._new_executability.keys())
887
trans_ids.update(self._new_name.keys())
888
trans_ids.update(self._new_parent.keys())
891
def _get_file_id_maps(self):
892
"""Return mapping of file_ids to trans_ids in the to and from states"""
893
trans_ids = self._affected_ids()
896
# Build up two dicts: trans_ids associated with file ids in the
897
# FROM state, vs the TO state.
898
for trans_id in trans_ids:
899
from_file_id = self.tree_file_id(trans_id)
900
if from_file_id is not None:
901
from_trans_ids[from_file_id] = trans_id
902
to_file_id = self.final_file_id(trans_id)
903
if to_file_id is not None:
904
to_trans_ids[to_file_id] = trans_id
905
return from_trans_ids, to_trans_ids
907
def _from_file_data(self, from_trans_id, from_versioned, file_id):
908
"""Get data about a file in the from (tree) state
910
Return a (name, parent, kind, executable) tuple
912
from_path = self._tree_id_paths.get(from_trans_id)
914
# get data from working tree if versioned
915
from_entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
916
from_name = from_entry.name
917
from_parent = from_entry.parent_id
920
if from_path is None:
921
# File does not exist in FROM state
925
# File exists, but is not versioned. Have to use path-
927
from_name = os.path.basename(from_path)
928
tree_parent = self.get_tree_parent(from_trans_id)
929
from_parent = self.tree_file_id(tree_parent)
930
if from_path is not None:
931
from_kind, from_executable, from_stats = \
932
self._tree._comparison_data(from_entry, from_path)
935
from_executable = False
936
return from_name, from_parent, from_kind, from_executable
938
def _to_file_data(self, to_trans_id, from_trans_id, from_executable):
939
"""Get data about a file in the to (target) state
941
Return a (name, parent, kind, executable) tuple
943
to_name = self.final_name(to_trans_id)
944
to_kind = self.final_kind(to_trans_id)
945
to_parent = self.final_file_id(self.final_parent(to_trans_id))
946
if to_trans_id in self._new_executability:
947
to_executable = self._new_executability[to_trans_id]
948
elif to_trans_id == from_trans_id:
949
to_executable = from_executable
951
to_executable = False
952
return to_name, to_parent, to_kind, to_executable
954
def iter_changes(self):
955
"""Produce output in the same format as Tree.iter_changes.
957
Will produce nonsensical results if invoked while inventory/filesystem
958
conflicts (as reported by TreeTransform.find_conflicts()) are present.
960
This reads the Transform, but only reproduces changes involving a
961
file_id. Files that are not versioned in either of the FROM or TO
962
states are not reflected.
964
final_paths = FinalPaths(self)
965
from_trans_ids, to_trans_ids = self._get_file_id_maps()
967
# Now iterate through all active file_ids
968
for file_id in set(from_trans_ids.keys() + to_trans_ids.keys()):
970
from_trans_id = from_trans_ids.get(file_id)
971
# find file ids, and determine versioning state
972
if from_trans_id is None:
973
from_versioned = False
974
from_trans_id = to_trans_ids[file_id]
976
from_versioned = True
977
to_trans_id = to_trans_ids.get(file_id)
978
if to_trans_id is None:
980
to_trans_id = from_trans_id
984
from_name, from_parent, from_kind, from_executable = \
985
self._from_file_data(from_trans_id, from_versioned, file_id)
987
to_name, to_parent, to_kind, to_executable = \
988
self._to_file_data(to_trans_id, from_trans_id, from_executable)
990
if not from_versioned:
993
from_path = self._tree_id_paths.get(from_trans_id)
997
to_path = final_paths.get_path(to_trans_id)
998
if from_kind != to_kind:
1000
elif to_kind in ('file', 'symlink') and (
1001
to_trans_id != from_trans_id or
1002
to_trans_id in self._new_contents):
1004
if (not modified and from_versioned == to_versioned and
1005
from_parent==to_parent and from_name == to_name and
1006
from_executable == to_executable):
1008
results.append((file_id, (from_path, to_path), modified,
1009
(from_versioned, to_versioned),
1010
(from_parent, to_parent),
1011
(from_name, to_name),
1012
(from_kind, to_kind),
1013
(from_executable, to_executable)))
1014
return iter(sorted(results, key=lambda x:x[1]))
1016
def get_preview_tree(self):
1017
"""Return a tree representing the result of the transform.
1019
The tree is a snapshot, and altering the TreeTransform will invalidate
1022
return _PreviewTree(self)
1024
def commit(self, branch, message, merge_parents=None, strict=False,
1025
timestamp=None, timezone=None, committer=None, authors=None,
1026
revprops=None, revision_id=None):
1027
"""Commit the result of this TreeTransform to a branch.
1029
:param branch: The branch to commit to.
1030
:param message: The message to attach to the commit.
1031
:param merge_parents: Additional parent revision-ids specified by
1033
:param strict: If True, abort the commit if there are unversioned
1035
:param timestamp: if not None, seconds-since-epoch for the time and
1036
date. (May be a float.)
1037
:param timezone: Optional timezone for timestamp, as an offset in
1039
:param committer: Optional committer in email-id format.
1040
(e.g. "J Random Hacker <jrandom@example.com>")
1041
:param authors: Optional list of authors in email-id format.
1042
:param revprops: Optional dictionary of revision properties.
1043
:param revision_id: Optional revision id. (Specifying a revision-id
1044
may reduce performance for some non-native formats.)
1045
:return: The revision_id of the revision committed.
1047
self._check_malformed()
1049
unversioned = set(self._new_contents).difference(set(self._new_id))
1050
for trans_id in unversioned:
1051
if self.final_file_id(trans_id) is None:
1052
raise errors.StrictCommitFailed()
1054
revno, last_rev_id = branch.last_revision_info()
1055
if last_rev_id == _mod_revision.NULL_REVISION:
1056
if merge_parents is not None:
1057
raise ValueError('Cannot supply merge parents for first'
1061
parent_ids = [last_rev_id]
1062
if merge_parents is not None:
1063
parent_ids.extend(merge_parents)
1064
if self._tree.get_revision_id() != last_rev_id:
1065
raise ValueError('TreeTransform not based on branch basis: %s' %
1066
self._tree.get_revision_id())
1067
revprops = commit.Commit.update_revprops(revprops, branch, authors)
1068
builder = branch.get_commit_builder(parent_ids,
1069
timestamp=timestamp,
1071
committer=committer,
1073
revision_id=revision_id)
1074
preview = self.get_preview_tree()
1075
list(builder.record_iter_changes(preview, last_rev_id,
1076
self.iter_changes()))
1077
builder.finish_inventory()
1078
revision_id = builder.commit(message)
1079
branch.set_last_revision_info(revno + 1, revision_id)
1082
def _text_parent(self, trans_id):
1083
file_id = self.tree_file_id(trans_id)
1085
if file_id is None or self._tree.kind(file_id) != 'file':
1087
except errors.NoSuchFile:
1091
def _get_parents_texts(self, trans_id):
1092
"""Get texts for compression parents of this file."""
1093
file_id = self._text_parent(trans_id)
1096
return (self._tree.get_file_text(file_id),)
1098
def _get_parents_lines(self, trans_id):
1099
"""Get lines for compression parents of this file."""
1100
file_id = self._text_parent(trans_id)
1103
return (self._tree.get_file_lines(file_id),)
1105
def serialize(self, serializer):
1106
"""Serialize this TreeTransform.
1108
:param serializer: A Serialiser like pack.ContainerSerializer.
1110
new_name = dict((k, v.encode('utf-8')) for k, v in
1111
self._new_name.items())
1112
new_executability = dict((k, int(v)) for k, v in
1113
self._new_executability.items())
1114
tree_path_ids = dict((k.encode('utf-8'), v)
1115
for k, v in self._tree_path_ids.items())
1117
'_id_number': self._id_number,
1118
'_new_name': new_name,
1119
'_new_parent': self._new_parent,
1120
'_new_executability': new_executability,
1121
'_new_id': self._new_id,
1122
'_tree_path_ids': tree_path_ids,
1123
'_removed_id': list(self._removed_id),
1124
'_removed_contents': list(self._removed_contents),
1125
'_non_present_ids': self._non_present_ids,
1127
yield serializer.bytes_record(bencode.bencode(attribs),
1129
for trans_id, kind in self._new_contents.items():
1131
lines = osutils.chunks_to_lines(
1132
self._read_file_chunks(trans_id))
1133
parents = self._get_parents_lines(trans_id)
1134
mpdiff = multiparent.MultiParent.from_lines(lines, parents)
1135
content = ''.join(mpdiff.to_patch())
1136
if kind == 'directory':
1138
if kind == 'symlink':
1139
content = self._read_symlink_target(trans_id)
1140
yield serializer.bytes_record(content, ((trans_id, kind),))
1142
def deserialize(self, records):
1143
"""Deserialize a stored TreeTransform.
1145
:param records: An iterable of (names, content) tuples, as per
1146
pack.ContainerPushParser.
1148
names, content = records.next()
1149
attribs = bencode.bdecode(content)
1150
self._id_number = attribs['_id_number']
1151
self._new_name = dict((k, v.decode('utf-8'))
1152
for k, v in attribs['_new_name'].items())
1153
self._new_parent = attribs['_new_parent']
1154
self._new_executability = dict((k, bool(v)) for k, v in
1155
attribs['_new_executability'].items())
1156
self._new_id = attribs['_new_id']
1157
self._r_new_id = dict((v, k) for k, v in self._new_id.items())
1158
self._tree_path_ids = {}
1159
self._tree_id_paths = {}
1160
for bytepath, trans_id in attribs['_tree_path_ids'].items():
1161
path = bytepath.decode('utf-8')
1162
self._tree_path_ids[path] = trans_id
1163
self._tree_id_paths[trans_id] = path
1164
self._removed_id = set(attribs['_removed_id'])
1165
self._removed_contents = set(attribs['_removed_contents'])
1166
self._non_present_ids = attribs['_non_present_ids']
1167
for ((trans_id, kind),), content in records:
1169
mpdiff = multiparent.MultiParent.from_patch(content)
1170
lines = mpdiff.to_lines(self._get_parents_texts(trans_id))
1171
self.create_file(lines, trans_id)
1172
if kind == 'directory':
1173
self.create_directory(trans_id)
1174
if kind == 'symlink':
1175
self.create_symlink(content.decode('utf-8'), trans_id)
1178
class DiskTreeTransform(TreeTransformBase):
1179
"""Tree transform storing its contents on disk."""
1181
def __init__(self, tree, limbodir, pb=None,
1182
case_sensitive=True):
1184
:param tree: The tree that will be transformed, but not necessarily
1186
:param limbodir: A directory where new files can be stored until
1187
they are installed in their proper places
1189
:param case_sensitive: If True, the target of the transform is
1190
case sensitive, not just case preserving.
1192
TreeTransformBase.__init__(self, tree, pb, case_sensitive)
1193
self._limbodir = limbodir
1194
self._deletiondir = None
1195
# A mapping of transform ids to their limbo filename
1196
self._limbo_files = {}
1197
self._possibly_stale_limbo_files = set()
1198
# A mapping of transform ids to a set of the transform ids of children
1199
# that their limbo directory has
1200
self._limbo_children = {}
1201
# Map transform ids to maps of child filename to child transform id
1202
self._limbo_children_names = {}
1203
# List of transform ids that need to be renamed from limbo into place
1204
self._needs_rename = set()
1205
self._creation_mtime = None
1208
"""Release the working tree lock, if held, clean up limbo dir.
1210
This is required if apply has not been invoked, but can be invoked
1213
if self._tree is None:
1216
limbo_paths = self._limbo_files.values() + list(
1217
self._possibly_stale_limbo_files)
1218
limbo_paths = sorted(limbo_paths, reverse=True)
1219
for path in limbo_paths:
1223
if e.errno != errno.ENOENT:
1225
# XXX: warn? perhaps we just got interrupted at an
1226
# inconvenient moment, but perhaps files are disappearing
1229
delete_any(self._limbodir)
1231
# We don't especially care *why* the dir is immortal.
1232
raise ImmortalLimbo(self._limbodir)
1234
if self._deletiondir is not None:
1235
delete_any(self._deletiondir)
1237
raise errors.ImmortalPendingDeletion(self._deletiondir)
1239
TreeTransformBase.finalize(self)
1241
def _limbo_name(self, trans_id):
1242
"""Generate the limbo name of a file"""
1243
limbo_name = self._limbo_files.get(trans_id)
1244
if limbo_name is None:
1245
limbo_name = self._generate_limbo_path(trans_id)
1246
self._limbo_files[trans_id] = limbo_name
1249
def _generate_limbo_path(self, trans_id):
1250
"""Generate a limbo path using the trans_id as the relative path.
1252
This is suitable as a fallback, and when the transform should not be
1253
sensitive to the path encoding of the limbo directory.
1255
self._needs_rename.add(trans_id)
1256
return pathjoin(self._limbodir, trans_id)
1258
def adjust_path(self, name, parent, trans_id):
1259
previous_parent = self._new_parent.get(trans_id)
1260
previous_name = self._new_name.get(trans_id)
1261
TreeTransformBase.adjust_path(self, name, parent, trans_id)
1262
if (trans_id in self._limbo_files and
1263
trans_id not in self._needs_rename):
1264
self._rename_in_limbo([trans_id])
1265
if previous_parent != parent:
1266
self._limbo_children[previous_parent].remove(trans_id)
1267
if previous_parent != parent or previous_name != name:
1268
del self._limbo_children_names[previous_parent][previous_name]
1270
def _rename_in_limbo(self, trans_ids):
1271
"""Fix limbo names so that the right final path is produced.
1273
This means we outsmarted ourselves-- we tried to avoid renaming
1274
these files later by creating them with their final names in their
1275
final parents. But now the previous name or parent is no longer
1276
suitable, so we have to rename them.
1278
Even for trans_ids that have no new contents, we must remove their
1279
entries from _limbo_files, because they are now stale.
1281
for trans_id in trans_ids:
1282
old_path = self._limbo_files[trans_id]
1283
self._possibly_stale_limbo_files.add(old_path)
1284
del self._limbo_files[trans_id]
1285
if trans_id not in self._new_contents:
1287
new_path = self._limbo_name(trans_id)
1288
os.rename(old_path, new_path)
1289
self._possibly_stale_limbo_files.remove(old_path)
1290
for descendant in self._limbo_descendants(trans_id):
1291
desc_path = self._limbo_files[descendant]
1292
desc_path = new_path + desc_path[len(old_path):]
1293
self._limbo_files[descendant] = desc_path
1295
def _limbo_descendants(self, trans_id):
1296
"""Return the set of trans_ids whose limbo paths descend from this."""
1297
descendants = set(self._limbo_children.get(trans_id, []))
1298
for descendant in list(descendants):
1299
descendants.update(self._limbo_descendants(descendant))
1302
def create_file(self, contents, trans_id, mode_id=None, sha1=None):
1303
"""Schedule creation of a new file.
1307
:param contents: an iterator of strings, all of which will be written
1308
to the target destination.
1309
:param trans_id: TreeTransform handle
1310
:param mode_id: If not None, force the mode of the target file to match
1311
the mode of the object referenced by mode_id.
1312
Otherwise, we will try to preserve mode bits of an existing file.
1313
:param sha1: If the sha1 of this content is already known, pass it in.
1314
We can use it to prevent future sha1 computations.
1316
name = self._limbo_name(trans_id)
1317
f = open(name, 'wb')
1319
unique_add(self._new_contents, trans_id, 'file')
1320
f.writelines(contents)
1323
self._set_mtime(name)
1324
self._set_mode(trans_id, mode_id, S_ISREG)
1325
# It is unfortunate we have to use lstat instead of fstat, but we just
1326
# used utime and chmod on the file, so we need the accurate final
1328
if sha1 is not None:
1329
self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
1331
def _read_file_chunks(self, trans_id):
1332
cur_file = open(self._limbo_name(trans_id), 'rb')
1334
return cur_file.readlines()
1338
def _read_symlink_target(self, trans_id):
1339
return os.readlink(self._limbo_name(trans_id))
1341
def _set_mtime(self, path):
1342
"""All files that are created get the same mtime.
1344
This time is set by the first object to be created.
1346
if self._creation_mtime is None:
1347
self._creation_mtime = time.time()
1348
os.utime(path, (self._creation_mtime, self._creation_mtime))
1350
def create_hardlink(self, path, trans_id):
1351
"""Schedule creation of a hard link"""
1352
name = self._limbo_name(trans_id)
1356
if e.errno != errno.EPERM:
1358
raise errors.HardLinkNotSupported(path)
1360
unique_add(self._new_contents, trans_id, 'file')
1362
# Clean up the file, it never got registered so
1363
# TreeTransform.finalize() won't clean it up.
1367
def create_directory(self, trans_id):
1368
"""Schedule creation of a new directory.
1370
See also new_directory.
1372
os.mkdir(self._limbo_name(trans_id))
1373
unique_add(self._new_contents, trans_id, 'directory')
1375
def create_symlink(self, target, trans_id):
1376
"""Schedule creation of a new symbolic link.
1378
target is a bytestring.
1379
See also new_symlink.
1382
os.symlink(target, self._limbo_name(trans_id))
1383
unique_add(self._new_contents, trans_id, 'symlink')
1386
path = FinalPaths(self).get_path(trans_id)
1389
raise UnableCreateSymlink(path=path)
1391
def cancel_creation(self, trans_id):
1392
"""Cancel the creation of new file contents."""
1393
del self._new_contents[trans_id]
1394
if trans_id in self._observed_sha1s:
1395
del self._observed_sha1s[trans_id]
1396
children = self._limbo_children.get(trans_id)
1397
# if this is a limbo directory with children, move them before removing
1399
if children is not None:
1400
self._rename_in_limbo(children)
1401
del self._limbo_children[trans_id]
1402
del self._limbo_children_names[trans_id]
1403
delete_any(self._limbo_name(trans_id))
1405
def new_orphan(self, trans_id, parent_id):
1406
# FIXME: There is no tree config, so we use the branch one (it's weird
1407
# to define it this way as orphaning can only occur in a working tree,
1408
# but that's all we have (for now). It will find the option in
1409
# locations.conf or bazaar.conf though) -- vila 20100916
1410
conf = self._tree.branch.get_config()
1411
conf_var_name = 'bzr.transform.orphan_policy'
1412
orphan_policy = conf.get_user_option(conf_var_name)
1413
default_policy = orphaning_registry.default_key
1414
if orphan_policy is None:
1415
orphan_policy = default_policy
1416
if orphan_policy not in orphaning_registry:
1417
trace.warning('%s (from %s) is not a known policy, defaulting '
1418
'to %s' % (orphan_policy, conf_var_name, default_policy))
1419
orphan_policy = default_policy
1420
handle_orphan = orphaning_registry.get(orphan_policy)
1421
handle_orphan(self, trans_id, parent_id)
1424
class OrphaningError(errors.BzrError):
    """Base class for errors raised while orphaning a file.

    :param orphan: the entry being orphaned.
    :param parent: the directory it was being orphaned into.
    """

    # Only bugs could lead to such exception being seen by the user
    internal_error = True
    _fmt = "Error while orphaning %s in %s directory"

    def __init__(self, orphan, parent):
        errors.BzrError.__init__(self)
        self.orphan = orphan
        self.parent = parent
1436
class OrphaningForbidden(OrphaningError):
    """Raised when the active orphaning policy refuses to create orphans.

    :param policy: the name of the policy that forbade orphaning.
    """

    _fmt = "Policy: %s doesn't allow creating orphans."

    def __init__(self, policy):
        errors.BzrError.__init__(self)
        self.policy = policy
1445
def move_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This creates a new orphan in the `bzr-orphans` dir at the root of the
    working tree.

    :param tt: The TreeTransform orphaning `trans_id`.

    :param orphan_id: The trans id that should be orphaned.

    :param parent_id: The orphan parent trans id.
    """
    # Add the orphan dir if it doesn't exist
    orphan_dir_basename = 'bzr-orphans'
    od_id = tt.trans_id_tree_path(orphan_dir_basename)
    if tt.final_kind(od_id) is None:
        tt.create_directory(od_id)
    parent_path = tt._tree_id_paths[parent_id]
    # Find a name that doesn't exist yet in the orphan dir
    actual_name = tt.final_name(orphan_id)
    new_name = tt._available_backup_name(actual_name, od_id)
    tt.adjust_path(new_name, od_id, orphan_id)
    trace.warning('%s has been orphaned in %s'
                  % (joinpath(parent_path, actual_name), orphan_dir_basename))
1471
def refuse_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This refuses to create orphan, letting the caller handle the conflict.
    """
    raise OrphaningForbidden('never')
1479
# Registry of the available orphaning policies; looked up by name in
# DiskTreeTransform.new_orphan via the 'bzr.transform.orphan_policy' option.
orphaning_registry = registry.Registry()
orphaning_registry.register(
    'conflict', refuse_orphan,
    'Leave orphans in place and create a conflict on the directory.')
orphaning_registry.register(
    'move', move_orphan,
    'Move orphans into the bzr-orphans directory.')
orphaning_registry._set_default_key('conflict')
1489
class TreeTransform(DiskTreeTransform):
1490
"""Represent a tree transformation.
1492
This object is designed to support incremental generation of the transform,
1495
However, it gives optimum performance when parent directories are created
1496
before their contents. The transform is then able to put child files
1497
directly in their parent directory, avoiding later renames.
1499
It is easy to produce malformed transforms, but they are generally
1500
harmless. Attempting to apply a malformed transform will cause an
1501
exception to be raised before any modifications are made to the tree.
1503
Many kinds of malformed transforms can be corrected with the
1504
resolve_conflicts function. The remaining ones indicate programming error,
1505
such as trying to create a file with no path.
1507
Two sets of file creation methods are supplied. Convenience methods are:
1512
These are composed of the low-level methods:
1514
* create_file or create_directory or create_symlink
1518
Transform/Transaction ids
1519
-------------------------
1520
trans_ids are temporary ids assigned to all files involved in a transform.
1521
It's possible, even common, that not all files in the Tree have trans_ids.
1523
trans_ids are used because filenames and file_ids are not good enough
1524
identifiers; filenames change, and not all files have file_ids. File-ids
1525
are also associated with trans-ids, so that moving a file moves its
1528
trans_ids are only valid for the TreeTransform that generated them.
1532
Limbo is a temporary directory use to hold new versions of files.
1533
Files are added to limbo by create_file, create_directory, create_symlink,
1534
and their convenience variants (new_*). Files may be removed from limbo
1535
using cancel_creation. Files are renamed from limbo into their final
1536
location as part of TreeTransform.apply
1538
Limbo must be cleaned up, by either calling TreeTransform.apply or
1539
calling TreeTransform.finalize.
1541
Files are placed into limbo inside their parent directories, where
1542
possible. This reduces subsequent renames, and makes operations involving
1543
lots of files faster. This optimization is only possible if the parent
1544
directory is created *before* creating any of its children, so avoid
1545
creating children before parents, where possible.
1549
This temporary directory is used by _FileMover for storing files that are
1550
about to be deleted. In case of rollback, the files will be restored.
1551
FileMover does not delete files until it is sure that a rollback will not
1554
def __init__(self, tree, pb=None):
1555
"""Note: a tree_write lock is taken on the tree.
1557
Use TreeTransform.finalize() to release the lock (can be omitted if
1558
TreeTransform.apply() called).
1560
tree.lock_tree_write()
1563
limbodir = urlutils.local_path_from_url(
1564
tree._transport.abspath('limbo'))
1568
if e.errno == errno.EEXIST:
1569
raise ExistingLimbo(limbodir)
1570
deletiondir = urlutils.local_path_from_url(
1571
tree._transport.abspath('pending-deletion'))
1573
os.mkdir(deletiondir)
1575
if e.errno == errno.EEXIST:
1576
raise errors.ExistingPendingDeletion(deletiondir)
1581
# Cache of realpath results, to speed up canonical_path
1582
self._realpaths = {}
1583
# Cache of relpath results, to speed up canonical_path
1585
DiskTreeTransform.__init__(self, tree, limbodir, pb,
1586
tree.case_sensitive)
1587
self._deletiondir = deletiondir
1589
def canonical_path(self, path):
1590
"""Get the canonical tree-relative path"""
1591
# don't follow final symlinks
1592
abs = self._tree.abspath(path)
1593
if abs in self._relpaths:
1594
return self._relpaths[abs]
1595
dirname, basename = os.path.split(abs)
1596
if dirname not in self._realpaths:
1597
self._realpaths[dirname] = os.path.realpath(dirname)
1598
dirname = self._realpaths[dirname]
1599
abs = pathjoin(dirname, basename)
1600
if dirname in self._relpaths:
1601
relpath = pathjoin(self._relpaths[dirname], basename)
1602
relpath = relpath.rstrip('/\\')
1604
relpath = self._tree.relpath(abs)
1605
self._relpaths[abs] = relpath
1608
def tree_kind(self, trans_id):
1609
"""Determine the file kind in the working tree.
1611
:returns: The file kind or None if the file does not exist
1613
path = self._tree_id_paths.get(trans_id)
1617
return file_kind(self._tree.abspath(path))
1618
except errors.NoSuchFile:
1621
def _set_mode(self, trans_id, mode_id, typefunc):
1622
"""Set the mode of new file contents.
1623
The mode_id is the existing file to get the mode from (often the same
1624
as trans_id). The operation is only performed if there's a mode match
1625
according to typefunc.
1630
old_path = self._tree_id_paths[mode_id]
1634
mode = os.stat(self._tree.abspath(old_path)).st_mode
1636
if e.errno in (errno.ENOENT, errno.ENOTDIR):
1637
# Either old_path doesn't exist, or the parent of the
1638
# target is not a directory (but will be one eventually)
1639
# Either way, we know it doesn't exist *right now*
1640
# See also bug #248448
1645
os.chmod(self._limbo_name(trans_id), mode)
1647
def iter_tree_children(self, parent_id):
1648
"""Iterate through the entry's tree children, if any"""
1650
path = self._tree_id_paths[parent_id]
1654
children = os.listdir(self._tree.abspath(path))
1656
if not (osutils._is_error_enotdir(e)
1657
or e.errno in (errno.ENOENT, errno.ESRCH)):
1661
for child in children:
1662
childpath = joinpath(path, child)
1663
if self._tree.is_control_filename(childpath):
1665
yield self.trans_id_tree_path(childpath)
1667
def _generate_limbo_path(self, trans_id):
1668
"""Generate a limbo path using the final path if possible.
1670
This optimizes the performance of applying the tree transform by
1671
avoiding renames. These renames can be avoided only when the parent
1672
directory is already scheduled for creation.
1674
If the final path cannot be used, falls back to using the trans_id as
1677
parent = self._new_parent.get(trans_id)
1678
# if the parent directory is already in limbo (e.g. when building a
1679
# tree), choose a limbo name inside the parent, to reduce further
1681
use_direct_path = False
1682
if self._new_contents.get(parent) == 'directory':
1683
filename = self._new_name.get(trans_id)
1684
if filename is not None:
1685
if parent not in self._limbo_children:
1686
self._limbo_children[parent] = set()
1687
self._limbo_children_names[parent] = {}
1688
use_direct_path = True
1689
# the direct path can only be used if no other file has
1690
# already taken this pathname, i.e. if the name is unused, or
1691
# if it is already associated with this trans_id.
1692
elif self._case_sensitive_target:
1693
if (self._limbo_children_names[parent].get(filename)
1694
in (trans_id, None)):
1695
use_direct_path = True
1697
for l_filename, l_trans_id in\
1698
self._limbo_children_names[parent].iteritems():
1699
if l_trans_id == trans_id:
1701
if l_filename.lower() == filename.lower():
1704
use_direct_path = True
1706
if not use_direct_path:
1707
return DiskTreeTransform._generate_limbo_path(self, trans_id)
1709
limbo_name = pathjoin(self._limbo_files[parent], filename)
1710
self._limbo_children[parent].add(trans_id)
1711
self._limbo_children_names[parent][filename] = trans_id
1715
def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
1716
"""Apply all changes to the inventory and filesystem.
1718
If filesystem or inventory conflicts are present, MalformedTransform
1721
If apply succeeds, finalize is not necessary.
1723
:param no_conflicts: if True, the caller guarantees there are no
1724
conflicts, so no check is made.
1725
:param precomputed_delta: An inventory delta to use instead of
1727
:param _mover: Supply an alternate FileMover, for testing
1729
if not no_conflicts:
1730
self._check_malformed()
1731
child_pb = ui.ui_factory.nested_progress_bar()
1733
if precomputed_delta is None:
1734
child_pb.update('Apply phase', 0, 2)
1735
inventory_delta = self._generate_inventory_delta()
1738
inventory_delta = precomputed_delta
1741
mover = _FileMover()
1745
child_pb.update('Apply phase', 0 + offset, 2 + offset)
1746
self._apply_removals(mover)
1747
child_pb.update('Apply phase', 1 + offset, 2 + offset)
1748
modified_paths = self._apply_insertions(mover)
1753
mover.apply_deletions()
1756
self._tree.apply_inventory_delta(inventory_delta)
1757
self._apply_observed_sha1s()
1760
return _TransformResults(modified_paths, self.rename_count)
1762
def _generate_inventory_delta(self):
1763
"""Generate an inventory delta for the current transform."""
1764
inventory_delta = []
1765
child_pb = ui.ui_factory.nested_progress_bar()
1766
new_paths = self._inventory_altered()
1767
total_entries = len(new_paths) + len(self._removed_id)
1769
for num, trans_id in enumerate(self._removed_id):
1771
child_pb.update('removing file', num, total_entries)
1772
if trans_id == self._new_root:
1773
file_id = self._tree.get_root_id()
1775
file_id = self.tree_file_id(trans_id)
1776
# File-id isn't really being deleted, just moved
1777
if file_id in self._r_new_id:
1779
path = self._tree_id_paths[trans_id]
1780
inventory_delta.append((path, None, file_id, None))
1781
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1783
entries = self._tree.iter_entries_by_dir(
1784
new_path_file_ids.values())
1785
old_paths = dict((e.file_id, p) for p, e in entries)
1787
for num, (path, trans_id) in enumerate(new_paths):
1789
child_pb.update('adding file',
1790
num + len(self._removed_id), total_entries)
1791
file_id = new_path_file_ids[trans_id]
1795
kind = self.final_kind(trans_id)
1797
kind = self._tree.stored_kind(file_id)
1798
parent_trans_id = self.final_parent(trans_id)
1799
parent_file_id = new_path_file_ids.get(parent_trans_id)
1800
if parent_file_id is None:
1801
parent_file_id = self.final_file_id(parent_trans_id)
1802
if trans_id in self._new_reference_revision:
1803
new_entry = inventory.TreeReference(
1805
self._new_name[trans_id],
1806
self.final_file_id(self._new_parent[trans_id]),
1807
None, self._new_reference_revision[trans_id])
1809
new_entry = inventory.make_entry(kind,
1810
self.final_name(trans_id),
1811
parent_file_id, file_id)
1812
old_path = old_paths.get(new_entry.file_id)
1813
new_executability = self._new_executability.get(trans_id)
1814
if new_executability is not None:
1815
new_entry.executable = new_executability
1816
inventory_delta.append(
1817
(old_path, path, new_entry.file_id, new_entry))
1820
return inventory_delta
1822
def _apply_removals(self, mover):
1823
"""Perform tree operations that remove directory/inventory names.
1825
That is, delete files that are to be deleted, and put any files that
1826
need renaming into limbo. This must be done in strict child-to-parent
1829
If inventory_delta is None, no inventory delta generation is performed.
1831
tree_paths = list(self._tree_path_ids.iteritems())
1832
tree_paths.sort(reverse=True)
1833
child_pb = ui.ui_factory.nested_progress_bar()
1835
for num, (path, trans_id) in enumerate(tree_paths):
1836
# do not attempt to move root into a subdirectory of itself.
1839
child_pb.update('removing file', num, len(tree_paths))
1840
full_path = self._tree.abspath(path)
1841
if trans_id in self._removed_contents:
1842
delete_path = os.path.join(self._deletiondir, trans_id)
1843
mover.pre_delete(full_path, delete_path)
1844
elif (trans_id in self._new_name
1845
or trans_id in self._new_parent):
1847
mover.rename(full_path, self._limbo_name(trans_id))
1848
except errors.TransformRenameFailed, e:
1849
if e.errno != errno.ENOENT:
1852
self.rename_count += 1
1856
def _apply_insertions(self, mover):
1857
"""Perform tree operations that insert directory/inventory names.
1859
That is, create any files that need to be created, and restore from
1860
limbo any files that needed renaming. This must be done in strict
1861
parent-to-child order.
1863
If inventory_delta is None, no inventory delta is calculated, and
1864
no list of modified paths is returned.
1866
new_paths = self.new_paths(filesystem_only=True)
1868
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1870
child_pb = ui.ui_factory.nested_progress_bar()
1872
for num, (path, trans_id) in enumerate(new_paths):
1874
child_pb.update('adding file', num, len(new_paths))
1875
full_path = self._tree.abspath(path)
1876
if trans_id in self._needs_rename:
1878
mover.rename(self._limbo_name(trans_id), full_path)
1879
except errors.TransformRenameFailed, e:
1880
# We may be renaming a dangling inventory id
1881
if e.errno != errno.ENOENT:
1884
self.rename_count += 1
1885
# TODO: if trans_id in self._observed_sha1s, we should
1886
# re-stat the final target, since ctime will be
1887
# updated by the change.
1888
if (trans_id in self._new_contents or
1889
self.path_changed(trans_id)):
1890
if trans_id in self._new_contents:
1891
modified_paths.append(full_path)
1892
if trans_id in self._new_executability:
1893
self._set_executability(path, trans_id)
1894
if trans_id in self._observed_sha1s:
1895
o_sha1, o_st_val = self._observed_sha1s[trans_id]
1896
st = osutils.lstat(full_path)
1897
self._observed_sha1s[trans_id] = (o_sha1, st)
1900
for path, trans_id in new_paths:
1901
# new_paths includes stuff like workingtree conflicts. Only the
1902
# stuff in new_contents actually comes from limbo.
1903
if trans_id in self._limbo_files:
1904
del self._limbo_files[trans_id]
1905
self._new_contents.clear()
1906
return modified_paths
1908
def _apply_observed_sha1s(self):
1909
"""After we have finished renaming everything, update observed sha1s
1911
This has to be done after self._tree.apply_inventory_delta, otherwise
1912
it doesn't know anything about the files we are updating. Also, we want
1913
to do this as late as possible, so that most entries end up cached.
1915
# TODO: this doesn't update the stat information for directories. So
1916
# the first 'bzr status' will still need to rewrite
1917
# .bzr/checkout/dirstate. However, we at least don't need to
1918
# re-read all of the files.
1919
# TODO: If the operation took a while, we could do a time.sleep(3) here
1920
# to allow the clock to tick over and ensure we won't have any
1921
# problems. (we could observe start time, and finish time, and if
1922
# it is less than eg 10% overhead, add a sleep call.)
1923
paths = FinalPaths(self)
1924
for trans_id, observed in self._observed_sha1s.iteritems():
1925
path = paths.get_path(trans_id)
1926
# We could get the file_id, but dirstate prefers to use the path
1927
# anyway, and it is 'cheaper' to determine.
1928
# file_id = self._new_id[trans_id]
1929
self._tree._observed_sha1(None, path, observed)
1932
class TransformPreview(DiskTreeTransform):
    """A TreeTransform for generating preview trees.

    Unlike TreeTransform, this version works when the input tree is a
    RevisionTree, rather than a WorkingTree.  As a result, it tends to ignore
    unversioned files in the input tree.
    """

    def __init__(self, tree, pb=None, case_sensitive=True):
        tree.lock_read()
        limbodir = osutils.mkdtemp(prefix='bzr-limbo-')
        DiskTreeTransform.__init__(self, tree, limbodir, pb, case_sensitive)

    def canonical_path(self, path):
        # Previews operate on tree-relative paths as given.
        return path

    def tree_kind(self, trans_id):
        path = self._tree_id_paths.get(trans_id)
        if path is None:
            return None
        kind = self._tree.path_content_summary(path)[0]
        if kind == 'missing':
            kind = None
        return kind

    def _set_mode(self, trans_id, mode_id, typefunc):
        """Set the mode of new file contents.
        The mode_id is the existing file to get the mode from (often the same
        as trans_id).  The operation is only performed if there's a mode match
        according to typefunc.
        """
        # is it ok to ignore this?  probably
        pass

    def iter_tree_children(self, parent_id):
        """Iterate through the entry's tree children, if any"""
        try:
            path = self._tree_id_paths[parent_id]
        except KeyError:
            return
        file_id = self.tree_file_id(parent_id)
        if file_id is None:
            return
        entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
        children = getattr(entry, 'children', {})
        for child in children:
            childpath = joinpath(path, child)
            yield self.trans_id_tree_path(childpath)

    def new_orphan(self, trans_id, parent_id):
        raise NotImplementedError(self.new_orphan)
1985
class _PreviewTree(tree.InventoryTree):
1986
"""Partial implementation of Tree to support show_diff_trees"""
1988
def __init__(self, transform):
1989
self._transform = transform
1990
self._final_paths = FinalPaths(transform)
1991
self.__by_parent = None
1992
self._parent_ids = []
1993
self._all_children_cache = {}
1994
self._path2trans_id_cache = {}
1995
self._final_name_cache = {}
1996
self._iter_changes_cache = dict((c[0], c) for c in
1997
self._transform.iter_changes())
1999
def _content_change(self, file_id):
    """Return True if the content of this file changed"""
    change = self._iter_changes_cache.get(file_id)
    if change is None:
        return False
    # Index 2 of an iter_changes tuple is the changed_content flag; see
    # InterTree.iter_changes.
    return change[2]
2006
def _get_repository(self):
2007
repo = getattr(self._transform._tree, '_repository', None)
2009
repo = self._transform._tree.branch.repository
2012
def _iter_parent_trees(self):
2013
for revision_id in self.get_parent_ids():
2015
yield self.revision_tree(revision_id)
2016
except errors.NoSuchRevisionInTree:
2017
yield self._get_repository().revision_tree(revision_id)
2019
def _get_file_revision(self, file_id, vf, tree_revision):
    """Add this tree's text for file_id to vf and return its revision.

    Parent texts are taken from this tree's parent trees; the repository
    text store is attached to vf as a fallback so the parent keys can be
    resolved.
    """
    parent_keys = []
    for parent_tree in self._iter_parent_trees():
        parent_keys.append(
            (file_id, parent_tree.get_file_revision(file_id)))
    vf.add_lines((file_id, tree_revision), parent_keys,
                 self.get_file_lines(file_id))
    base_vf = self._get_repository().texts
    if base_vf not in vf.fallback_versionedfiles:
        vf.fallback_versionedfiles.append(base_vf)
    return tree_revision
2030
def _stat_limbo_file(self, file_id=None, trans_id=None):
    """Return the lstat result for a file's limbo incarnation.

    Either file_id or trans_id must be supplied; trans_id is derived
    from file_id when absent.
    """
    transform = self._transform
    if trans_id is None:
        trans_id = transform.trans_id_file_id(file_id)
    limbo_path = transform._limbo_name(trans_id)
    return os.lstat(limbo_path)
2037
@property
def _by_parent(self):
    """Lazily-built mapping of parent trans_id -> child trans_ids.

    Declared as a property: callers access it without parentheses
    (e.g. ``self._by_parent.get(trans_id, [])`` in _all_children), so a
    plain method would hand them the bound method object instead of the
    mapping.  The result is cached on first access in
    ``self.__by_parent`` (set to None in __init__).
    """
    if self.__by_parent is None:
        self.__by_parent = self._transform.by_parent()
    return self.__by_parent
2042
def _comparison_data(self, entry, path):
2043
kind, size, executable, link_or_sha1 = self.path_content_summary(path)
2044
if kind == 'missing':
2048
file_id = self._transform.final_file_id(self._path2trans_id(path))
2049
executable = self.is_executable(file_id, path)
2050
return kind, executable, None
2052
def is_locked(self):
2055
def lock_read(self):
2056
# Perhaps in theory, this should lock the TreeTransform?
2063
def inventory(self):
2064
"""This Tree does not use inventory as its backing data."""
2065
raise NotImplementedError(_PreviewTree.inventory)
2067
def get_root_id(self):
    """Return the file id of the transform's final root directory."""
    return self._transform.final_file_id(self._transform.root)
2070
def all_file_ids(self):
2071
tree_ids = set(self._transform._tree.all_file_ids())
2072
tree_ids.difference_update(self._transform.tree_file_id(t)
2073
for t in self._transform._removed_id)
2074
tree_ids.update(self._transform._new_id.values())
2078
return iter(self.all_file_ids())
2080
def _has_id(self, file_id, fallback_check):
2081
if file_id in self._transform._r_new_id:
2083
elif file_id in set([self._transform.tree_file_id(trans_id) for
2084
trans_id in self._transform._removed_id]):
2087
return fallback_check(file_id)
2089
def has_id(self, file_id):
    """See Tree.has_id."""
    return self._has_id(file_id, self._transform._tree.has_id)
2092
def has_or_had_id(self, file_id):
    """See Tree.has_or_had_id."""
    return self._has_id(file_id, self._transform._tree.has_or_had_id)
2095
def _path2trans_id(self, path):
2096
# We must not use None here, because that is a valid value to store.
2097
trans_id = self._path2trans_id_cache.get(path, object)
2098
if trans_id is not object:
2100
segments = splitpath(path)
2101
cur_parent = self._transform.root
2102
for cur_segment in segments:
2103
for child in self._all_children(cur_parent):
2104
final_name = self._final_name_cache.get(child)
2105
if final_name is None:
2106
final_name = self._transform.final_name(child)
2107
self._final_name_cache[child] = final_name
2108
if final_name == cur_segment:
2112
self._path2trans_id_cache[path] = None
2114
self._path2trans_id_cache[path] = cur_parent
2117
def path2id(self, path):
    """See Tree.path2id."""
    return self._transform.final_file_id(self._path2trans_id(path))
2120
def id2path(self, file_id):
2121
trans_id = self._transform.trans_id_file_id(file_id)
2123
return self._final_paths._determine_path(trans_id)
2125
raise errors.NoSuchId(self, file_id)
2127
def _all_children(self, trans_id):
2128
children = self._all_children_cache.get(trans_id)
2129
if children is not None:
2131
children = set(self._transform.iter_tree_children(trans_id))
2132
# children in the _new_parent set are provided by _by_parent.
2133
children.difference_update(self._transform._new_parent.keys())
2134
children.update(self._by_parent.get(trans_id, []))
2135
self._all_children_cache[trans_id] = children
2138
def iter_children(self, file_id):
    """Yield the final file id of each child of file_id."""
    transform = self._transform
    parent_trans_id = transform.trans_id_file_id(file_id)
    for child in self._all_children(parent_trans_id):
        yield transform.final_file_id(child)
2144
possible_extras = set(self._transform.trans_id_tree_path(p) for p
2145
in self._transform._tree.extras())
2146
possible_extras.update(self._transform._new_contents)
2147
possible_extras.update(self._transform._removed_id)
2148
for trans_id in possible_extras:
2149
if self._transform.final_file_id(trans_id) is None:
2150
yield self._final_paths._determine_path(trans_id)
2152
def _make_inv_entries(self, ordered_entries, specific_file_ids=None,
2153
yield_parents=False):
2154
for trans_id, parent_file_id in ordered_entries:
2155
file_id = self._transform.final_file_id(trans_id)
2158
if (specific_file_ids is not None
2159
and file_id not in specific_file_ids):
2161
kind = self._transform.final_kind(trans_id)
2163
kind = self._transform._tree.stored_kind(file_id)
2164
new_entry = inventory.make_entry(
2166
self._transform.final_name(trans_id),
2167
parent_file_id, file_id)
2168
yield new_entry, trans_id
2170
def _list_files_by_dir(self):
2171
todo = [ROOT_PARENT]
2173
while len(todo) > 0:
2175
parent_file_id = self._transform.final_file_id(parent)
2176
children = list(self._all_children(parent))
2177
paths = dict(zip(children, self._final_paths.get_paths(children)))
2178
children.sort(key=paths.get)
2179
todo.extend(reversed(children))
2180
for trans_id in children:
2181
ordered_ids.append((trans_id, parent_file_id))
2184
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
2185
# This may not be a maximally efficient implementation, but it is
2186
# reasonably straightforward. An implementation that grafts the
2187
# TreeTransform changes onto the tree's iter_entries_by_dir results
2188
# might be more efficient, but requires tricky inferences about stack
2190
ordered_ids = self._list_files_by_dir()
2191
for entry, trans_id in self._make_inv_entries(ordered_ids,
2192
specific_file_ids, yield_parents=yield_parents):
2193
yield unicode(self._final_paths.get_path(trans_id)), entry
2195
def _iter_entries_for_dir(self, dir_path):
2196
"""Return path, entry for items in a directory without recursing down."""
2197
dir_file_id = self.path2id(dir_path)
2199
for file_id in self.iter_children(dir_file_id):
2200
trans_id = self._transform.trans_id_file_id(file_id)
2201
ordered_ids.append((trans_id, file_id))
2202
for entry, trans_id in self._make_inv_entries(ordered_ids):
2203
yield unicode(self._final_paths.get_path(trans_id)), entry
2205
def list_files(self, include_root=False, from_dir=None, recursive=True):
2206
"""See WorkingTree.list_files."""
2207
# XXX This should behave like WorkingTree.list_files, but is really
2208
# more like RevisionTree.list_files.
2212
prefix = from_dir + '/'
2213
entries = self.iter_entries_by_dir()
2214
for path, entry in entries:
2215
if entry.name == '' and not include_root:
2218
if not path.startswith(prefix):
2220
path = path[len(prefix):]
2221
yield path, 'V', entry.kind, entry.file_id, entry
2223
if from_dir is None and include_root is True:
2224
root_entry = inventory.make_entry('directory', '',
2225
ROOT_PARENT, self.get_root_id())
2226
yield '', 'V', 'directory', root_entry.file_id, root_entry
2227
entries = self._iter_entries_for_dir(from_dir or '')
2228
for path, entry in entries:
2229
yield path, 'V', entry.kind, entry.file_id, entry
2231
def kind(self, file_id):
    """See Tree.kind."""
    trans_id = self._transform.trans_id_file_id(file_id)
    return self._transform.final_kind(trans_id)
2235
def stored_kind(self, file_id):
2236
trans_id = self._transform.trans_id_file_id(file_id)
2238
return self._transform._new_contents[trans_id]
2240
return self._transform._tree.stored_kind(file_id)
2242
def get_file_mtime(self, file_id, path=None):
    """See Tree.get_file_mtime"""
    if self._content_change(file_id):
        # Content was recreated by the transform: stat the limbo copy.
        return self._stat_limbo_file(file_id).st_mtime
    return self._transform._tree.get_file_mtime(file_id)
2248
def _file_size(self, entry, stat_value):
    # stat_value is unused; the size comes from the transform's view.
    return self.get_file_size(entry.file_id)
2251
def get_file_size(self, file_id):
2252
"""See Tree.get_file_size"""
2253
trans_id = self._transform.trans_id_file_id(file_id)
2254
kind = self._transform.final_kind(trans_id)
2257
if trans_id in self._transform._new_contents:
2258
return self._stat_limbo_file(trans_id=trans_id).st_size
2259
if self.kind(file_id) == 'file':
2260
return self._transform._tree.get_file_size(file_id)
2264
def get_file_sha1(self, file_id, path=None, stat_value=None):
2265
trans_id = self._transform.trans_id_file_id(file_id)
2266
kind = self._transform._new_contents.get(trans_id)
2268
return self._transform._tree.get_file_sha1(file_id)
2270
fileobj = self.get_file(file_id)
2272
return sha_file(fileobj)
2276
def is_executable(self, file_id, path=None):
2279
trans_id = self._transform.trans_id_file_id(file_id)
2281
return self._transform._new_executability[trans_id]
2284
return self._transform._tree.is_executable(file_id, path)
2286
if e.errno == errno.ENOENT:
2289
except errors.NoSuchId:
2292
def has_filename(self, path):
2293
trans_id = self._path2trans_id(path)
2294
if trans_id in self._transform._new_contents:
2296
elif trans_id in self._transform._removed_contents:
2299
return self._transform._tree.has_filename(path)
2301
def path_content_summary(self, path):
2302
trans_id = self._path2trans_id(path)
2303
tt = self._transform
2304
tree_path = tt._tree_id_paths.get(trans_id)
2305
kind = tt._new_contents.get(trans_id)
2307
if tree_path is None or trans_id in tt._removed_contents:
2308
return 'missing', None, None, None
2309
summary = tt._tree.path_content_summary(tree_path)
2310
kind, size, executable, link_or_sha1 = summary
2313
limbo_name = tt._limbo_name(trans_id)
2314
if trans_id in tt._new_reference_revision:
2315
kind = 'tree-reference'
2317
statval = os.lstat(limbo_name)
2318
size = statval.st_size
2319
if not supports_executable():
2322
executable = statval.st_mode & S_IEXEC
2326
if kind == 'symlink':
2327
link_or_sha1 = os.readlink(limbo_name).decode(osutils._fs_enc)
2328
executable = tt._new_executability.get(trans_id, executable)
2329
return kind, size, executable, link_or_sha1
2331
def iter_changes(self, from_tree, include_unchanged=False,
2332
specific_files=None, pb=None, extra_trees=None,
2333
require_versioned=True, want_unversioned=False):
2334
"""See InterTree.iter_changes.
2336
This has a fast path that is only used when the from_tree matches
2337
the transform tree, and no fancy options are supplied.
2339
if (from_tree is not self._transform._tree or include_unchanged or
2340
specific_files or want_unversioned):
2341
return tree.InterTree(from_tree, self).iter_changes(
2342
include_unchanged=include_unchanged,
2343
specific_files=specific_files,
2345
extra_trees=extra_trees,
2346
require_versioned=require_versioned,
2347
want_unversioned=want_unversioned)
2348
if want_unversioned:
2349
raise ValueError('want_unversioned is not supported')
2350
return self._transform.iter_changes()
2352
def get_file(self, file_id, path=None):
    """See Tree.get_file"""
    if self._content_change(file_id):
        # New content lives in limbo until the transform is applied.
        trans_id = self._transform.trans_id_file_id(file_id)
        limbo_name = self._transform._limbo_name(trans_id)
        return open(limbo_name, 'rb')
    return self._transform._tree.get_file(file_id, path)
2360
def get_file_with_stat(self, file_id, path=None):
    # No stat value is provided for preview content, so None stands in
    # for it.
    return self.get_file(file_id, path), None
2363
def annotate_iter(self, file_id,
2364
default_revision=_mod_revision.CURRENT_REVISION):
2365
changes = self._iter_changes_cache.get(file_id)
2369
changed_content, versioned, kind = (changes[2], changes[3],
2373
get_old = (kind[0] == 'file' and versioned[0])
2375
old_annotation = self._transform._tree.annotate_iter(file_id,
2376
default_revision=default_revision)
2380
return old_annotation
2381
if not changed_content:
2382
return old_annotation
2383
# TODO: This is doing something similar to what WT.annotate_iter is
2384
# doing, however it fails slightly because it doesn't know what
2385
# the *other* revision_id is, so it doesn't know how to give the
2386
# other as the origin for some lines, they all get
2387
# 'default_revision'
2388
# It would be nice to be able to use the new Annotator based
2389
# approach, as well.
2390
return annotate.reannotate([old_annotation],
2391
self.get_file(file_id).readlines(),
2394
def get_symlink_target(self, file_id, path=None):
    """See Tree.get_symlink_target"""
    if self._content_change(file_id):
        # The new link was created in limbo; read its target from there.
        trans_id = self._transform.trans_id_file_id(file_id)
        limbo_name = self._transform._limbo_name(trans_id)
        return osutils.readlink(limbo_name)
    return self._transform._tree.get_symlink_target(file_id)
2402
def walkdirs(self, prefix=''):
2403
pending = [self._transform.root]
2404
while len(pending) > 0:
2405
parent_id = pending.pop()
2408
prefix = prefix.rstrip('/')
2409
parent_path = self._final_paths.get_path(parent_id)
2410
parent_file_id = self._transform.final_file_id(parent_id)
2411
for child_id in self._all_children(parent_id):
2412
path_from_root = self._final_paths.get_path(child_id)
2413
basename = self._transform.final_name(child_id)
2414
file_id = self._transform.final_file_id(child_id)
2415
kind = self._transform.final_kind(child_id)
2416
if kind is not None:
2417
versioned_kind = kind
2420
versioned_kind = self._transform._tree.stored_kind(file_id)
2421
if versioned_kind == 'directory':
2422
subdirs.append(child_id)
2423
children.append((path_from_root, basename, kind, None,
2424
file_id, versioned_kind))
2426
if parent_path.startswith(prefix):
2427
yield (parent_path, parent_file_id), children
2428
pending.extend(sorted(subdirs, key=self._final_paths.get_path,
2431
def get_parent_ids(self):
    # Parent ids are tracked locally; see set_parent_ids.
    return self._parent_ids
2434
def set_parent_ids(self, parent_ids):
    """Set the parent revision ids returned by get_parent_ids."""
    self._parent_ids = parent_ids
2437
def get_revision_tree(self, revision_id):
    # Delegate to the tree being transformed.
    return self._transform._tree.get_revision_tree(revision_id)
2441
def joinpath(parent, child):
2442
"""Join tree-relative paths, handling the tree root specially"""
2443
if parent is None or parent == "":
2446
return pathjoin(parent, child)
2449
class FinalPaths(object):
2450
"""Make path calculation cheap by memoizing paths.
2452
The underlying tree must not be manipulated between calls, or else
2453
the results will likely be incorrect.
2455
def __init__(self, transform):
    object.__init__(self)
    # Memoized trans_id -> path results; filled lazily by get_path().
    self._known_paths = {}
    self.transform = transform
2460
def _determine_path(self, trans_id):
2461
if (trans_id == self.transform.root or trans_id == ROOT_PARENT):
2463
name = self.transform.final_name(trans_id)
2464
parent_id = self.transform.final_parent(trans_id)
2465
if parent_id == self.transform.root:
2468
return pathjoin(self.get_path(parent_id), name)
2470
def get_path(self, trans_id):
    """Find the final path associated with a trans_id"""
    try:
        return self._known_paths[trans_id]
    except KeyError:
        path = self._determine_path(trans_id)
        self._known_paths[trans_id] = path
        return path
2476
def get_paths(self, trans_ids):
    """Return a list of (path, trans_id) pairs for trans_ids."""
    return [(self.get_path(t), t) for t in trans_ids]
2481
def topology_sorted_ids(tree):
2482
"""Determine the topological order of the ids in a tree"""
2483
file_ids = list(tree)
2484
file_ids.sort(key=tree.id2path)
2488
def build_tree(tree, wt, accelerator_tree=None, hardlink=False,
2489
delta_from_tree=False):
2490
"""Create working tree for a branch, using a TreeTransform.
2492
This function should be used on empty trees, having a tree root at most.
2493
(see merge and revert functionality for working with existing trees)
2495
Existing files are handled like so:
2497
- Existing bzrdirs take precedence over creating new items. They are
2498
created as '%s.diverted' % name.
2499
- Otherwise, if the content on disk matches the content we are building,
2500
it is silently replaced.
2501
- Otherwise, conflict resolution will move the old file to 'oldname.moved'.
2503
:param tree: The tree to convert wt into a copy of
2504
:param wt: The working tree that files will be placed into
2505
:param accelerator_tree: A tree which can be used for retrieving file
2506
contents more quickly than tree itself, i.e. a workingtree. tree
2507
will be used for cases where accelerator_tree's content is different.
2508
:param hardlink: If true, hard-link files to accelerator_tree, where
2509
possible. accelerator_tree must implement abspath, i.e. be a
2511
:param delta_from_tree: If true, build_tree may use the input Tree to
2512
generate the inventory delta.
2514
wt.lock_tree_write()
2518
if accelerator_tree is not None:
2519
accelerator_tree.lock_read()
2521
return _build_tree(tree, wt, accelerator_tree, hardlink,
2524
if accelerator_tree is not None:
2525
accelerator_tree.unlock()
2532
def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
2533
"""See build_tree."""
2534
for num, _unused in enumerate(wt.all_file_ids()):
2535
if num > 0: # more than just a root
2536
raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
2538
top_pb = ui.ui_factory.nested_progress_bar()
2539
pp = ProgressPhase("Build phase", 2, top_pb)
2540
if tree.inventory.root is not None:
2541
# This is kind of a hack: we should be altering the root
2542
# as part of the regular tree shape diff logic.
2543
# The conditional test here is to avoid doing an
2544
# expensive operation (flush) every time the root id
2545
# is set within the tree, nor setting the root and thus
2546
# marking the tree as dirty, because we use two different
2547
# idioms here: tree interfaces and inventory interfaces.
2548
if wt.get_root_id() != tree.get_root_id():
2549
wt.set_root_id(tree.get_root_id())
2551
tt = TreeTransform(wt)
2555
file_trans_id[wt.get_root_id()] = \
2556
tt.trans_id_tree_file_id(wt.get_root_id())
2557
pb = ui.ui_factory.nested_progress_bar()
2559
deferred_contents = []
2561
total = len(tree.inventory)
2563
precomputed_delta = []
2565
precomputed_delta = None
2566
# Check if tree inventory has content. If so, we populate
2567
# existing_files with the directory content. If there are no
2568
# entries we skip populating existing_files as its not used.
2569
# This improves performance and unncessary work on large
2570
# directory trees. (#501307)
2572
existing_files = set()
2573
for dir, files in wt.walkdirs():
2574
existing_files.update(f[0] for f in files)
2575
for num, (tree_path, entry) in \
2576
enumerate(tree.inventory.iter_entries_by_dir()):
2577
pb.update("Building tree", num - len(deferred_contents), total)
2578
if entry.parent_id is None:
2581
file_id = entry.file_id
2583
precomputed_delta.append((None, tree_path, file_id, entry))
2584
if tree_path in existing_files:
2585
target_path = wt.abspath(tree_path)
2586
kind = file_kind(target_path)
2587
if kind == "directory":
2589
bzrdir.BzrDir.open(target_path)
2590
except errors.NotBranchError:
2594
if (file_id not in divert and
2595
_content_match(tree, entry, file_id, kind,
2597
tt.delete_contents(tt.trans_id_tree_path(tree_path))
2598
if kind == 'directory':
2600
parent_id = file_trans_id[entry.parent_id]
2601
if entry.kind == 'file':
2602
# We *almost* replicate new_by_entry, so that we can defer
2603
# getting the file text, and get them all at once.
2604
trans_id = tt.create_path(entry.name, parent_id)
2605
file_trans_id[file_id] = trans_id
2606
tt.version_file(file_id, trans_id)
2607
executable = tree.is_executable(file_id, tree_path)
2609
tt.set_executability(executable, trans_id)
2610
trans_data = (trans_id, tree_path, entry.text_sha1)
2611
deferred_contents.append((file_id, trans_data))
2613
file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2616
new_trans_id = file_trans_id[file_id]
2617
old_parent = tt.trans_id_tree_path(tree_path)
2618
_reparent_children(tt, old_parent, new_trans_id)
2619
offset = num + 1 - len(deferred_contents)
2620
_create_files(tt, tree, deferred_contents, pb, offset,
2621
accelerator_tree, hardlink)
2625
divert_trans = set(file_trans_id[f] for f in divert)
2626
resolver = lambda t, c: resolve_checkout(t, c, divert_trans)
2627
raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
2628
if len(raw_conflicts) > 0:
2629
precomputed_delta = None
2630
conflicts = cook_conflicts(raw_conflicts, tt)
2631
for conflict in conflicts:
2632
trace.warning(unicode(conflict))
2634
wt.add_conflicts(conflicts)
2635
except errors.UnsupportedOperation:
2637
result = tt.apply(no_conflicts=True,
2638
precomputed_delta=precomputed_delta)
2645
def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
2647
total = len(desired_files) + offset
2649
if accelerator_tree is None:
2650
new_desired_files = desired_files
2652
iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
2653
unchanged = [(f, p[1]) for (f, p, c, v, d, n, k, e)
2654
in iter if not (c or e[0] != e[1])]
2655
if accelerator_tree.supports_content_filtering():
2656
unchanged = [(f, p) for (f, p) in unchanged
2657
if not accelerator_tree.iter_search_rules([p]).next()]
2658
unchanged = dict(unchanged)
2659
new_desired_files = []
2661
for file_id, (trans_id, tree_path, text_sha1) in desired_files:
2662
accelerator_path = unchanged.get(file_id)
2663
if accelerator_path is None:
2664
new_desired_files.append((file_id,
2665
(trans_id, tree_path, text_sha1)))
2667
pb.update('Adding file contents', count + offset, total)
2669
tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
2672
contents = accelerator_tree.get_file(file_id, accelerator_path)
2673
if wt.supports_content_filtering():
2674
filters = wt._content_filter_stack(tree_path)
2675
contents = filtered_output_bytes(contents, filters,
2676
ContentFilterContext(tree_path, tree))
2678
tt.create_file(contents, trans_id, sha1=text_sha1)
2682
except AttributeError:
2683
# after filtering, contents may no longer be file-like
2687
for count, ((trans_id, tree_path, text_sha1), contents) in enumerate(
2688
tree.iter_files_bytes(new_desired_files)):
2689
if wt.supports_content_filtering():
2690
filters = wt._content_filter_stack(tree_path)
2691
contents = filtered_output_bytes(contents, filters,
2692
ContentFilterContext(tree_path, tree))
2693
tt.create_file(contents, trans_id, sha1=text_sha1)
2694
pb.update('Adding file contents', count + offset, total)
2697
def _reparent_children(tt, old_parent, new_parent):
    """Move every tree child of old_parent under new_parent."""
    for child_id in tt.iter_tree_children(old_parent):
        name = tt.final_name(child_id)
        tt.adjust_path(name, new_parent, child_id)
2702
def _reparent_transform_children(tt, old_parent, new_parent):
    """Reparent the children the transform knows under old_parent.

    Returns the reparented children.
    """
    children = tt.by_parent()[old_parent]
    for child_id in children:
        tt.adjust_path(tt.final_name(child_id), new_parent, child_id)
    return children
2709
def _content_match(tree, entry, file_id, kind, target_path):
2710
if entry.kind != kind:
2712
if entry.kind == "directory":
2714
if entry.kind == "file":
2715
f = file(target_path, 'rb')
2717
if tree.get_file_text(file_id) == f.read():
2721
elif entry.kind == "symlink":
2722
if tree.get_symlink_target(file_id) == os.readlink(target_path):
2727
def resolve_checkout(tt, conflicts, divert):
2728
new_conflicts = set()
2729
for c_type, conflict in ((c[0], c) for c in conflicts):
2730
# Anything but a 'duplicate' would indicate programmer error
2731
if c_type != 'duplicate':
2732
raise AssertionError(c_type)
2733
# Now figure out which is new and which is old
2734
if tt.new_contents(conflict[1]):
2735
new_file = conflict[1]
2736
old_file = conflict[2]
2738
new_file = conflict[2]
2739
old_file = conflict[1]
2741
# We should only get here if the conflict wasn't completely
2743
final_parent = tt.final_parent(old_file)
2744
if new_file in divert:
2745
new_name = tt.final_name(old_file)+'.diverted'
2746
tt.adjust_path(new_name, final_parent, new_file)
2747
new_conflicts.add((c_type, 'Diverted to',
2748
new_file, old_file))
2750
new_name = tt.final_name(old_file)+'.moved'
2751
tt.adjust_path(new_name, final_parent, old_file)
2752
new_conflicts.add((c_type, 'Moved existing file to',
2753
old_file, new_file))
2754
return new_conflicts
2757
def new_by_entry(tt, entry, parent_id, tree):
2758
"""Create a new file according to its inventory entry"""
2762
contents = tree.get_file(entry.file_id).readlines()
2763
executable = tree.is_executable(entry.file_id)
2764
return tt.new_file(name, parent_id, contents, entry.file_id,
2766
elif kind in ('directory', 'tree-reference'):
2767
trans_id = tt.new_directory(name, parent_id, entry.file_id)
2768
if kind == 'tree-reference':
2769
tt.set_tree_reference(entry.reference_revision, trans_id)
2771
elif kind == 'symlink':
2772
target = tree.get_symlink_target(entry.file_id)
2773
return tt.new_symlink(name, parent_id, target, entry.file_id)
2775
raise errors.BadFileKindError(name, kind)
2778
def create_from_tree(tt, trans_id, tree, file_id, bytes=None,
2779
filter_tree_path=None):
2780
"""Create new file contents according to tree contents.
2782
:param filter_tree_path: the tree path to use to lookup
2783
content filters to apply to the bytes output in the working tree.
2784
This only applies if the working tree supports content filtering.
2786
kind = tree.kind(file_id)
2787
if kind == 'directory':
2788
tt.create_directory(trans_id)
2789
elif kind == "file":
2791
tree_file = tree.get_file(file_id)
2793
bytes = tree_file.readlines()
2797
if wt.supports_content_filtering() and filter_tree_path is not None:
2798
filters = wt._content_filter_stack(filter_tree_path)
2799
bytes = filtered_output_bytes(bytes, filters,
2800
ContentFilterContext(filter_tree_path, tree))
2801
tt.create_file(bytes, trans_id)
2802
elif kind == "symlink":
2803
tt.create_symlink(tree.get_symlink_target(file_id), trans_id)
2805
raise AssertionError('Unknown kind %r' % kind)
2808
def create_entry_executability(tt, entry, trans_id):
    """Set the executability of a trans_id according to an inventory entry"""
    if entry.kind != "file":
        # Executability only applies to regular files.
        return
    tt.set_executability(entry.executable, trans_id)
2814
@deprecated_function(deprecated_in((2, 3, 0)))
def get_backup_name(entry, by_parent, parent_trans_id, tt):
    # Deprecated since 2.3.0; thin wrapper around _get_backup_name.
    return _get_backup_name(entry.name, by_parent, parent_trans_id, tt)
2819
@deprecated_function(deprecated_in((2, 3, 0)))
2820
def _get_backup_name(name, by_parent, parent_trans_id, tt):
2821
"""Produce a backup-style name that appears to be available"""
2825
yield "%s.~%d~" % (name, counter)
2827
for new_name in name_gen():
2828
if not tt.has_named_child(by_parent, parent_trans_id, new_name):
2832
def _entry_changes(file_id, entry, working_tree):
2833
"""Determine in which ways the inventory entry has changed.
2835
Returns booleans: has_contents, content_mod, meta_mod
2836
has_contents means there are currently contents, but they differ
2837
contents_mod means contents need to be modified
2838
meta_mod means the metadata needs to be modified
2840
cur_entry = working_tree.inventory[file_id]
2842
working_kind = working_tree.kind(file_id)
2845
has_contents = False
2848
if has_contents is True:
2849
if entry.kind != working_kind:
2850
contents_mod, meta_mod = True, False
2852
cur_entry._read_tree_state(working_tree.id2path(file_id),
2854
contents_mod, meta_mod = entry.detect_changes(cur_entry)
2855
cur_entry._forget_tree_state()
2856
return has_contents, contents_mod, meta_mod
2859
def revert(working_tree, target_tree, filenames, backups=False,
2860
pb=None, change_reporter=None):
2861
"""Revert a working tree's contents to those of a target tree."""
2862
target_tree.lock_read()
2863
pb = ui.ui_factory.nested_progress_bar()
2864
tt = TreeTransform(working_tree, pb)
2866
pp = ProgressPhase("Revert phase", 3, pb)
2867
conflicts, merge_modified = _prepare_revert_transform(
2868
working_tree, target_tree, tt, filenames, backups, pp)
2870
change_reporter = delta._ChangeReporter(
2871
unversioned_filter=working_tree.is_ignored)
2872
delta.report_changes(tt.iter_changes(), change_reporter)
2873
for conflict in conflicts:
2874
trace.warning(unicode(conflict))
2877
working_tree.set_merge_modified(merge_modified)
2879
target_tree.unlock()
2885
def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
2886
backups, pp, basis_tree=None,
2887
merge_modified=None):
2888
child_pb = ui.ui_factory.nested_progress_bar()
2890
if merge_modified is None:
2891
merge_modified = working_tree.merge_modified()
2892
merge_modified = _alter_files(working_tree, target_tree, tt,
2893
child_pb, filenames, backups,
2894
merge_modified, basis_tree)
2897
child_pb = ui.ui_factory.nested_progress_bar()
2899
raw_conflicts = resolve_conflicts(tt, child_pb,
2900
lambda t, c: conflict_pass(t, c, target_tree))
2903
conflicts = cook_conflicts(raw_conflicts, tt)
2904
return conflicts, merge_modified
2907
def _alter_files(working_tree, target_tree, tt, pb, specific_files,
2908
backups, merge_modified, basis_tree=None):
2909
if basis_tree is not None:
2910
basis_tree.lock_read()
2911
# We ask the working_tree for its changes relative to the target, rather
2912
# than the target changes relative to the working tree. Because WT4 has an
2913
# optimizer to compare itself to a target, but no optimizer for the
2915
change_list = working_tree.iter_changes(target_tree,
2916
specific_files=specific_files, pb=pb)
2917
if target_tree.get_root_id() is None:
2923
for id_num, (file_id, path, changed_content, versioned, parent, name,
2924
kind, executable) in enumerate(change_list):
2925
target_path, wt_path = path
2926
target_versioned, wt_versioned = versioned
2927
target_parent, wt_parent = parent
2928
target_name, wt_name = name
2929
target_kind, wt_kind = kind
2930
target_executable, wt_executable = executable
2931
if skip_root and wt_parent is None:
2933
trans_id = tt.trans_id_file_id(file_id)
2936
keep_content = False
2937
if wt_kind == 'file' and (backups or target_kind is None):
2938
wt_sha1 = working_tree.get_file_sha1(file_id)
2939
if merge_modified.get(file_id) != wt_sha1:
2940
# acquire the basis tree lazily to prevent the
2941
# expense of accessing it when it's not needed ?
2942
# (Guessing, RBC, 200702)
2943
if basis_tree is None:
2944
basis_tree = working_tree.basis_tree()
2945
basis_tree.lock_read()
2946
if basis_tree.has_id(file_id):
2947
if wt_sha1 != basis_tree.get_file_sha1(file_id):
2949
elif target_kind is None and not target_versioned:
2951
if wt_kind is not None:
2952
if not keep_content:
2953
tt.delete_contents(trans_id)
2954
elif target_kind is not None:
2955
parent_trans_id = tt.trans_id_file_id(wt_parent)
2956
backup_name = tt._available_backup_name(
2957
wt_name, parent_trans_id)
2958
tt.adjust_path(backup_name, parent_trans_id, trans_id)
2959
new_trans_id = tt.create_path(wt_name, parent_trans_id)
2960
if wt_versioned and target_versioned:
2961
tt.unversion_file(trans_id)
2962
tt.version_file(file_id, new_trans_id)
2963
# New contents should have the same unix perms as old
2966
trans_id = new_trans_id
2967
if target_kind in ('directory', 'tree-reference'):
2968
tt.create_directory(trans_id)
2969
if target_kind == 'tree-reference':
2970
revision = target_tree.get_reference_revision(file_id,
2972
tt.set_tree_reference(revision, trans_id)
2973
elif target_kind == 'symlink':
2974
tt.create_symlink(target_tree.get_symlink_target(file_id),
2976
elif target_kind == 'file':
2977
deferred_files.append((file_id, (trans_id, mode_id)))
2978
if basis_tree is None:
2979
basis_tree = working_tree.basis_tree()
2980
basis_tree.lock_read()
2981
new_sha1 = target_tree.get_file_sha1(file_id)
2982
if (basis_tree.has_id(file_id) and
2983
new_sha1 == basis_tree.get_file_sha1(file_id)):
2984
if file_id in merge_modified:
2985
del merge_modified[file_id]
2987
merge_modified[file_id] = new_sha1
2989
# preserve the execute bit when backing up
2990
if keep_content and wt_executable == target_executable:
2991
tt.set_executability(target_executable, trans_id)
2992
elif target_kind is not None:
2993
raise AssertionError(target_kind)
2994
if not wt_versioned and target_versioned:
2995
tt.version_file(file_id, trans_id)
2996
if wt_versioned and not target_versioned:
2997
tt.unversion_file(trans_id)
2998
if (target_name is not None and
2999
(wt_name != target_name or wt_parent != target_parent)):
3000
if target_name == '' and target_parent is None:
3001
parent_trans = ROOT_PARENT
3003
parent_trans = tt.trans_id_file_id(target_parent)
3004
if wt_parent is None and wt_versioned:
3005
tt.adjust_root_path(target_name, parent_trans)
3007
tt.adjust_path(target_name, parent_trans, trans_id)
3008
if wt_executable != target_executable and target_kind == "file":
3009
tt.set_executability(target_executable, trans_id)
3010
if working_tree.supports_content_filtering():
3011
for index, ((trans_id, mode_id), bytes) in enumerate(
3012
target_tree.iter_files_bytes(deferred_files)):
3013
file_id = deferred_files[index][0]
3014
# We're reverting a tree to the target tree so using the
3015
# target tree to find the file path seems the best choice
3016
# here IMO - Ian C 27/Oct/2009
3017
filter_tree_path = target_tree.id2path(file_id)
3018
filters = working_tree._content_filter_stack(filter_tree_path)
3019
bytes = filtered_output_bytes(bytes, filters,
3020
ContentFilterContext(filter_tree_path, working_tree))
3021
tt.create_file(bytes, trans_id, mode_id)
3023
for (trans_id, mode_id), bytes in target_tree.iter_files_bytes(
3025
tt.create_file(bytes, trans_id, mode_id)
3026
tt.fixup_new_roots()
3028
if basis_tree is not None:
3030
return merge_modified
3033
def resolve_conflicts(tt, pb=None, pass_func=None):
    """Make many conflict-resolution attempts, but die if they fail.

    :param tt: The TreeTransform whose conflicts should be resolved.
    :param pb: Ignored; retained for API compatibility (a fresh nested
        progress bar is always created).
    :param conflicts: resolution is retried up to 10 passes; if conflicts
        remain after that, MalformedTransform is raised.
    :param pass_func: callable(tt, conflicts) performing one resolution
        pass; defaults to conflict_pass.
    :return: the set of (type, description, ...) tuples describing the
        resolutions that were applied.
    """
    if pass_func is None:
        pass_func = conflict_pass
    new_conflicts = set()
    pb = ui.ui_factory.nested_progress_bar()
    try:
        # Each pass may itself create new conflicts; 10 iterations is a
        # safety bound against non-converging resolution.
        for n in range(10):
            pb.update('Resolution pass', n+1, 10)
            conflicts = tt.find_conflicts()
            if len(conflicts) == 0:
                return new_conflicts
            new_conflicts.update(pass_func(tt, conflicts))
        raise MalformedTransform(conflicts=conflicts)
    finally:
        # Always release the progress bar, even on MalformedTransform.
        pb.finished()
3051
def conflict_pass(tt, conflicts, path_tree=None):
    """Resolve some classes of conflicts.

    :param tt: The transform to resolve conflicts in
    :param conflicts: The conflicts to resolve
    :param path_tree: A Tree to get supplemental paths from
    :return: a set of tuples describing the resolutions performed, suitable
        for feeding to cook_conflicts.
    """
    new_conflicts = set()
    for c_type, conflict in ((c[0], c) for c in conflicts):
        if c_type == 'duplicate id':
            tt.unversion_file(conflict[1])
            new_conflicts.add((c_type, 'Unversioned existing file',
                               conflict[1], conflict[2], ))
        elif c_type == 'duplicate':
            # files that were renamed take precedence
            final_parent = tt.final_parent(conflict[1])
            if tt.path_changed(conflict[1]):
                existing_file, new_file = conflict[2], conflict[1]
            else:
                existing_file, new_file = conflict[1], conflict[2]
            new_name = tt.final_name(existing_file)+'.moved'
            tt.adjust_path(new_name, final_parent, existing_file)
            new_conflicts.add((c_type, 'Moved existing file to',
                               existing_file, new_file))
        elif c_type == 'parent loop':
            # break the loop by undoing one of the ops that caused the loop
            cur = conflict[1]
            while not tt.path_changed(cur):
                cur = tt.final_parent(cur)
            new_conflicts.add((c_type, 'Cancelled move', cur,
                               tt.final_parent(cur),))
            tt.adjust_path(tt.final_name(cur), tt.get_tree_parent(cur), cur)
        elif c_type == 'missing parent':
            trans_id = conflict[1]
            if trans_id in tt._removed_contents:
                # The parent directory is scheduled for deletion; try to
                # orphan its children rather than keep the directory.
                cancel_deletion = True
                orphans = tt._get_potential_orphans(trans_id)
                if orphans:
                    cancel_deletion = False
                    # All children are orphans
                    for o in orphans:
                        try:
                            tt.new_orphan(o, trans_id)
                        except OrphaningError:
                            # Something bad happened so we cancel the directory
                            # deletion which will leave it in place with a
                            # conflict. The user can deal with it from there.
                            # Note that this also catch the case where we don't
                            # want to create orphans and leave the directory in
                            # place.
                            cancel_deletion = True
                            break
                if cancel_deletion:
                    # Cancel the directory deletion
                    tt.cancel_deletion(trans_id)
                    new_conflicts.add(('deleting parent', 'Not deleting',
                                       trans_id))
            else:
                create = True
                try:
                    tt.final_name(trans_id)
                except NoFinalPath:
                    # The parent has no path in the result; try to recover
                    # one from path_tree.
                    if path_tree is not None:
                        file_id = tt.final_file_id(trans_id)
                        if file_id is None:
                            file_id = tt.inactive_file_id(trans_id)
                        _, entry = next(path_tree.iter_entries_by_dir(
                            [file_id]))
                        # special-case the other tree root (move its
                        # children to current root)
                        if entry.parent_id is None:
                            create = False
                            moved = _reparent_transform_children(
                                tt, trans_id, tt.root)
                            for child in moved:
                                new_conflicts.add((c_type, 'Moved to root',
                                                   child))
                        else:
                            parent_trans_id = tt.trans_id_file_id(
                                entry.parent_id)
                            tt.adjust_path(entry.name, parent_trans_id,
                                           trans_id)
                if create:
                    tt.create_directory(trans_id)
                    new_conflicts.add((c_type, 'Created directory', trans_id))
        elif c_type == 'unversioned parent':
            file_id = tt.inactive_file_id(conflict[1])
            # special-case the other tree root (move its children instead)
            if path_tree and path_tree.has_id(file_id):
                if path_tree.path2id('') == file_id:
                    # This is the root entry, skip it
                    continue
            tt.version_file(file_id, conflict[1])
            new_conflicts.add((c_type, 'Versioned directory', conflict[1]))
        elif c_type == 'non-directory parent':
            # Replace the non-directory parent with a fresh '.new' directory
            # and reparent the children into it.
            parent_id = conflict[1]
            parent_parent = tt.final_parent(parent_id)
            parent_name = tt.final_name(parent_id)
            parent_file_id = tt.final_file_id(parent_id)
            new_parent_id = tt.new_directory(parent_name + '.new',
                parent_parent, parent_file_id)
            _reparent_transform_children(tt, parent_id, new_parent_id)
            if parent_file_id is not None:
                tt.unversion_file(parent_id)
            new_conflicts.add((c_type, 'Created directory', new_parent_id))
        elif c_type == 'versioning no contents':
            tt.cancel_versioning(conflict[1])
    return new_conflicts
3162
def cook_conflicts(raw_conflicts, tt):
    """Generate a list of cooked conflicts, sorted by file path"""
    return sorted(iter_cook_conflicts(raw_conflicts, tt),
                  key=conflicts.Conflict.sort_key)
3168
def iter_cook_conflicts(raw_conflicts, tt):
    """Yield one Conflict object per raw conflict tuple.

    :param raw_conflicts: iterable of (type, action, trans_id[, trans_id])
        tuples as produced by conflict_pass.
    :param tt: the transform the trans_ids belong to; used to resolve
        final paths and file ids.
    """
    # FinalPaths caches final-path computation across conflicts.
    fp = FinalPaths(tt)
    for conflict in raw_conflicts:
        c_type = conflict[0]
        action = conflict[1]
        modified_path = fp.get_path(conflict[2])
        modified_id = tt.final_file_id(conflict[2])
        if len(conflict) == 3:
            # Single-file conflict: no second path involved.
            yield conflicts.Conflict.factory(
                c_type, action=action, path=modified_path, file_id=modified_id)
        else:
            # Two-file conflict: also report the conflicting path/id.
            conflicting_path = fp.get_path(conflict[3])
            conflicting_id = tt.final_file_id(conflict[3])
            yield conflicts.Conflict.factory(
                c_type, action=action, path=modified_path,
                file_id=modified_id,
                conflict_path=conflicting_path,
                conflict_file_id=conflicting_id)
3189
class _FileMover(object):
3190
"""Moves and deletes files for TreeTransform, tracking operations"""
3193
self.past_renames = []
3194
self.pending_deletions = []
3196
def rename(self, from_, to):
3197
"""Rename a file from one path to another."""
3199
os.rename(from_, to)
3201
if e.errno in (errno.EEXIST, errno.ENOTEMPTY):
3202
raise errors.FileExists(to, str(e))
3203
# normal OSError doesn't include filenames so it's hard to see where
3204
# the problem is, see https://bugs.launchpad.net/bzr/+bug/491763
3205
raise errors.TransformRenameFailed(from_, to, str(e), e.errno)
3206
self.past_renames.append((from_, to))
3208
def pre_delete(self, from_, to):
3209
"""Rename a file out of the way and mark it for deletion.
3211
Unlike os.unlink, this works equally well for files and directories.
3212
:param from_: The current file path
3213
:param to: A temporary path for the file
3215
self.rename(from_, to)
3216
self.pending_deletions.append(to)
3219
"""Reverse all renames that have been performed"""
3220
for from_, to in reversed(self.past_renames):
3222
os.rename(to, from_)
3224
raise errors.TransformRenameFailed(to, from_, str(e), e.errno)
3225
# after rollback, don't reuse _FileMover
3227
pending_deletions = None
3229
def apply_deletions(self):
3230
"""Apply all marked deletions"""
3231
for path in self.pending_deletions:
3233
# after apply_deletions, don't reuse _FileMover
3235
pending_deletions = None