~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/transform.py

  • Committer: Kit Randel
  • Date: 2014-12-12 03:59:25 UTC
  • mto: This revision was merged to the branch mainline in revision 6602.
  • Revision ID: kit.randel@canonical.com-20141212035925-5nwyz5det0wtecce
fixed dirty_head logic in iter_file_patch

=== modified file 'bzrlib/transform.py'
@@ -1 +1 @@
-# Copyright (C) 2006-2010 Canonical Ltd
+# Copyright (C) 2006-2011 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -14 +14 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
+from __future__ import absolute_import
+
 import os
 import errno
 from stat import S_ISREG, S_IEXEC
 import time
 
-from bzrlib.lazy_import import lazy_import
-lazy_import(globals(), """
+from bzrlib import (
+    config as _mod_config,
+    errors,
+    lazy_import,
+    registry,
+    trace,
+    tree,
+    )
+lazy_import.lazy_import(globals(), """
 from bzrlib import (
     annotate,
     bencode,
-    bzrdir,
+    controldir,
     commit,
+    conflicts,
     delta,
-    errors,
     inventory,
     multiparent,
     osutils,
     revision as _mod_revision,
     ui,
+    urlutils,
     )
+from bzrlib.i18n import gettext
 """)
-from bzrlib.errors import (DuplicateKey, MalformedTransform, NoSuchFile,
+from bzrlib.errors import (DuplicateKey, MalformedTransform,
                            ReusingTransform, CantMoveRoot,
-                           ExistingLimbo, ImmortalLimbo, NoFinalPath,
+                           ImmortalLimbo, NoFinalPath,
                            UnableCreateSymlink)
 from bzrlib.filters import filtered_output_bytes, ContentFilterContext
-from bzrlib.inventory import InventoryEntry
+from bzrlib.mutabletree import MutableTree
 from bzrlib.osutils import (
     delete_any,
     file_kind,
     has_symlinks,
-    lexists,
     pathjoin,
     sha_file,
     splitpath,
-    supports_executable,
-)
+    )
 from bzrlib.progress import ProgressPhase
 from bzrlib.symbol_versioning import (
-        deprecated_function,
-        deprecated_in,
-        )
-from bzrlib.trace import mutter, warning
-from bzrlib import tree
-import bzrlib.ui
-import bzrlib.urlutils as urlutils
+    deprecated_function,
+    deprecated_in,
+    deprecated_method,
+    )
 
 
 ROOT_PARENT = "root-parent"
 
-
 def unique_add(map, key, value):
     if key in map:
         raise DuplicateKey(key=key)
     map[key] = value
 
 
+
 class _TransformResults(object):
     def __init__(self, modified_paths, rename_count):
         object.__init__(self)
@@ -100 +106 @@
         self._new_parent = {}
         # mapping of trans_id with new contents -> new file_kind
         self._new_contents = {}
+        # mapping of trans_id => (sha1 of content, stat_value)
+        self._observed_sha1s = {}
         # Set of trans_ids whose contents will be removed
         self._removed_contents = set()
         # Mapping of trans_id -> new execute-bit value
@@ -124 +132 @@
             self._new_root = self.trans_id_tree_file_id(root_id)
         else:
             self._new_root = None
-        # Indictor of whether the transform has been applied
+        # Indicator of whether the transform has been applied
         self._done = False
         # A progress bar
         self._pb = pb
@@ -133 +141 @@
         # A counter of how many files have been renamed
         self.rename_count = 0
 
+    def __enter__(self):
+        """Support Context Manager API."""
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        """Support Context Manager API."""
+        self.finalize()
+
     def finalize(self):
         """Release the working tree lock, if held.
 
@@ -141 +157 @@
         """
         if self._tree is None:
             return
+        for hook in MutableTree.hooks['post_transform']:
+            hook(self._tree, self)
         self._tree.unlock()
         self._tree = None
 
@@ -213 +231 @@
         This means that the old root trans-id becomes obsolete, so it is
         recommended only to invoke this after the root trans-id has become
         irrelevant.
+
         """
-        new_roots = [k for k, v in self._new_parent.iteritems() if v is
+        new_roots = [k for k, v in self._new_parent.iteritems() if v ==
                      ROOT_PARENT]
         if len(new_roots) < 1:
             return
@@ -224 +243 @@
             self._new_root = new_roots[0]
             return
         old_new_root = new_roots[0]
-        # TODO: What to do if a old_new_root is present, but self._new_root is
-        #       not listed as being removed? This code explicitly unversions
-        #       the old root and versions it with the new file_id. Though that
-        #       seems like an incomplete delta
-
         # unversion the new root's directory.
-        file_id = self.final_file_id(old_new_root)
+        if self.final_kind(self._new_root) is None:
+            file_id = self.final_file_id(old_new_root)
+        else:
+            file_id = self.final_file_id(self._new_root)
         if old_new_root in self._new_id:
             self.cancel_versioning(old_new_root)
         else:
@@ -240 +257 @@
         if (self.tree_file_id(self._new_root) is not None and
             self._new_root not in self._removed_id):
             self.unversion_file(self._new_root)
-        self.version_file(file_id, self._new_root)
+        if file_id is not None:
+            self.version_file(file_id, self._new_root)
 
         # Now move children of new root into old root directory.
         # Ensure all children are registered with the transaction, but don't
@@ -380 +398 @@
         return sorted(FinalPaths(self).get_paths(new_ids))
 
     def _inventory_altered(self):
-        """Get the trans_ids and paths of files needing new inv entries."""
-        new_ids = set()
-        for id_set in [self._new_name, self._new_parent, self._new_id,
+        """Determine which trans_ids need new Inventory entries.
+
+        An new entry is needed when anything that would be reflected by an
+        inventory entry changes, including file name, file_id, parent file_id,
+        file kind, and the execute bit.
+
+        Some care is taken to return entries with real changes, not cases
+        where the value is deleted and then restored to its original value,
+        but some actually unchanged values may be returned.
+
+        :returns: A list of (path, trans_id) for all items requiring an
+            inventory change. Ordered by path.
+        """
+        changed_ids = set()
+        # Find entries whose file_ids are new (or changed).
+        new_file_id = set(t for t in self._new_id
+                          if self._new_id[t] != self.tree_file_id(t))
+        for id_set in [self._new_name, self._new_parent, new_file_id,
                        self._new_executability]:
-            new_ids.update(id_set)
+            changed_ids.update(id_set)
+        # removing implies a kind change
         changed_kind = set(self._removed_contents)
+        # so does adding
         changed_kind.intersection_update(self._new_contents)
-        changed_kind.difference_update(new_ids)
+        # Ignore entries that are already known to have changed.
+        changed_kind.difference_update(changed_ids)
+        #  to keep only the truly changed ones
         changed_kind = (t for t in changed_kind
                         if self.tree_kind(t) != self.final_kind(t))
-        new_ids.update(changed_kind)
-        return sorted(FinalPaths(self).get_paths(new_ids))
+        # all kind changes will alter the inventory
+        changed_ids.update(changed_kind)
+        # To find entries with changed parent_ids, find parents which existed,
+        # but changed file_id.
+        changed_file_id = set(t for t in new_file_id if t in self._removed_id)
+        # Now add all their children to the set.
+        for parent_trans_id in new_file_id:
+            changed_ids.update(self.iter_tree_children(parent_trans_id))
+        return sorted(FinalPaths(self).get_paths(changed_ids))
 
     def final_kind(self, trans_id):
         """Determine the final file kind, after any changes applied.
@@ -522 +566 @@
         for trans_id in self._removed_id:
             file_id = self.tree_file_id(trans_id)
             if file_id is not None:
-                if self._tree.inventory[file_id].kind == 'directory':
+                if self._tree.stored_kind(file_id) == 'directory':
                     parents.append(trans_id)
             elif self.tree_kind(trans_id) == 'directory':
                 parents.append(trans_id)
@@ -531 +575 @@
             # ensure that all children are registered with the transaction
             list(self.iter_tree_children(parent_id))
 
-    def has_named_child(self, by_parent, parent_id, name):
-        try:
-            children = by_parent[parent_id]
-        except KeyError:
-            children = []
-        for child in children:
+    def _has_named_child(self, name, parent_id, known_children):
+        """Does a parent already have a name child.
+
+        :param name: The searched for name.
+
+        :param parent_id: The parent for which the check is made.
+
+        :param known_children: The already known children. This should have
+            been recently obtained from `self.by_parent.get(parent_id)`
+            (or will be if None is passed).
+        """
+        if known_children is None:
+            known_children = self.by_parent().get(parent_id, [])
+        for child in known_children:
             if self.final_name(child) == name:
                 return True
-        try:
-            path = self._tree_id_paths[parent_id]
-        except KeyError:
+        parent_path = self._tree_id_paths.get(parent_id, None)
+        if parent_path is None:
+            # No parent... no children
             return False
-        childpath = joinpath(path, name)
-        child_id = self._tree_path_ids.get(childpath)
+        child_path = joinpath(parent_path, name)
+        child_id = self._tree_path_ids.get(child_path, None)
         if child_id is None:
-            return lexists(self._tree.abspath(childpath))
+            # Not known by the tree transform yet, check the filesystem
+            return osutils.lexists(self._tree.abspath(child_path))
         else:
-            if self.final_parent(child_id) != parent_id:
-                return False
-            if child_id in self._removed_contents:
-                # XXX What about dangling file-ids?
-                return False
-            else:
-                return True
+            raise AssertionError('child_id is missing: %s, %s, %s'
+                                 % (name, parent_id, child_id))
+
+    def _available_backup_name(self, name, target_id):
+        """Find an available backup name.
+
+        :param name: The basename of the file.
+
+        :param target_id: The directory trans_id where the backup should
+            be placed.
+        """
+        known_children = self.by_parent().get(target_id, [])
+        return osutils.available_backup_name(
+            name,
+            lambda base: self._has_named_child(
+                base, target_id, known_children))
 
     def _parent_loops(self):
         """No entry should be its own ancestor"""
@@ -562 +624 @@
         for trans_id in self._new_parent:
             seen = set()
             parent_id = trans_id
-            while parent_id is not ROOT_PARENT:
+            while parent_id != ROOT_PARENT:
                 seen.add(parent_id)
                 try:
                     parent_id = self.final_parent(parent_id)
@@ -578 +640 @@
         """If parent directories are versioned, children must be versioned."""
         conflicts = []
         for parent_id, children in by_parent.iteritems():
-            if parent_id is ROOT_PARENT:
+            if parent_id == ROOT_PARENT:
                 continue
             if self.final_file_id(parent_id) is not None:
                 continue
@@ -599 +661 @@
             if kind is None:
                 conflicts.append(('versioning no contents', trans_id))
                 continue
-            if not InventoryEntry.versionable_kind(kind):
+            if not inventory.InventoryEntry.versionable_kind(kind):
                 conflicts.append(('versioning bad kind', trans_id, kind))
         return conflicts
 
@@ -637 +699 @@
         if (self._new_name, self._new_parent) == ({}, {}):
             return conflicts
         for children in by_parent.itervalues():
-            name_ids = [(self.final_name(t), t) for t in children]
-            if not self._case_sensitive_target:
-                name_ids = [(n.lower(), t) for n, t in name_ids]
+            name_ids = []
+            for child_tid in children:
+                name = self.final_name(child_tid)
+                if name is not None:
+                    # Keep children only if they still exist in the end
+                    if not self._case_sensitive_target:
+                        name = name.lower()
+                    name_ids.append((name, child_tid))
             name_ids.sort()
             last_name = None
             last_trans_id = None
@@ -669 +736 @@
         return conflicts
 
     def _parent_type_conflicts(self, by_parent):
-        """parents must have directory 'contents'."""
+        """Children must have a directory parent"""
         conflicts = []
         for parent_id, children in by_parent.iteritems():
-            if parent_id is ROOT_PARENT:
-                continue
-            if not self._any_contents(children):
-                continue
+            if parent_id == ROOT_PARENT:
+                continue
+            no_children = True
+            for child_id in children:
+                if self.final_kind(child_id) is not None:
+                    no_children = False
+                    break
+            if no_children:
+                continue
+            # There is at least a child, so we need an existing directory to
+            # contain it.
             kind = self.final_kind(parent_id)
             if kind is None:
+                # The directory will be deleted
                 conflicts.append(('missing parent', parent_id))
             elif kind != "directory":
+                # Meh, we need a *directory* to put something in it
                 conflicts.append(('non-directory parent', parent_id))
         return conflicts
 
-    def _any_contents(self, trans_ids):
-        """Return true if any of the trans_ids, will have contents."""
-        for trans_id in trans_ids:
-            if self.final_kind(trans_id) is not None:
-                return True
-        return False
-
     def _set_executability(self, path, trans_id):
         """Set the executability of versioned files """
-        if supports_executable():
+        if self._tree._supports_executable():
             new_executability = self._new_executability[trans_id]
             abspath = self._tree.abspath(path)
             current_mode = os.stat(abspath).st_mode
@@ -707 +776 @@
                     to_mode |= 0010 & ~umask
             else:
                 to_mode = current_mode & ~0111
-            os.chmod(abspath, to_mode)
+            osutils.chmod_if_possible(abspath, to_mode)
 
     def _new_entry(self, name, parent_id, file_id):
         """Helper function to create a new filesystem entry."""
@@ -717 +786 @@
         return trans_id
 
     def new_file(self, name, parent_id, contents, file_id=None,
-                 executable=None):
+                 executable=None, sha1=None):
         """Convenience method to create files.
 
         name is the name of the file to create.
@@ -730 +799 @@
         trans_id = self._new_entry(name, parent_id, file_id)
         # TODO: rather than scheduling a set_executable call,
         # have create_file create the file with the right mode.
-        self.create_file(contents, trans_id)
+        self.create_file(contents, trans_id, sha1=sha1)
         if executable is not None:
             self.set_executability(executable, trans_id)
         return trans_id
@@ -759 +828 @@
         self.create_symlink(target, trans_id)
         return trans_id
 
+    def new_orphan(self, trans_id, parent_id):
+        """Schedule an item to be orphaned.
+
+        When a directory is about to be removed, its children, if they are not
+        versioned are moved out of the way: they don't have a parent anymore.
+
+        :param trans_id: The trans_id of the existing item.
+        :param parent_id: The parent trans_id of the item.
+        """
+        raise NotImplementedError(self.new_orphan)
+
+    def _get_potential_orphans(self, dir_id):
+        """Find the potential orphans in a directory.
+
+        A directory can't be safely deleted if there are versioned files in it.
+        If all the contained files are unversioned then they can be orphaned.
+
+        The 'None' return value means that the directory contains at least one
+        versioned file and should not be deleted.
+
+        :param dir_id: The directory trans id.
+
+        :return: A list of the orphan trans ids or None if at least one
+             versioned file is present.
+        """
+        orphans = []
+        # Find the potential orphans, stop if one item should be kept
+        for child_tid in self.by_parent()[dir_id]:
+            if child_tid in self._removed_contents:
+                # The child is removed as part of the transform. Since it was
+                # versioned before, it's not an orphan
+                continue
+            elif self.final_file_id(child_tid) is None:
+                # The child is not versioned
+                orphans.append(child_tid)
+            else:
+                # We have a versioned file here, searching for orphans is
+                # meaningless.
+                orphans = None
+                break
+        return orphans
+
     def _affected_ids(self):
         """Return the set of transform ids affected by the transform"""
         trans_ids = set(self._removed_id)
@@ -1076 +1187 @@
         self._deletiondir = None
         # A mapping of transform ids to their limbo filename
         self._limbo_files = {}
+        self._possibly_stale_limbo_files = set()
         # A mapping of transform ids to a set of the transform ids of children
        # that their limbo directory has
         self._limbo_children = {}
@@ -1094 +1206 @@
         if self._tree is None:
             return
         try:
-            entries = [(self._limbo_name(t), t, k) for t, k in
-                       self._new_contents.iteritems()]
-            entries.sort(reverse=True)
-            for path, trans_id, kind in entries:
-                delete_any(path)
+            limbo_paths = self._limbo_files.values() + list(
+                self._possibly_stale_limbo_files)
+            limbo_paths = sorted(limbo_paths, reverse=True)
+            for path in limbo_paths:
+                try:
+                    delete_any(path)
+                except OSError, e:
+                    if e.errno != errno.ENOENT:
+                        raise
+                    # XXX: warn? perhaps we just got interrupted at an
+                    # inconvenient moment, but perhaps files are disappearing
+                    # from under us?
             try:
                 delete_any(self._limbodir)
             except OSError:
@@ -1112 +1231 @@
         finally:
             TreeTransformBase.finalize(self)
 
+    def _limbo_supports_executable(self):
+        """Check if the limbo path supports the executable bit."""
+        # FIXME: Check actual file system capabilities of limbodir
+        return osutils.supports_executable()
+
     def _limbo_name(self, trans_id):
         """Generate the limbo name of a file"""
         limbo_name = self._limbo_files.get(trans_id)
@@ -1153 +1277 @@
         entries from _limbo_files, because they are now stale.
         """
         for trans_id in trans_ids:
-            old_path = self._limbo_files.pop(trans_id)
+            old_path = self._limbo_files[trans_id]
+            self._possibly_stale_limbo_files.add(old_path)
+            del self._limbo_files[trans_id]
             if trans_id not in self._new_contents:
                 continue
             new_path = self._limbo_name(trans_id)
             os.rename(old_path, new_path)
+            self._possibly_stale_limbo_files.remove(old_path)
             for descendant in self._limbo_descendants(trans_id):
                 desc_path = self._limbo_files[descendant]
                 desc_path = new_path + desc_path[len(old_path):]
@@ -1170 +1297 @@
             descendants.update(self._limbo_descendants(descendant))
         return descendants
 
-    def create_file(self, contents, trans_id, mode_id=None):
+    def create_file(self, contents, trans_id, mode_id=None, sha1=None):
         """Schedule creation of a new file.
 
-        See also new_file.
-
-        Contents is an iterator of strings, all of which will be written
-        to the target destination.
-
-        New file takes the permissions of any existing file with that id,
-        unless mode_id is specified.
+        :seealso: new_file.
+
+        :param contents: an iterator of strings, all of which will be written
+            to the target destination.
+        :param trans_id: TreeTransform handle
+        :param mode_id: If not None, force the mode of the target file to match
+            the mode of the object referenced by mode_id.
+            Otherwise, we will try to preserve mode bits of an existing file.
+        :param sha1: If the sha1 of this content is already known, pass it in.
+            We can use it to prevent future sha1 computations.
         """
         name = self._limbo_name(trans_id)
         f = open(name, 'wb')
         try:
-            try:
-                unique_add(self._new_contents, trans_id, 'file')
-            except:
-                # Clean up the file, it never got registered so
-                # TreeTransform.finalize() won't clean it up.
-                f.close()
-                os.unlink(name)
-                raise
-
+            unique_add(self._new_contents, trans_id, 'file')
             f.writelines(contents)
         finally:
             f.close()
         self._set_mtime(name)
         self._set_mode(trans_id, mode_id, S_ISREG)
+        # It is unfortunate we have to use lstat instead of fstat, but we just
+        # used utime and chmod on the file, so we need the accurate final
+        # details.
+        if sha1 is not None:
+            self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
 
     def _read_file_chunks(self, trans_id):
         cur_file = open(self._limbo_name(trans_id), 'rb')
@@ -1262 +1389 @@
     def cancel_creation(self, trans_id):
         """Cancel the creation of new file contents."""
         del self._new_contents[trans_id]
+        if trans_id in self._observed_sha1s:
+            del self._observed_sha1s[trans_id]
         children = self._limbo_children.get(trans_id)
         # if this is a limbo directory with children, move them before removing
         # the directory
@@ -1271 +1400 @@
             del self._limbo_children_names[trans_id]
         delete_any(self._limbo_name(trans_id))
 
+    def new_orphan(self, trans_id, parent_id):
+        conf = self._tree.get_config_stack()
+        handle_orphan = conf.get('bzr.transform.orphan_policy')
+        handle_orphan(self, trans_id, parent_id)
+
+
+class OrphaningError(errors.BzrError):
+
+    # Only bugs could lead to such exception being seen by the user
+    internal_error = True
+    _fmt = "Error while orphaning %s in %s directory"
+
+    def __init__(self, orphan, parent):
+        errors.BzrError.__init__(self)
+        self.orphan = orphan
+        self.parent = parent
+
+
+class OrphaningForbidden(OrphaningError):
+
+    _fmt = "Policy: %s doesn't allow creating orphans."
+
+    def __init__(self, policy):
+        errors.BzrError.__init__(self)
+        self.policy = policy
+
+
+def move_orphan(tt, orphan_id, parent_id):
+    """See TreeTransformBase.new_orphan.
+
+    This creates a new orphan in the `bzr-orphans` dir at the root of the
+    `TreeTransform`.
+
+    :param tt: The TreeTransform orphaning `trans_id`.
+
+    :param orphan_id: The trans id that should be orphaned.
+
+    :param parent_id: The orphan parent trans id.
+    """
+    # Add the orphan dir if it doesn't exist
+    orphan_dir_basename = 'bzr-orphans'
+    od_id = tt.trans_id_tree_path(orphan_dir_basename)
+    if tt.final_kind(od_id) is None:
+        tt.create_directory(od_id)
+    parent_path = tt._tree_id_paths[parent_id]
+    # Find a name that doesn't exist yet in the orphan dir
+    actual_name = tt.final_name(orphan_id)
+    new_name = tt._available_backup_name(actual_name, od_id)
+    tt.adjust_path(new_name, od_id, orphan_id)
+    trace.warning('%s has been orphaned in %s'
+                  % (joinpath(parent_path, actual_name), orphan_dir_basename))
+
+
+def refuse_orphan(tt, orphan_id, parent_id):
+    """See TreeTransformBase.new_orphan.
+
+    This refuses to create orphan, letting the caller handle the conflict.
+    """
+    raise OrphaningForbidden('never')
+
+
+orphaning_registry = registry.Registry()
+orphaning_registry.register(
+    'conflict', refuse_orphan,
+    'Leave orphans in place and create a conflict on the directory.')
+orphaning_registry.register(
+    'move', move_orphan,
+    'Move orphans into the bzr-orphans directory.')
+orphaning_registry._set_default_key('conflict')
+
+
+opt_transform_orphan = _mod_config.RegistryOption(
+    'bzr.transform.orphan_policy', orphaning_registry,
+    help='Policy for orphaned files during transform operations.',
+    invalid='warning')
+
 
 class TreeTransform(DiskTreeTransform):
     """Represent a tree transformation.
1553
        try:
1349
1554
            limbodir = urlutils.local_path_from_url(
1350
1555
                tree._transport.abspath('limbo'))
1351
 
            try:
1352
 
                os.mkdir(limbodir)
1353
 
            except OSError, e:
1354
 
                if e.errno == errno.EEXIST:
1355
 
                    raise ExistingLimbo(limbodir)
 
1556
            osutils.ensure_empty_directory_exists(
 
1557
                limbodir,
 
1558
                errors.ExistingLimbo)
1356
1559
            deletiondir = urlutils.local_path_from_url(
1357
1560
                tree._transport.abspath('pending-deletion'))
1358
 
            try:
1359
 
                os.mkdir(deletiondir)
1360
 
            except OSError, e:
1361
 
                if e.errno == errno.EEXIST:
1362
 
                    raise errors.ExistingPendingDeletion(deletiondir)
 
1561
            osutils.ensure_empty_directory_exists(
 
1562
                deletiondir,
 
1563
                errors.ExistingPendingDeletion)
1363
1564
        except:
1364
1565
            tree.unlock()
1365
1566
            raise
1428
1629
            else:
1429
1630
                raise
1430
1631
        if typefunc(mode):
1431
 
            os.chmod(self._limbo_name(trans_id), mode)
 
1632
            osutils.chmod_if_possible(self._limbo_name(trans_id), mode)
1432
1633
 
1433
1634
    def iter_tree_children(self, parent_id):
1434
1635
        """Iterate through the entry's tree children, if any"""
1512
1713
            calculating one.
1513
1714
        :param _mover: Supply an alternate FileMover, for testing
1514
1715
        """
 
1716
        for hook in MutableTree.hooks['pre_transform']:
 
1717
            hook(self._tree, self)
1515
1718
        if not no_conflicts:
1516
1719
            self._check_malformed()
1517
 
        child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
 
1720
        child_pb = ui.ui_factory.nested_progress_bar()
1518
1721
        try:
1519
1722
            if precomputed_delta is None:
1520
 
                child_pb.update('Apply phase', 0, 2)
 
1723
                child_pb.update(gettext('Apply phase'), 0, 2)
1521
1724
                inventory_delta = self._generate_inventory_delta()
1522
1725
                offset = 1
1523
1726
            else:
1528
1731
            else:
1529
1732
                mover = _mover
1530
1733
            try:
1531
 
                child_pb.update('Apply phase', 0 + offset, 2 + offset)
 
1734
                child_pb.update(gettext('Apply phase'), 0 + offset, 2 + offset)
1532
1735
                self._apply_removals(mover)
1533
 
                child_pb.update('Apply phase', 1 + offset, 2 + offset)
 
1736
                child_pb.update(gettext('Apply phase'), 1 + offset, 2 + offset)
1534
1737
                modified_paths = self._apply_insertions(mover)
1535
1738
            except:
1536
1739
                mover.rollback()
1539
1742
                mover.apply_deletions()
1540
1743
        finally:
1541
1744
            child_pb.finished()
 
1745
        if self.final_file_id(self.root) is None:
 
1746
            inventory_delta = [e for e in inventory_delta if e[0] != '']
1542
1747
        self._tree.apply_inventory_delta(inventory_delta)
 
1748
        self._apply_observed_sha1s()
1543
1749
        self._done = True
1544
1750
        self.finalize()
1545
1751
        return _TransformResults(modified_paths, self.rename_count)
1547
1753
    def _generate_inventory_delta(self):
1548
1754
        """Generate an inventory delta for the current transform."""
1549
1755
        inventory_delta = []
1550
 
        child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
 
1756
        child_pb = ui.ui_factory.nested_progress_bar()
1551
1757
        new_paths = self._inventory_altered()
1552
1758
        total_entries = len(new_paths) + len(self._removed_id)
1553
1759
        try:
1554
1760
            for num, trans_id in enumerate(self._removed_id):
1555
1761
                if (num % 10) == 0:
1556
 
                    child_pb.update('removing file', num, total_entries)
 
1762
                    child_pb.update(gettext('removing file'), num, total_entries)
1557
1763
                if trans_id == self._new_root:
1558
1764
                    file_id = self._tree.get_root_id()
1559
1765
                else:
1571
1777
            final_kinds = {}
1572
1778
            for num, (path, trans_id) in enumerate(new_paths):
1573
1779
                if (num % 10) == 0:
1574
 
                    child_pb.update('adding file',
 
1780
                    child_pb.update(gettext('adding file'),
1575
1781
                                    num + len(self._removed_id), total_entries)
1576
1782
                file_id = new_path_file_ids[trans_id]
1577
1783
                if file_id is None:
1615
1821
        """
1616
1822
        tree_paths = list(self._tree_path_ids.iteritems())
1617
1823
        tree_paths.sort(reverse=True)
1618
 
        child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
 
1824
        child_pb = ui.ui_factory.nested_progress_bar()
1619
1825
        try:
1620
 
            for num, data in enumerate(tree_paths):
1621
 
                path, trans_id = data
1622
 
                child_pb.update('removing file', num, len(tree_paths))
 
1826
            for num, (path, trans_id) in enumerate(tree_paths):
 
1827
                # do not attempt to move root into a subdirectory of itself.
 
1828
                if path == '':
 
1829
                    continue
 
1830
                child_pb.update(gettext('removing file'), num, len(tree_paths))
1623
1831
                full_path = self._tree.abspath(path)
1624
1832
                if trans_id in self._removed_contents:
1625
1833
                    delete_path = os.path.join(self._deletiondir, trans_id)
1650
1858
        modified_paths = []
1651
1859
        new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1652
1860
                                 new_paths)
1653
 
        child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
 
1861
        child_pb = ui.ui_factory.nested_progress_bar()
1654
1862
        try:
1655
1863
            for num, (path, trans_id) in enumerate(new_paths):
1656
1864
                if (num % 10) == 0:
1657
 
                    child_pb.update('adding file', num, len(new_paths))
 
1865
                    child_pb.update(gettext('adding file'), num, len(new_paths))
1658
1866
                full_path = self._tree.abspath(path)
1659
1867
                if trans_id in self._needs_rename:
1660
1868
                    try:
1665
1873
                            raise
1666
1874
                    else:
1667
1875
                        self.rename_count += 1
 
1876
                    # TODO: if trans_id in self._observed_sha1s, we should
 
1877
                    #       re-stat the final target, since ctime will be
 
1878
                    #       updated by the change.
1668
1879
                if (trans_id in self._new_contents or
1669
1880
                    self.path_changed(trans_id)):
1670
1881
                    if trans_id in self._new_contents:
1671
1882
                        modified_paths.append(full_path)
1672
1883
                if trans_id in self._new_executability:
1673
1884
                    self._set_executability(path, trans_id)
 
1885
                if trans_id in self._observed_sha1s:
 
1886
                    o_sha1, o_st_val = self._observed_sha1s[trans_id]
 
1887
                    st = osutils.lstat(full_path)
 
1888
                    self._observed_sha1s[trans_id] = (o_sha1, st)
1674
1889
        finally:
1675
1890
            child_pb.finished()
 
1891
        for path, trans_id in new_paths:
 
1892
            # new_paths includes stuff like workingtree conflicts. Only the
 
1893
            # stuff in new_contents actually comes from limbo.
 
1894
            if trans_id in self._limbo_files:
 
1895
                del self._limbo_files[trans_id]
1676
1896
        self._new_contents.clear()
1677
1897
        return modified_paths
1678
1898
 
 
1899
    def _apply_observed_sha1s(self):
 
1900
        """After we have finished renaming everything, update observed sha1s
 
1901
 
 
1902
        This has to be done after self._tree.apply_inventory_delta, otherwise
 
1903
        it doesn't know anything about the files we are updating. Also, we want
 
1904
        to do this as late as possible, so that most entries end up cached.
 
1905
        """
 
1906
        # TODO: this doesn't update the stat information for directories. So
 
1907
        #       the first 'bzr status' will still need to rewrite
 
1908
        #       .bzr/checkout/dirstate. However, we at least don't need to
 
1909
        #       re-read all of the files.
 
1910
        # TODO: If the operation took a while, we could do a time.sleep(3) here
 
1911
        #       to allow the clock to tick over and ensure we won't have any
 
1912
        #       problems. (we could observe start time, and finish time, and if
 
1913
        #       it is less than eg 10% overhead, add a sleep call.)
 
1914
        paths = FinalPaths(self)
 
1915
        for trans_id, observed in self._observed_sha1s.iteritems():
 
1916
            path = paths.get_path(trans_id)
 
1917
            # We could get the file_id, but dirstate prefers to use the path
 
1918
            # anyway, and it is 'cheaper' to determine.
 
1919
            # file_id = self._new_id[trans_id]
 
1920
            self._tree._observed_sha1(None, path, observed)
 
1921
 
1679
1922
 
1680
1923
class TransformPreview(DiskTreeTransform):
1681
1924
    """A TreeTransform for generating preview trees.
1697
1940
        path = self._tree_id_paths.get(trans_id)
1698
1941
        if path is None:
1699
1942
            return None
1700
 
        file_id = self._tree.path2id(path)
1701
 
        try:
1702
 
            return self._tree.kind(file_id)
1703
 
        except errors.NoSuchFile:
1704
 
            return None
 
1943
        kind = self._tree.path_content_summary(path)[0]
 
1944
        if kind == 'missing':
 
1945
            kind = None
 
1946
        return kind
1705
1947
 
1706
1948
    def _set_mode(self, trans_id, mode_id, typefunc):
1707
1949
        """Set the mode of new file contents.
1727
1969
            childpath = joinpath(path, child)
1728
1970
            yield self.trans_id_tree_path(childpath)
1729
1971
 
1730
 
 
1731
 
class _PreviewTree(tree.Tree):
 
1972
    def new_orphan(self, trans_id, parent_id):
 
1973
        raise NotImplementedError(self.new_orphan)
 
1974
 
 
1975
 
 
1976
class _PreviewTree(tree.InventoryTree):
1732
1977
    """Partial implementation of Tree to support show_diff_trees"""
1733
1978
 
1734
1979
    def __init__(self, transform):
1763
2008
                yield self._get_repository().revision_tree(revision_id)
1764
2009
 
1765
2010
    def _get_file_revision(self, file_id, vf, tree_revision):
1766
 
        parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
 
2011
        parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
1767
2012
                       self._iter_parent_trees()]
1768
2013
        vf.add_lines((file_id, tree_revision), parent_keys,
1769
2014
                     self.get_file_lines(file_id))
1773
2018
            vf.fallback_versionedfiles.append(base_vf)
1774
2019
        return tree_revision
1775
2020
 
1776
 
    def _stat_limbo_file(self, file_id):
1777
 
        trans_id = self._transform.trans_id_file_id(file_id)
 
2021
    def _stat_limbo_file(self, file_id=None, trans_id=None):
 
2022
        if trans_id is None:
 
2023
            trans_id = self._transform.trans_id_file_id(file_id)
1778
2024
        name = self._transform._limbo_name(trans_id)
1779
2025
        return os.lstat(name)
1780
2026
 
1805
2051
        pass
1806
2052
 
1807
2053
    @property
 
2054
    @deprecated_method(deprecated_in((2, 5, 0)))
1808
2055
    def inventory(self):
1809
2056
        """This Tree does not use inventory as its backing data."""
1810
2057
        raise NotImplementedError(_PreviewTree.inventory)
1811
2058
 
 
2059
    @property
 
2060
    def root_inventory(self):
 
2061
        """This Tree does not use inventory as its backing data."""
 
2062
        raise NotImplementedError(_PreviewTree.root_inventory)
 
2063
 
1812
2064
    def get_root_id(self):
1813
2065
        return self._transform.final_file_id(self._transform.root)
1814
2066
 
1860
2112
        return cur_parent
1861
2113
 
1862
2114
    def path2id(self, path):
 
2115
        if isinstance(path, list):
 
2116
            if path == []:
 
2117
                path = [""]
 
2118
            path = osutils.pathjoin(*path)
1863
2119
        return self._transform.final_file_id(self._path2trans_id(path))
1864
2120
 
1865
2121
    def id2path(self, file_id):
1926
2182
                ordered_ids.append((trans_id, parent_file_id))
1927
2183
        return ordered_ids
1928
2184
 
 
2185
    def iter_child_entries(self, file_id, path=None):
 
2186
        self.id2path(file_id)
 
2187
        trans_id = self._transform.trans_id_file_id(file_id)
 
2188
        todo = [(child_trans_id, trans_id) for child_trans_id in
 
2189
                self._all_children(trans_id)]
 
2190
        for entry, trans_id in self._make_inv_entries(todo):
 
2191
            yield entry
 
2192
 
1929
2193
    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
1930
2194
        # This may not be a maximally efficient implementation, but it is
1931
2195
        # reasonably straightforward.  An implementation that grafts the
1995
2259
 
1996
2260
    def get_file_size(self, file_id):
1997
2261
        """See Tree.get_file_size"""
 
2262
        trans_id = self._transform.trans_id_file_id(file_id)
 
2263
        kind = self._transform.final_kind(trans_id)
 
2264
        if kind != 'file':
 
2265
            return None
 
2266
        if trans_id in self._transform._new_contents:
 
2267
            return self._stat_limbo_file(trans_id=trans_id).st_size
1998
2268
        if self.kind(file_id) == 'file':
1999
2269
            return self._transform._tree.get_file_size(file_id)
2000
2270
        else:
2001
2271
            return None
2002
2272
 
 
2273
    def get_file_verifier(self, file_id, path=None, stat_value=None):
 
2274
        trans_id = self._transform.trans_id_file_id(file_id)
 
2275
        kind = self._transform._new_contents.get(trans_id)
 
2276
        if kind is None:
 
2277
            return self._transform._tree.get_file_verifier(file_id)
 
2278
        if kind == 'file':
 
2279
            fileobj = self.get_file(file_id)
 
2280
            try:
 
2281
                return ("SHA1", sha_file(fileobj))
 
2282
            finally:
 
2283
                fileobj.close()
 
2284
 
2003
2285
    def get_file_sha1(self, file_id, path=None, stat_value=None):
2004
2286
        trans_id = self._transform.trans_id_file_id(file_id)
2005
2287
        kind = self._transform._new_contents.get(trans_id)
2028
2310
            except errors.NoSuchId:
2029
2311
                return False
2030
2312
 
 
2313
    def has_filename(self, path):
 
2314
        trans_id = self._path2trans_id(path)
 
2315
        if trans_id in self._transform._new_contents:
 
2316
            return True
 
2317
        elif trans_id in self._transform._removed_contents:
 
2318
            return False
 
2319
        else:
 
2320
            return self._transform._tree.has_filename(path)
 
2321
 
2031
2322
    def path_content_summary(self, path):
2032
2323
        trans_id = self._path2trans_id(path)
2033
2324
        tt = self._transform
2046
2337
            if kind == 'file':
2047
2338
                statval = os.lstat(limbo_name)
2048
2339
                size = statval.st_size
2049
 
                if not supports_executable():
 
2340
                if not tt._limbo_supports_executable():
2050
2341
                    executable = False
2051
2342
                else:
2052
2343
                    executable = statval.st_mode & S_IEXEC
2121
2412
                                   self.get_file(file_id).readlines(),
2122
2413
                                   default_revision)
2123
2414
 
2124
 
    def get_symlink_target(self, file_id):
 
2415
    def get_symlink_target(self, file_id, path=None):
2125
2416
        """See Tree.get_symlink_target"""
2126
2417
        if not self._content_change(file_id):
2127
2418
            return self._transform._tree.get_symlink_target(file_id)
2265
2556
        if num > 0:  # more than just a root
2266
2557
            raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
2267
2558
    file_trans_id = {}
2268
 
    top_pb = bzrlib.ui.ui_factory.nested_progress_bar()
 
2559
    top_pb = ui.ui_factory.nested_progress_bar()
2269
2560
    pp = ProgressPhase("Build phase", 2, top_pb)
2270
 
    if tree.inventory.root is not None:
 
2561
    if tree.get_root_id() is not None:
2271
2562
        # This is kind of a hack: we should be altering the root
2272
2563
        # as part of the regular tree shape diff logic.
2273
2564
        # The conditional test here is to avoid doing an
2284
2575
        pp.next_phase()
2285
2576
        file_trans_id[wt.get_root_id()] = \
2286
2577
            tt.trans_id_tree_file_id(wt.get_root_id())
2287
 
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
 
2578
        pb = ui.ui_factory.nested_progress_bar()
2288
2579
        try:
2289
2580
            deferred_contents = []
2290
2581
            num = 0
2291
 
            total = len(tree.inventory)
 
2582
            total = len(tree.all_file_ids())
2292
2583
            if delta_from_tree:
2293
2584
                precomputed_delta = []
2294
2585
            else:
2303
2594
                for dir, files in wt.walkdirs():
2304
2595
                    existing_files.update(f[0] for f in files)
2305
2596
            for num, (tree_path, entry) in \
2306
 
                enumerate(tree.inventory.iter_entries_by_dir()):
2307
 
                pb.update("Building tree", num - len(deferred_contents), total)
 
2597
                enumerate(tree.iter_entries_by_dir()):
 
2598
                pb.update(gettext("Building tree"), num - len(deferred_contents), total)
2308
2599
                if entry.parent_id is None:
2309
2600
                    continue
2310
2601
                reparent = False
2316
2607
                    kind = file_kind(target_path)
2317
2608
                    if kind == "directory":
2318
2609
                        try:
2319
 
                            bzrdir.BzrDir.open(target_path)
 
2610
                            controldir.ControlDir.open(target_path)
2320
2611
                        except errors.NotBranchError:
2321
2612
                            pass
2322
2613
                        else:
2337
2628
                    executable = tree.is_executable(file_id, tree_path)
2338
2629
                    if executable:
2339
2630
                        tt.set_executability(executable, trans_id)
2340
 
                    trans_data = (trans_id, tree_path)
 
2631
                    trans_data = (trans_id, tree_path, entry.text_sha1)
2341
2632
                    deferred_contents.append((file_id, trans_data))
2342
2633
                else:
2343
2634
                    file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2359
2650
            precomputed_delta = None
2360
2651
        conflicts = cook_conflicts(raw_conflicts, tt)
2361
2652
        for conflict in conflicts:
2362
 
            warning(conflict)
 
2653
            trace.warning(unicode(conflict))
2363
2654
        try:
2364
2655
            wt.add_conflicts(conflicts)
2365
2656
        except errors.UnsupportedOperation:
2388
2679
        unchanged = dict(unchanged)
2389
2680
        new_desired_files = []
2390
2681
        count = 0
2391
 
        for file_id, (trans_id, tree_path) in desired_files:
 
2682
        for file_id, (trans_id, tree_path, text_sha1) in desired_files:
2392
2683
            accelerator_path = unchanged.get(file_id)
2393
2684
            if accelerator_path is None:
2394
 
                new_desired_files.append((file_id, (trans_id, tree_path)))
 
2685
                new_desired_files.append((file_id,
 
2686
                    (trans_id, tree_path, text_sha1)))
2395
2687
                continue
2396
 
            pb.update('Adding file contents', count + offset, total)
 
2688
            pb.update(gettext('Adding file contents'), count + offset, total)
2397
2689
            if hardlink:
2398
2690
                tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
2399
2691
                                   trans_id)
2404
2696
                    contents = filtered_output_bytes(contents, filters,
2405
2697
                        ContentFilterContext(tree_path, tree))
2406
2698
                try:
2407
 
                    tt.create_file(contents, trans_id)
 
2699
                    tt.create_file(contents, trans_id, sha1=text_sha1)
2408
2700
                finally:
2409
2701
                    try:
2410
2702
                        contents.close()
2413
2705
                        pass
2414
2706
            count += 1
2415
2707
        offset += count
2416
 
    for count, ((trans_id, tree_path), contents) in enumerate(
 
2708
    for count, ((trans_id, tree_path, text_sha1), contents) in enumerate(
2417
2709
            tree.iter_files_bytes(new_desired_files)):
2418
2710
        if wt.supports_content_filtering():
2419
2711
            filters = wt._content_filter_stack(tree_path)
2420
2712
            contents = filtered_output_bytes(contents, filters,
2421
2713
                ContentFilterContext(tree_path, tree))
2422
 
        tt.create_file(contents, trans_id)
2423
 
        pb.update('Adding file contents', count + offset, total)
 
2714
        tt.create_file(contents, trans_id, sha1=text_sha1)
 
2715
        pb.update(gettext('Adding file contents'), count + offset, total)
2424
2716
 
2425
2717
 
2426
2718
def _reparent_children(tt, old_parent, new_parent):
2427
2719
    for child in tt.iter_tree_children(old_parent):
2428
2720
        tt.adjust_path(tt.final_name(child), new_parent, child)
2429
2721
 
 
2722
 
2430
2723
def _reparent_transform_children(tt, old_parent, new_parent):
2431
2724
    by_parent = tt.by_parent()
2432
2725
    for child in by_parent[old_parent]:
2433
2726
        tt.adjust_path(tt.final_name(child), new_parent, child)
2434
2727
    return by_parent[old_parent]
2435
2728
 
 
2729
 
2436
2730
def _content_match(tree, entry, file_id, kind, target_path):
2437
2731
    if entry.kind != kind:
2438
2732
        return False
2538
2832
        tt.set_executability(entry.executable, trans_id)
2539
2833
 
2540
2834
 
2541
 
def get_backup_name(entry, by_parent, parent_trans_id, tt):
2542
 
    return _get_backup_name(entry.name, by_parent, parent_trans_id, tt)
2543
 
 
2544
 
 
2545
 
def _get_backup_name(name, by_parent, parent_trans_id, tt):
2546
 
    """Produce a backup-style name that appears to be available"""
2547
 
    def name_gen():
2548
 
        counter = 1
2549
 
        while True:
2550
 
            yield "%s.~%d~" % (name, counter)
2551
 
            counter += 1
2552
 
    for new_name in name_gen():
2553
 
        if not tt.has_named_child(by_parent, parent_trans_id, new_name):
2554
 
            return new_name
2555
 
 
2556
 
 
2557
 
def _entry_changes(file_id, entry, working_tree):
2558
 
    """Determine in which ways the inventory entry has changed.
2559
 
 
2560
 
    Returns booleans: has_contents, content_mod, meta_mod
2561
 
    has_contents means there are currently contents, but they differ
2562
 
    contents_mod means contents need to be modified
2563
 
    meta_mod means the metadata needs to be modified
2564
 
    """
2565
 
    cur_entry = working_tree.inventory[file_id]
2566
 
    try:
2567
 
        working_kind = working_tree.kind(file_id)
2568
 
        has_contents = True
2569
 
    except NoSuchFile:
2570
 
        has_contents = False
2571
 
        contents_mod = True
2572
 
        meta_mod = False
2573
 
    if has_contents is True:
2574
 
        if entry.kind != working_kind:
2575
 
            contents_mod, meta_mod = True, False
2576
 
        else:
2577
 
            cur_entry._read_tree_state(working_tree.id2path(file_id),
2578
 
                                       working_tree)
2579
 
            contents_mod, meta_mod = entry.detect_changes(cur_entry)
2580
 
            cur_entry._forget_tree_state()
2581
 
    return has_contents, contents_mod, meta_mod
2582
 
 
2583
 
 
2584
2835
def revert(working_tree, target_tree, filenames, backups=False,
2585
2836
           pb=None, change_reporter=None):
2586
2837
    """Revert a working tree's contents to those of a target tree."""
2596
2847
                unversioned_filter=working_tree.is_ignored)
2597
2848
            delta.report_changes(tt.iter_changes(), change_reporter)
2598
2849
        for conflict in conflicts:
2599
 
            warning(conflict)
 
2850
            trace.warning(unicode(conflict))
2600
2851
        pp.next_phase()
2601
2852
        tt.apply()
2602
2853
        working_tree.set_merge_modified(merge_modified)
2610
2861
def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
2611
2862
                              backups, pp, basis_tree=None,
2612
2863
                              merge_modified=None):
2613
 
    child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
 
2864
    child_pb = ui.ui_factory.nested_progress_bar()
2614
2865
    try:
2615
2866
        if merge_modified is None:
2616
2867
            merge_modified = working_tree.merge_modified()
2619
2870
                                      merge_modified, basis_tree)
2620
2871
    finally:
2621
2872
        child_pb.finished()
2622
 
    child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
 
2873
    child_pb = ui.ui_factory.nested_progress_bar()
2623
2874
    try:
2624
2875
        raw_conflicts = resolve_conflicts(tt, child_pb,
2625
2876
            lambda t, c: conflict_pass(t, c, target_tree))
2633
2884
                 backups, merge_modified, basis_tree=None):
2634
2885
    if basis_tree is not None:
2635
2886
        basis_tree.lock_read()
2636
 
    change_list = target_tree.iter_changes(working_tree,
 
2887
    # We ask the working_tree for its changes relative to the target, rather
 
2888
    # than the target changes relative to the working tree. Because WT4 has an
 
2889
    # optimizer to compare itself to a target, but no optimizer for the
 
2890
    # reverse.
 
2891
    change_list = working_tree.iter_changes(target_tree,
2637
2892
        specific_files=specific_files, pb=pb)
2638
2893
    if target_tree.get_root_id() is None:
2639
2894
        skip_root = True
2643
2898
        deferred_files = []
2644
2899
        for id_num, (file_id, path, changed_content, versioned, parent, name,
2645
2900
                kind, executable) in enumerate(change_list):
2646
 
            if skip_root and file_id[0] is not None and parent[0] is None:
 
2901
            target_path, wt_path = path
 
2902
            target_versioned, wt_versioned = versioned
 
2903
            target_parent, wt_parent = parent
 
2904
            target_name, wt_name = name
 
2905
            target_kind, wt_kind = kind
 
2906
            target_executable, wt_executable = executable
 
2907
            if skip_root and wt_parent is None:
2647
2908
                continue
2648
2909
            trans_id = tt.trans_id_file_id(file_id)
2649
2910
            mode_id = None
2650
2911
            if changed_content:
2651
2912
                keep_content = False
2652
 
                if kind[0] == 'file' and (backups or kind[1] is None):
 
2913
                if wt_kind == 'file' and (backups or target_kind is None):
2653
2914
                    wt_sha1 = working_tree.get_file_sha1(file_id)
2654
2915
                    if merge_modified.get(file_id) != wt_sha1:
2655
2916
                        # acquire the basis tree lazily to prevent the
2658
2919
                        if basis_tree is None:
2659
2920
                            basis_tree = working_tree.basis_tree()
2660
2921
                            basis_tree.lock_read()
2661
 
                        if file_id in basis_tree:
 
2922
                        if basis_tree.has_id(file_id):
2662
2923
                            if wt_sha1 != basis_tree.get_file_sha1(file_id):
2663
2924
                                keep_content = True
2664
 
                        elif kind[1] is None and not versioned[1]:
 
2925
                        elif target_kind is None and not target_versioned:
2665
2926
                            keep_content = True
2666
 
                if kind[0] is not None:
 
2927
                if wt_kind is not None:
2667
2928
                    if not keep_content:
2668
2929
                        tt.delete_contents(trans_id)
2669
 
                    elif kind[1] is not None:
2670
 
                        parent_trans_id = tt.trans_id_file_id(parent[0])
2671
 
                        by_parent = tt.by_parent()
2672
 
                        backup_name = _get_backup_name(name[0], by_parent,
2673
 
                                                       parent_trans_id, tt)
 
2930
                    elif target_kind is not None:
 
2931
                        parent_trans_id = tt.trans_id_file_id(wt_parent)
 
2932
                        backup_name = tt._available_backup_name(
 
2933
                            wt_name, parent_trans_id)
2674
2934
                        tt.adjust_path(backup_name, parent_trans_id, trans_id)
2675
 
                        new_trans_id = tt.create_path(name[0], parent_trans_id)
2676
 
                        if versioned == (True, True):
 
2935
                        new_trans_id = tt.create_path(wt_name, parent_trans_id)
 
2936
                        if wt_versioned and target_versioned:
2677
2937
                            tt.unversion_file(trans_id)
2678
2938
                            tt.version_file(file_id, new_trans_id)
2679
2939
                        # New contents should have the same unix perms as old
2680
2940
                        # contents
2681
2941
                        mode_id = trans_id
2682
2942
                        trans_id = new_trans_id
2683
 
                if kind[1] in ('directory', 'tree-reference'):
 
2943
                if target_kind in ('directory', 'tree-reference'):
2684
2944
                    tt.create_directory(trans_id)
2685
 
                    if kind[1] == 'tree-reference':
 
2945
                    if target_kind == 'tree-reference':
2686
2946
                        revision = target_tree.get_reference_revision(file_id,
2687
 
                                                                      path[1])
 
2947
                                                                      target_path)
2688
2948
                        tt.set_tree_reference(revision, trans_id)
2689
 
                elif kind[1] == 'symlink':
 
2949
                elif target_kind == 'symlink':
2690
2950
                    tt.create_symlink(target_tree.get_symlink_target(file_id),
2691
2951
                                      trans_id)
2692
 
                elif kind[1] == 'file':
 
2952
                elif target_kind == 'file':
2693
2953
                    deferred_files.append((file_id, (trans_id, mode_id)))
2694
2954
                    if basis_tree is None:
2695
2955
                        basis_tree = working_tree.basis_tree()
2696
2956
                        basis_tree.lock_read()
2697
2957
                    new_sha1 = target_tree.get_file_sha1(file_id)
2698
 
                    if (file_id in basis_tree and new_sha1 ==
2699
 
                        basis_tree.get_file_sha1(file_id)):
 
2958
                    if (basis_tree.has_id(file_id) and
 
2959
                        new_sha1 == basis_tree.get_file_sha1(file_id)):
2700
2960
                        if file_id in merge_modified:
2701
2961
                            del merge_modified[file_id]
2702
2962
                    else:
2703
2963
                        merge_modified[file_id] = new_sha1
2704
2964
 
2705
2965
                    # preserve the execute bit when backing up
2706
 
                    if keep_content and executable[0] == executable[1]:
2707
 
                        tt.set_executability(executable[1], trans_id)
2708
 
                elif kind[1] is not None:
2709
 
                    raise AssertionError(kind[1])
2710
 
            if versioned == (False, True):
 
2966
                    if keep_content and wt_executable == target_executable:
 
2967
                        tt.set_executability(target_executable, trans_id)
 
2968
                elif target_kind is not None:
 
2969
                    raise AssertionError(target_kind)
 
2970
            if not wt_versioned and target_versioned:
2711
2971
                tt.version_file(file_id, trans_id)
2712
 
            if versioned == (True, False):
 
2972
            if wt_versioned and not target_versioned:
2713
2973
                tt.unversion_file(trans_id)
2714
 
            if (name[1] is not None and
2715
 
                (name[0] != name[1] or parent[0] != parent[1])):
2716
 
                if name[1] == '' and parent[1] is None:
 
2974
            if (target_name is not None and
 
2975
                (wt_name != target_name or wt_parent != target_parent)):
 
2976
                if target_name == '' and target_parent is None:
2717
2977
                    parent_trans = ROOT_PARENT
2718
2978
                else:
2719
 
                    parent_trans = tt.trans_id_file_id(parent[1])
2720
 
                if parent[0] is None and versioned[0]:
2721
 
                    tt.adjust_root_path(name[1], parent_trans)
 
2979
                    parent_trans = tt.trans_id_file_id(target_parent)
 
2980
                if wt_parent is None and wt_versioned:
 
2981
                    tt.adjust_root_path(target_name, parent_trans)
2722
2982
                else:
2723
 
                    tt.adjust_path(name[1], parent_trans, trans_id)
2724
 
            if executable[0] != executable[1] and kind[1] == "file":
2725
 
                tt.set_executability(executable[1], trans_id)
 
2983
                    tt.adjust_path(target_name, parent_trans, trans_id)
 
2984
            if wt_executable != target_executable and target_kind == "file":
 
2985
                tt.set_executability(target_executable, trans_id)
2726
2986
        if working_tree.supports_content_filtering():
2727
2987
            for index, ((trans_id, mode_id), bytes) in enumerate(
2728
2988
                target_tree.iter_files_bytes(deferred_files)):
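A small sketch of the iter_files_bytes contract relied on here (file ids and keys are hypothetical, and the no-guaranteed-order point is an assumption based on the keyed interface): callers supply (file_id, opaque_key) pairs and get back (opaque_key, byte-chunk iterable) pairs, which is why the transform can smuggle (trans_id, mode_id) through as the key.

wanted = [('file-id-a', ('trans-1', None)),     # hypothetical ids and keys
          ('file-id-b', ('trans-2', 'mode-2'))]
for key, chunks in target_tree.iter_files_bytes(wanted):
    trans_id, mode_id = key
    text = ''.join(chunks)                      # chunks is an iterable of byte strings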
2754
3014
    pb = ui.ui_factory.nested_progress_bar()
2755
3015
    try:
2756
3016
        for n in range(10):
2757
 
            pb.update('Resolution pass', n+1, 10)
 
3017
            pb.update(gettext('Resolution pass'), n+1, 10)
2758
3018
            conflicts = tt.find_conflicts()
2759
3019
            if len(conflicts) == 0:
2760
3020
                return new_conflicts
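Restated as a standalone sketch (a hypothetical helper, not bzrlib API): the loop above retries its pass function until find_conflicts() reports nothing, giving up after a fixed budget of passes.

def run_resolution_passes(find_conflicts, resolve_pass, max_passes=10):
    resolved = []
    for n in range(max_passes):
        remaining = find_conflicts()
        if not remaining:
            return resolved               # everything resolved, stop early
        resolved.extend(resolve_pass(remaining))
    raise RuntimeError('conflicts remain after %d passes' % max_passes)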
2784
3044
                existing_file, new_file = conflict[2], conflict[1]
2785
3045
            else:
2786
3046
                existing_file, new_file = conflict[1], conflict[2]
2787
 
            new_name = tt.final_name(existing_file)+'.moved'
 
3047
            new_name = tt.final_name(existing_file) + '.moved'
2788
3048
            tt.adjust_path(new_name, final_parent, existing_file)
2789
3049
            new_conflicts.add((c_type, 'Moved existing file to',
2790
3050
                               existing_file, new_file))
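A worked illustration of the rename above (file names hypothetical): a 'duplicate' conflict means two entries want the same final path, so the pre-existing file is moved aside rather than overwritten.

def moved_aside(name):
    # Mirrors the new_name computation above.
    return name + '.moved'

assert moved_aside('foo.txt') == 'foo.txt.moved'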
2799
3059
 
2800
3060
        elif c_type == 'missing parent':
2801
3061
            trans_id = conflict[1]
2802
 
            try:
2803
 
                tt.cancel_deletion(trans_id)
2804
 
                new_conflicts.add(('deleting parent', 'Not deleting',
2805
 
                                   trans_id))
2806
 
            except KeyError:
 
3062
            if trans_id in tt._removed_contents:
 
3063
                cancel_deletion = True
 
3064
                orphans = tt._get_potential_orphans(trans_id)
 
3065
                if orphans:
 
3066
                    cancel_deletion = False
 
3067
                    # All children are orphans
 
3068
                    for o in orphans:
 
3069
                        try:
 
3070
                            tt.new_orphan(o, trans_id)
 
3071
                        except OrphaningError:
 
3072
                            # Something bad happened so we cancel the directory
 
3073
                            # deletion which will leave it in place with a
 
3074
                            # conflict. The user can deal with it from there.
 
3075
                            # Note that this also catches the case where we don't
 
3076
                            # want to create orphans and leave the directory in
 
3077
                            # place.
 
3078
                            cancel_deletion = True
 
3079
                            break
 
3080
                if cancel_deletion:
 
3081
                    # Cancel the directory deletion
 
3082
                    tt.cancel_deletion(trans_id)
 
3083
                    new_conflicts.add(('deleting parent', 'Not deleting',
 
3084
                                       trans_id))
 
3085
            else:
2807
3086
                create = True
2808
3087
                try:
2809
3088
                    tt.final_name(trans_id)
2812
3091
                        file_id = tt.final_file_id(trans_id)
2813
3092
                        if file_id is None:
2814
3093
                            file_id = tt.inactive_file_id(trans_id)
2815
 
                        entry = path_tree.inventory[file_id]
 
3094
                        _, entry = path_tree.iter_entries_by_dir(
 
3095
                            [file_id]).next()
2816
3096
                        # special-case the other tree root (move its
2817
3097
                        # children to current root)
2818
3098
                        if entry.parent_id is None:
2833
3113
        elif c_type == 'unversioned parent':
2834
3114
            file_id = tt.inactive_file_id(conflict[1])
2835
3115
            # special-case the other tree root (move its children instead)
2836
 
            if path_tree and file_id in path_tree:
2837
 
                if path_tree.inventory[file_id].parent_id is None:
 
3116
            if path_tree and path_tree.has_id(file_id):
 
3117
                if path_tree.path2id('') == file_id:
 
3118
                    # This is the root entry, skip it
2838
3119
                    continue
2839
3120
            tt.version_file(file_id, conflict[1])
2840
3121
            new_conflicts.add((c_type, 'Versioned directory', conflict[1]))
2856
3137
 
2857
3138
def cook_conflicts(raw_conflicts, tt):
2858
3139
    """Generate a list of cooked conflicts, sorted by file path"""
2859
 
    from bzrlib.conflicts import Conflict
2860
3140
    conflict_iter = iter_cook_conflicts(raw_conflicts, tt)
2861
 
    return sorted(conflict_iter, key=Conflict.sort_key)
 
3141
    return sorted(conflict_iter, key=conflicts.Conflict.sort_key)
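A hedged usage sketch (it assumes the working tree offers add_conflicts): the raw tuples gathered during resolution are cooked into Conflict objects, reported, and recorded on the tree.

cooked = cook_conflicts(raw_conflicts, tt)      # sorted by file path
for c in cooked:
    trace.warning(unicode(c))
working_tree.add_conflicts(cooked)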
2862
3142
 
2863
3143
 
2864
3144
def iter_cook_conflicts(raw_conflicts, tt):
2865
 
    from bzrlib.conflicts import Conflict
2866
3145
    fp = FinalPaths(tt)
2867
3146
    for conflict in raw_conflicts:
2868
3147
        c_type = conflict[0]
2870
3149
        modified_path = fp.get_path(conflict[2])
2871
3150
        modified_id = tt.final_file_id(conflict[2])
2872
3151
        if len(conflict) == 3:
2873
 
            yield Conflict.factory(c_type, action=action, path=modified_path,
2874
 
                                     file_id=modified_id)
 
3152
            yield conflicts.Conflict.factory(
 
3153
                c_type, action=action, path=modified_path, file_id=modified_id)
2875
3154
 
2876
3155
        else:
2877
3156
            conflicting_path = fp.get_path(conflict[3])
2878
3157
            conflicting_id = tt.final_file_id(conflict[3])
2879
 
            yield Conflict.factory(c_type, action=action, path=modified_path,
2880
 
                                   file_id=modified_id,
2881
 
                                   conflict_path=conflicting_path,
2882
 
                                   conflict_file_id=conflicting_id)
 
3158
            yield conflicts.Conflict.factory(
 
3159
                c_type, action=action, path=modified_path,
 
3160
                file_id=modified_id,
 
3161
                conflict_path=conflicting_path,
 
3162
                conflict_file_id=conflicting_id)
2883
3163
 
2884
3164
 
2885
3165
class _FileMover(object):
2917
3197
            try:
2918
3198
                os.rename(to, from_)
2919
3199
            except OSError, e:
2920
 
                raise errors.TransformRenameFailed(to, from_, str(e), e.errno)                
 
3200
                raise errors.TransformRenameFailed(to, from_, str(e), e.errno)
2921
3201
        # after rollback, don't reuse _FileMover
2922
3202
        past_renames = None
2923
3203
        pending_deletions = None
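A hedged usage sketch of _FileMover (paths are hypothetical; only methods visible in this module are used): perform a batch of renames and roll them all back if one fails.

mover = _FileMover()
try:
    mover.rename('limbo/new-1', 'foo.txt')
    mover.rename('limbo/new-2', 'bar.txt')
except errors.TransformRenameFailed:
    mover.rollback()
    raise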