~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/transform.py

  • Committer: Patch Queue Manager
  • Date: 2011-09-22 14:12:18 UTC
  • mfrom: (6155.3.1 jam)
  • Revision ID: pqm@pqm.ubuntu.com-20110922141218-86s4uu6nqvourw4f
(jameinel) Cleanup comments bzrlib/smart/__init__.py (John A Meinel)

1
 
# Copyright (C) 2006-2010 Canonical Ltd
 
1
# Copyright (C) 2006-2011 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
19
19
from stat import S_ISREG, S_IEXEC
20
20
import time
21
21
 
22
 
from bzrlib.lazy_import import lazy_import
23
 
lazy_import(globals(), """
 
22
from bzrlib import (
 
23
    errors,
 
24
    lazy_import,
 
25
    registry,
 
26
    trace,
 
27
    tree,
 
28
    )
 
29
lazy_import.lazy_import(globals(), """
24
30
from bzrlib import (
25
31
    annotate,
26
32
    bencode,
27
33
    bzrdir,
28
34
    commit,
 
35
    conflicts,
29
36
    delta,
30
37
    errors,
31
38
    inventory,
33
40
    osutils,
34
41
    revision as _mod_revision,
35
42
    ui,
 
43
    urlutils,
36
44
    )
 
45
from bzrlib.i18n import gettext
37
46
""")
38
47
from bzrlib.errors import (DuplicateKey, MalformedTransform, NoSuchFile,
39
48
                           ReusingTransform, CantMoveRoot,
40
49
                           ExistingLimbo, ImmortalLimbo, NoFinalPath,
41
50
                           UnableCreateSymlink)
42
51
from bzrlib.filters import filtered_output_bytes, ContentFilterContext
43
 
from bzrlib.inventory import InventoryEntry
44
52
from bzrlib.osutils import (
45
53
    delete_any,
46
54
    file_kind,
47
55
    has_symlinks,
48
 
    lexists,
49
56
    pathjoin,
50
57
    sha_file,
51
58
    splitpath,
52
59
    supports_executable,
53
 
)
 
60
    )
54
61
from bzrlib.progress import ProgressPhase
55
62
from bzrlib.symbol_versioning import (
56
 
        deprecated_function,
57
 
        deprecated_in,
58
 
        )
59
 
from bzrlib.trace import mutter, warning
60
 
from bzrlib import tree
61
 
import bzrlib.ui
62
 
import bzrlib.urlutils as urlutils
 
63
    deprecated_function,
 
64
    deprecated_in,
 
65
    deprecated_method,
 
66
    )
63
67
 
64
68
 
65
69
ROOT_PARENT = "root-parent"
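
The lazy_import block above is what defers the heavier bzrlib imports until first use. A minimal sketch of the mechanism, using the older call form that this hunk replaces (the new code reaches the same function as lazy_import.lazy_import); the module list is illustrative:

    from bzrlib.lazy_import import lazy_import
    lazy_import(globals(), """
    from bzrlib import (
        annotate,
        bencode,
        )
    """)
    # 'annotate' and 'bencode' are now lazy proxies in this namespace; the
    # real modules are only imported the first time an attribute on them is
    # touched, e.g. bencode.bdecode(...) pulls in bzrlib.bencode.
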
66
70
 
67
 
 
68
71
def unique_add(map, key, value):
69
72
    if key in map:
70
73
        raise DuplicateKey(key=key)
71
74
    map[key] = value
72
75
 
73
76
 
 
77
 
74
78
class _TransformResults(object):
75
79
    def __init__(self, modified_paths, rename_count):
76
80
        object.__init__(self)
100
104
        self._new_parent = {}
101
105
        # mapping of trans_id with new contents -> new file_kind
102
106
        self._new_contents = {}
 
107
        # mapping of trans_id => (sha1 of content, stat_value)
 
108
        self._observed_sha1s = {}
103
109
        # Set of trans_ids whose contents will be removed
104
110
        self._removed_contents = set()
105
111
        # Mapping of trans_id -> new execute-bit value
124
130
            self._new_root = self.trans_id_tree_file_id(root_id)
125
131
        else:
126
132
            self._new_root = None
127
 
        # Indictor of whether the transform has been applied
 
133
        # Indicator of whether the transform has been applied
128
134
        self._done = False
129
135
        # A progress bar
130
136
        self._pb = pb
133
139
        # A counter of how many files have been renamed
134
140
        self.rename_count = 0
135
141
 
 
142
    def __enter__(self):
 
143
        """Support Context Manager API."""
 
144
        return self
 
145
 
 
146
    def __exit__(self, exc_type, exc_val, exc_tb):
 
147
        """Support Context Manager API."""
 
148
        self.finalize()
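
The new __enter__/__exit__ pair makes a transform usable as a context manager, so finalize() runs even if apply() raises. A usage sketch, assuming the usual TreeTransform(working_tree) constructor and an existing working tree wt:

    from bzrlib.transform import TreeTransform

    def add_readme(wt):
        with TreeTransform(wt) as tt:
            # new_file(name, parent_id, contents, ...) as defined later in
            # this file; tt.root is the trans_id of the tree root.
            tt.new_file('README', tt.root, ['Hello\n'])
            tt.apply()
        # leaving the with-block has called tt.finalize()
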
 
149
 
136
150
    def finalize(self):
137
151
        """Release the working tree lock, if held.
138
152
 
213
227
        This means that the old root trans-id becomes obsolete, so it is
214
228
        recommended only to invoke this after the root trans-id has become
215
229
        irrelevant.
 
230
 
216
231
        """
217
232
        new_roots = [k for k, v in self._new_parent.iteritems() if v is
218
233
                     ROOT_PARENT]
224
239
            self._new_root = new_roots[0]
225
240
            return
226
241
        old_new_root = new_roots[0]
227
 
        # TODO: What to do if a old_new_root is present, but self._new_root is
228
 
        #       not listed as being removed? This code explicitly unversions
229
 
        #       the old root and versions it with the new file_id. Though that
230
 
        #       seems like an incomplete delta
231
 
 
232
242
        # unversion the new root's directory.
233
 
        file_id = self.final_file_id(old_new_root)
 
243
        if self.final_kind(self._new_root) is None:
 
244
            file_id = self.final_file_id(old_new_root)
 
245
        else:
 
246
            file_id = self.final_file_id(self._new_root)
234
247
        if old_new_root in self._new_id:
235
248
            self.cancel_versioning(old_new_root)
236
249
        else:
240
253
        if (self.tree_file_id(self._new_root) is not None and
241
254
            self._new_root not in self._removed_id):
242
255
            self.unversion_file(self._new_root)
243
 
        self.version_file(file_id, self._new_root)
 
256
        if file_id is not None:
 
257
            self.version_file(file_id, self._new_root)
244
258
 
245
259
        # Now move children of new root into old root directory.
246
260
        # Ensure all children are registered with the transaction, but don't
380
394
        return sorted(FinalPaths(self).get_paths(new_ids))
381
395
 
382
396
    def _inventory_altered(self):
383
 
        """Get the trans_ids and paths of files needing new inv entries."""
384
 
        new_ids = set()
385
 
        for id_set in [self._new_name, self._new_parent, self._new_id,
 
397
        """Determine which trans_ids need new Inventory entries.
 
398
 
 
399
        A new entry is needed when anything that would be reflected by an
 
400
        inventory entry changes, including file name, file_id, parent file_id,
 
401
        file kind, and the execute bit.
 
402
 
 
403
        Some care is taken to return entries with real changes, not cases
 
404
        where the value is deleted and then restored to its original value,
 
405
        but some actually unchanged values may be returned.
 
406
 
 
407
        :returns: A list of (path, trans_id) for all items requiring an
 
408
            inventory change. Ordered by path.
 
409
        """
 
410
        changed_ids = set()
 
411
        # Find entries whose file_ids are new (or changed).
 
412
        new_file_id = set(t for t in self._new_id
 
413
                          if self._new_id[t] != self.tree_file_id(t))
 
414
        for id_set in [self._new_name, self._new_parent, new_file_id,
386
415
                       self._new_executability]:
387
 
            new_ids.update(id_set)
 
416
            changed_ids.update(id_set)
 
417
        # removing implies a kind change
388
418
        changed_kind = set(self._removed_contents)
 
419
        # so does adding
389
420
        changed_kind.intersection_update(self._new_contents)
390
 
        changed_kind.difference_update(new_ids)
 
421
        # Ignore entries that are already known to have changed.
 
422
        changed_kind.difference_update(changed_ids)
 
423
        #  to keep only the truly changed ones
391
424
        changed_kind = (t for t in changed_kind
392
425
                        if self.tree_kind(t) != self.final_kind(t))
393
 
        new_ids.update(changed_kind)
394
 
        return sorted(FinalPaths(self).get_paths(new_ids))
 
426
        # all kind changes will alter the inventory
 
427
        changed_ids.update(changed_kind)
 
428
        # To find entries with changed parent_ids, find parents which existed,
 
429
        # but changed file_id.
 
430
        changed_file_id = set(t for t in new_file_id if t in self._removed_id)
 
431
        # Now add all their children to the set.
 
432
        for parent_trans_id in new_file_id:
 
433
            changed_ids.update(self.iter_tree_children(parent_trans_id))
 
434
        return sorted(FinalPaths(self).get_paths(changed_ids))
395
435
 
396
436
    def final_kind(self, trans_id):
397
437
        """Determine the final file kind, after any changes applied.
522
562
        for trans_id in self._removed_id:
523
563
            file_id = self.tree_file_id(trans_id)
524
564
            if file_id is not None:
 
565
                # XXX: This seems like something that should go via a different
 
566
                #      indirection.
525
567
                if self._tree.inventory[file_id].kind == 'directory':
526
568
                    parents.append(trans_id)
527
569
            elif self.tree_kind(trans_id) == 'directory':
531
573
            # ensure that all children are registered with the transaction
532
574
            list(self.iter_tree_children(parent_id))
533
575
 
 
576
    @deprecated_method(deprecated_in((2, 3, 0)))
534
577
    def has_named_child(self, by_parent, parent_id, name):
535
 
        try:
536
 
            children = by_parent[parent_id]
537
 
        except KeyError:
538
 
            children = []
539
 
        for child in children:
 
578
        return self._has_named_child(
 
579
            name, parent_id, known_children=by_parent.get(parent_id, []))
 
580
 
 
581
    def _has_named_child(self, name, parent_id, known_children):
 
582
        """Does a parent already have a name child.
 
583
 
 
584
        :param name: The searched for name.
 
585
 
 
586
        :param parent_id: The parent for which the check is made.
 
587
 
 
588
        :param known_children: The already known children. This should have
 
589
            been recently obtained from `self.by_parent.get(parent_id)`
 
590
            (or will be if None is passed).
 
591
        """
 
592
        if known_children is None:
 
593
            known_children = self.by_parent().get(parent_id, [])
 
594
        for child in known_children:
540
595
            if self.final_name(child) == name:
541
596
                return True
542
 
        try:
543
 
            path = self._tree_id_paths[parent_id]
544
 
        except KeyError:
 
597
        parent_path = self._tree_id_paths.get(parent_id, None)
 
598
        if parent_path is None:
 
599
            # No parent... no children
545
600
            return False
546
 
        childpath = joinpath(path, name)
547
 
        child_id = self._tree_path_ids.get(childpath)
 
601
        child_path = joinpath(parent_path, name)
 
602
        child_id = self._tree_path_ids.get(child_path, None)
548
603
        if child_id is None:
549
 
            return lexists(self._tree.abspath(childpath))
 
604
            # Not known by the tree transform yet, check the filesystem
 
605
            return osutils.lexists(self._tree.abspath(child_path))
550
606
        else:
551
 
            if self.final_parent(child_id) != parent_id:
552
 
                return False
553
 
            if child_id in self._removed_contents:
554
 
                # XXX What about dangling file-ids?
555
 
                return False
556
 
            else:
557
 
                return True
 
607
            raise AssertionError('child_id is missing: %s, %s, %s'
 
608
                                 % (name, parent_id, child_id))
 
609
 
 
610
    def _available_backup_name(self, name, target_id):
 
611
        """Find an available backup name.
 
612
 
 
613
        :param name: The basename of the file.
 
614
 
 
615
        :param target_id: The directory trans_id where the backup should 
 
616
            be placed.
 
617
        """
 
618
        known_children = self.by_parent().get(target_id, [])
 
619
        return osutils.available_backup_name(
 
620
            name,
 
621
            lambda base: self._has_named_child(
 
622
                base, target_id, known_children))
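
osutils.available_backup_name, used just above, pairs a base name with a callable that says whether a candidate is already taken. A stand-alone sketch of the idea; the '.~N~' suffix is bzr's usual backup naming, but treat the exact format here as illustrative:

    def pick_backup_name(name, exists):
        # keep trying name.~1~, name.~2~, ... until a free one is found
        counter = 0
        while True:
            counter += 1
            candidate = '%s.~%d~' % (name, counter)
            if not exists(candidate):
                return candidate

    taken = set(['README.~1~'])
    print pick_backup_name('README', taken.__contains__)   # README.~2~
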
558
623
 
559
624
    def _parent_loops(self):
560
625
        """No entry should be its own ancestor"""
599
664
            if kind is None:
600
665
                conflicts.append(('versioning no contents', trans_id))
601
666
                continue
602
 
            if not InventoryEntry.versionable_kind(kind):
 
667
            if not inventory.InventoryEntry.versionable_kind(kind):
603
668
                conflicts.append(('versioning bad kind', trans_id, kind))
604
669
        return conflicts
605
670
 
637
702
        if (self._new_name, self._new_parent) == ({}, {}):
638
703
            return conflicts
639
704
        for children in by_parent.itervalues():
640
 
            name_ids = [(self.final_name(t), t) for t in children]
641
 
            if not self._case_sensitive_target:
642
 
                name_ids = [(n.lower(), t) for n, t in name_ids]
 
705
            name_ids = []
 
706
            for child_tid in children:
 
707
                name = self.final_name(child_tid)
 
708
                if name is not None:
 
709
                    # Keep children only if they still exist in the end
 
710
                    if not self._case_sensitive_target:
 
711
                        name = name.lower()
 
712
                    name_ids.append((name, child_tid))
643
713
            name_ids.sort()
644
714
            last_name = None
645
715
            last_trans_id = None
669
739
        return conflicts
670
740
 
671
741
    def _parent_type_conflicts(self, by_parent):
672
 
        """parents must have directory 'contents'."""
 
742
        """Children must have a directory parent"""
673
743
        conflicts = []
674
744
        for parent_id, children in by_parent.iteritems():
675
745
            if parent_id is ROOT_PARENT:
676
746
                continue
677
 
            if not self._any_contents(children):
 
747
            no_children = True
 
748
            for child_id in children:
 
749
                if self.final_kind(child_id) is not None:
 
750
                    no_children = False
 
751
                    break
 
752
            if no_children:
678
753
                continue
 
754
            # There is at least a child, so we need an existing directory to
 
755
            # contain it.
679
756
            kind = self.final_kind(parent_id)
680
757
            if kind is None:
 
758
                # The directory will be deleted
681
759
                conflicts.append(('missing parent', parent_id))
682
760
            elif kind != "directory":
 
761
                # Meh, we need a *directory* to put something in it
683
762
                conflicts.append(('non-directory parent', parent_id))
684
763
        return conflicts
685
764
 
686
 
    def _any_contents(self, trans_ids):
687
 
        """Return true if any of the trans_ids, will have contents."""
688
 
        for trans_id in trans_ids:
689
 
            if self.final_kind(trans_id) is not None:
690
 
                return True
691
 
        return False
692
 
 
693
765
    def _set_executability(self, path, trans_id):
694
766
        """Set the executability of versioned files """
695
767
        if supports_executable():
717
789
        return trans_id
718
790
 
719
791
    def new_file(self, name, parent_id, contents, file_id=None,
720
 
                 executable=None):
 
792
                 executable=None, sha1=None):
721
793
        """Convenience method to create files.
722
794
 
723
795
        name is the name of the file to create.
730
802
        trans_id = self._new_entry(name, parent_id, file_id)
731
803
        # TODO: rather than scheduling a set_executable call,
732
804
        # have create_file create the file with the right mode.
733
 
        self.create_file(contents, trans_id)
 
805
        self.create_file(contents, trans_id, sha1=sha1)
734
806
        if executable is not None:
735
807
            self.set_executability(executable, trans_id)
736
808
        return trans_id
759
831
        self.create_symlink(target, trans_id)
760
832
        return trans_id
761
833
 
 
834
    def new_orphan(self, trans_id, parent_id):
 
835
        """Schedule an item to be orphaned.
 
836
 
 
837
        When a directory is about to be removed, its children, if they are not
 
838
        versioned are moved out of the way: they don't have a parent anymore.
 
839
 
 
840
        :param trans_id: The trans_id of the existing item.
 
841
        :param parent_id: The parent trans_id of the item.
 
842
        """
 
843
        raise NotImplementedError(self.new_orphan)
 
844
 
 
845
    def _get_potential_orphans(self, dir_id):
 
846
        """Find the potential orphans in a directory.
 
847
 
 
848
        A directory can't be safely deleted if there are versioned files in it.
 
849
        If all the contained files are unversioned then they can be orphaned.
 
850
 
 
851
        The 'None' return value means that the directory contains at least one
 
852
        versioned file and should not be deleted.
 
853
 
 
854
        :param dir_id: The directory trans id.
 
855
 
 
856
        :return: A list of the orphan trans ids or None if at least one
 
857
             versioned file is present.
 
858
        """
 
859
        orphans = []
 
860
        # Find the potential orphans, stop if one item should be kept
 
861
        for child_tid in self.by_parent()[dir_id]:
 
862
            if child_tid in self._removed_contents:
 
863
                # The child is removed as part of the transform. Since it was
 
864
                # versioned before, it's not an orphan
 
865
                continue
 
866
            elif self.final_file_id(child_tid) is None:
 
867
                # The child is not versioned
 
868
                orphans.append(child_tid)
 
869
            else:
 
870
                # We have a versioned file here, searching for orphans is
 
871
                # meaningless.
 
872
                orphans = None
 
873
                break
 
874
        return orphans
 
875
 
762
876
    def _affected_ids(self):
763
877
        """Return the set of transform ids affected by the transform"""
764
878
        trans_ids = set(self._removed_id)
1076
1190
        self._deletiondir = None
1077
1191
        # A mapping of transform ids to their limbo filename
1078
1192
        self._limbo_files = {}
 
1193
        self._possibly_stale_limbo_files = set()
1079
1194
        # A mapping of transform ids to a set of the transform ids of children
1080
1195
        # that their limbo directory has
1081
1196
        self._limbo_children = {}
1094
1209
        if self._tree is None:
1095
1210
            return
1096
1211
        try:
1097
 
            entries = [(self._limbo_name(t), t, k) for t, k in
1098
 
                       self._new_contents.iteritems()]
1099
 
            entries.sort(reverse=True)
1100
 
            for path, trans_id, kind in entries:
1101
 
                delete_any(path)
 
1212
            limbo_paths = self._limbo_files.values() + list(
 
1213
                self._possibly_stale_limbo_files)
 
1214
            limbo_paths = sorted(limbo_paths, reverse=True)
 
1215
            for path in limbo_paths:
 
1216
                try:
 
1217
                    delete_any(path)
 
1218
                except OSError, e:
 
1219
                    if e.errno != errno.ENOENT:
 
1220
                        raise
 
1221
                    # XXX: warn? perhaps we just got interrupted at an
 
1222
                    # inconvenient moment, but perhaps files are disappearing
 
1223
                    # from under us?
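
The cleanup above now tolerates limbo files that have already disappeared. Stand-alone, this is the usual ignore-ENOENT idiom:

    import errno
    import os

    def delete_if_present(path):
        try:
            os.unlink(path)
        except OSError, e:
            if e.errno != errno.ENOENT:
                raise   # a real failure; an already-missing file is fine
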
1102
1224
            try:
1103
1225
                delete_any(self._limbodir)
1104
1226
            except OSError:
1153
1275
        entries from _limbo_files, because they are now stale.
1154
1276
        """
1155
1277
        for trans_id in trans_ids:
1156
 
            old_path = self._limbo_files.pop(trans_id)
 
1278
            old_path = self._limbo_files[trans_id]
 
1279
            self._possibly_stale_limbo_files.add(old_path)
 
1280
            del self._limbo_files[trans_id]
1157
1281
            if trans_id not in self._new_contents:
1158
1282
                continue
1159
1283
            new_path = self._limbo_name(trans_id)
1160
1284
            os.rename(old_path, new_path)
 
1285
            self._possibly_stale_limbo_files.remove(old_path)
1161
1286
            for descendant in self._limbo_descendants(trans_id):
1162
1287
                desc_path = self._limbo_files[descendant]
1163
1288
                desc_path = new_path + desc_path[len(old_path):]
1170
1295
            descendants.update(self._limbo_descendants(descendant))
1171
1296
        return descendants
1172
1297
 
1173
 
    def create_file(self, contents, trans_id, mode_id=None):
 
1298
    def create_file(self, contents, trans_id, mode_id=None, sha1=None):
1174
1299
        """Schedule creation of a new file.
1175
1300
 
1176
 
        See also new_file.
1177
 
 
1178
 
        Contents is an iterator of strings, all of which will be written
1179
 
        to the target destination.
1180
 
 
1181
 
        New file takes the permissions of any existing file with that id,
1182
 
        unless mode_id is specified.
 
1301
        :seealso: new_file.
 
1302
 
 
1303
        :param contents: an iterator of strings, all of which will be written
 
1304
            to the target destination.
 
1305
        :param trans_id: TreeTransform handle
 
1306
        :param mode_id: If not None, force the mode of the target file to match
 
1307
            the mode of the object referenced by mode_id.
 
1308
            Otherwise, we will try to preserve mode bits of an existing file.
 
1309
        :param sha1: If the sha1 of this content is already known, pass it in.
 
1310
            We can use it to prevent future sha1 computations.
1183
1311
        """
1184
1312
        name = self._limbo_name(trans_id)
1185
1313
        f = open(name, 'wb')
1186
1314
        try:
1187
 
            try:
1188
 
                unique_add(self._new_contents, trans_id, 'file')
1189
 
            except:
1190
 
                # Clean up the file, it never got registered so
1191
 
                # TreeTransform.finalize() won't clean it up.
1192
 
                f.close()
1193
 
                os.unlink(name)
1194
 
                raise
1195
 
 
 
1315
            unique_add(self._new_contents, trans_id, 'file')
1196
1316
            f.writelines(contents)
1197
1317
        finally:
1198
1318
            f.close()
1199
1319
        self._set_mtime(name)
1200
1320
        self._set_mode(trans_id, mode_id, S_ISREG)
 
1321
        # It is unfortunate we have to use lstat instead of fstat, but we just
 
1322
        # used utime and chmod on the file, so we need the accurate final
 
1323
        # details.
 
1324
        if sha1 is not None:
 
1325
            self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
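
The (sha1, lstat) pairs recorded here let the working tree trust the hash later without re-reading the file, for as long as the stat fingerprint still matches. A stand-alone sketch of that caching pattern (the names are illustrative, not bzrlib API):

    import hashlib
    import os

    def fingerprint(st):
        return (st.st_size, int(st.st_mtime), int(st.st_ctime))

    class Sha1Cache(object):

        def __init__(self):
            self._known = {}    # path -> (sha1, fingerprint at write time)

        def record(self, path, sha1):
            # stat *after* any chmod/utime, mirroring the lstat call above
            self._known[path] = (sha1, fingerprint(os.lstat(path)))

        def sha1(self, path):
            cached = self._known.get(path)
            if cached is not None and cached[1] == fingerprint(os.lstat(path)):
                return cached[0]            # unchanged since it was written
            f = open(path, 'rb')
            try:
                return hashlib.sha1(f.read()).hexdigest()
            finally:
                f.close()
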
1201
1326
 
1202
1327
    def _read_file_chunks(self, trans_id):
1203
1328
        cur_file = open(self._limbo_name(trans_id), 'rb')
1262
1387
    def cancel_creation(self, trans_id):
1263
1388
        """Cancel the creation of new file contents."""
1264
1389
        del self._new_contents[trans_id]
 
1390
        if trans_id in self._observed_sha1s:
 
1391
            del self._observed_sha1s[trans_id]
1265
1392
        children = self._limbo_children.get(trans_id)
1266
1393
        # if this is a limbo directory with children, move them before removing
1267
1394
        # the directory
1271
1398
            del self._limbo_children_names[trans_id]
1272
1399
        delete_any(self._limbo_name(trans_id))
1273
1400
 
 
1401
    def new_orphan(self, trans_id, parent_id):
 
1402
        # FIXME: There is no tree config, so we use the branch one (it's weird
 
1403
        # to define it this way as orphaning can only occur in a working tree,
 
1404
        # but that's all we have (for now). It will find the option in
 
1405
        # locations.conf or bazaar.conf though) -- vila 20100916
 
1406
        conf = self._tree.branch.get_config()
 
1407
        conf_var_name = 'bzr.transform.orphan_policy'
 
1408
        orphan_policy = conf.get_user_option(conf_var_name)
 
1409
        default_policy = orphaning_registry.default_key
 
1410
        if orphan_policy is None:
 
1411
            orphan_policy = default_policy
 
1412
        if orphan_policy not in orphaning_registry:
 
1413
            trace.warning('%s (from %s) is not a known policy, defaulting '
 
1414
                'to %s' % (orphan_policy, conf_var_name, default_policy))
 
1415
            orphan_policy = default_policy
 
1416
        handle_orphan = orphaning_registry.get(orphan_policy)
 
1417
        handle_orphan(self, trans_id, parent_id)
 
1418
 
 
1419
 
 
1420
class OrphaningError(errors.BzrError):
 
1421
 
 
1422
    # Only bugs could lead to such exception being seen by the user
 
1423
    internal_error = True
 
1424
    _fmt = "Error while orphaning %s in %s directory"
 
1425
 
 
1426
    def __init__(self, orphan, parent):
 
1427
        errors.BzrError.__init__(self)
 
1428
        self.orphan = orphan
 
1429
        self.parent = parent
 
1430
 
 
1431
 
 
1432
class OrphaningForbidden(OrphaningError):
 
1433
 
 
1434
    _fmt = "Policy: %s doesn't allow creating orphans."
 
1435
 
 
1436
    def __init__(self, policy):
 
1437
        errors.BzrError.__init__(self)
 
1438
        self.policy = policy
 
1439
 
 
1440
 
 
1441
def move_orphan(tt, orphan_id, parent_id):
 
1442
    """See TreeTransformBase.new_orphan.
 
1443
 
 
1444
    This creates a new orphan in the `bzr-orphans` dir at the root of the
 
1445
    `TreeTransform`.
 
1446
 
 
1447
    :param tt: The TreeTransform orphaning `trans_id`.
 
1448
 
 
1449
    :param orphan_id: The trans id that should be orphaned.
 
1450
 
 
1451
    :param parent_id: The orphan parent trans id.
 
1452
    """
 
1453
    # Add the orphan dir if it doesn't exist
 
1454
    orphan_dir_basename = 'bzr-orphans'
 
1455
    od_id = tt.trans_id_tree_path(orphan_dir_basename)
 
1456
    if tt.final_kind(od_id) is None:
 
1457
        tt.create_directory(od_id)
 
1458
    parent_path = tt._tree_id_paths[parent_id]
 
1459
    # Find a name that doesn't exist yet in the orphan dir
 
1460
    actual_name = tt.final_name(orphan_id)
 
1461
    new_name = tt._available_backup_name(actual_name, od_id)
 
1462
    tt.adjust_path(new_name, od_id, orphan_id)
 
1463
    trace.warning('%s has been orphaned in %s'
 
1464
                  % (joinpath(parent_path, actual_name), orphan_dir_basename))
 
1465
 
 
1466
 
 
1467
def refuse_orphan(tt, orphan_id, parent_id):
 
1468
    """See TreeTransformBase.new_orphan.
 
1469
 
 
1470
    This refuses to create orphan, letting the caller handle the conflict.
 
1471
    """
 
1472
    raise OrphaningForbidden('never')
 
1473
 
 
1474
 
 
1475
orphaning_registry = registry.Registry()
 
1476
orphaning_registry.register(
 
1477
    'conflict', refuse_orphan,
 
1478
    'Leave orphans in place and create a conflict on the directory.')
 
1479
orphaning_registry.register(
 
1480
    'move', move_orphan,
 
1481
    'Move orphans into the bzr-orphans directory.')
 
1482
orphaning_registry._set_default_key('conflict')
 
1483
 
1274
1484
 
1275
1485
class TreeTransform(DiskTreeTransform):
1276
1486
    """Represent a tree transformation.
1514
1724
        """
1515
1725
        if not no_conflicts:
1516
1726
            self._check_malformed()
1517
 
        child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
 
1727
        child_pb = ui.ui_factory.nested_progress_bar()
1518
1728
        try:
1519
1729
            if precomputed_delta is None:
1520
 
                child_pb.update('Apply phase', 0, 2)
 
1730
                child_pb.update(gettext('Apply phase'), 0, 2)
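
Many of the hunks from here on are mechanical i18n changes: user-visible progress strings get wrapped in gettext, which the first hunk adds to the lazy imports from bzrlib.i18n. The pattern, shown stand-alone:

    from bzrlib.i18n import gettext
    from bzrlib import ui

    pb = ui.ui_factory.nested_progress_bar()
    try:
        pb.update(gettext('Apply phase'), 0, 2)
    finally:
        pb.finished()
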
1521
1731
                inventory_delta = self._generate_inventory_delta()
1522
1732
                offset = 1
1523
1733
            else:
1528
1738
            else:
1529
1739
                mover = _mover
1530
1740
            try:
1531
 
                child_pb.update('Apply phase', 0 + offset, 2 + offset)
 
1741
                child_pb.update(gettext('Apply phase'), 0 + offset, 2 + offset)
1532
1742
                self._apply_removals(mover)
1533
 
                child_pb.update('Apply phase', 1 + offset, 2 + offset)
 
1743
                child_pb.update(gettext('Apply phase'), 1 + offset, 2 + offset)
1534
1744
                modified_paths = self._apply_insertions(mover)
1535
1745
            except:
1536
1746
                mover.rollback()
1539
1749
                mover.apply_deletions()
1540
1750
        finally:
1541
1751
            child_pb.finished()
 
1752
        if self.final_file_id(self.root) is None:
 
1753
            inventory_delta = [e for e in inventory_delta if e[0] != '']
1542
1754
        self._tree.apply_inventory_delta(inventory_delta)
 
1755
        self._apply_observed_sha1s()
1543
1756
        self._done = True
1544
1757
        self.finalize()
1545
1758
        return _TransformResults(modified_paths, self.rename_count)
1547
1760
    def _generate_inventory_delta(self):
1548
1761
        """Generate an inventory delta for the current transform."""
1549
1762
        inventory_delta = []
1550
 
        child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
 
1763
        child_pb = ui.ui_factory.nested_progress_bar()
1551
1764
        new_paths = self._inventory_altered()
1552
1765
        total_entries = len(new_paths) + len(self._removed_id)
1553
1766
        try:
1554
1767
            for num, trans_id in enumerate(self._removed_id):
1555
1768
                if (num % 10) == 0:
1556
 
                    child_pb.update('removing file', num, total_entries)
 
1769
                    child_pb.update(gettext('removing file'), num, total_entries)
1557
1770
                if trans_id == self._new_root:
1558
1771
                    file_id = self._tree.get_root_id()
1559
1772
                else:
1571
1784
            final_kinds = {}
1572
1785
            for num, (path, trans_id) in enumerate(new_paths):
1573
1786
                if (num % 10) == 0:
1574
 
                    child_pb.update('adding file',
 
1787
                    child_pb.update(gettext('adding file'),
1575
1788
                                    num + len(self._removed_id), total_entries)
1576
1789
                file_id = new_path_file_ids[trans_id]
1577
1790
                if file_id is None:
1615
1828
        """
1616
1829
        tree_paths = list(self._tree_path_ids.iteritems())
1617
1830
        tree_paths.sort(reverse=True)
1618
 
        child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
 
1831
        child_pb = ui.ui_factory.nested_progress_bar()
1619
1832
        try:
1620
 
            for num, data in enumerate(tree_paths):
1621
 
                path, trans_id = data
1622
 
                child_pb.update('removing file', num, len(tree_paths))
 
1833
            for num, (path, trans_id) in enumerate(tree_paths):
 
1834
                # do not attempt to move root into a subdirectory of itself.
 
1835
                if path == '':
 
1836
                    continue
 
1837
                child_pb.update(gettext('removing file'), num, len(tree_paths))
1623
1838
                full_path = self._tree.abspath(path)
1624
1839
                if trans_id in self._removed_contents:
1625
1840
                    delete_path = os.path.join(self._deletiondir, trans_id)
1650
1865
        modified_paths = []
1651
1866
        new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1652
1867
                                 new_paths)
1653
 
        child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
 
1868
        child_pb = ui.ui_factory.nested_progress_bar()
1654
1869
        try:
1655
1870
            for num, (path, trans_id) in enumerate(new_paths):
1656
1871
                if (num % 10) == 0:
1657
 
                    child_pb.update('adding file', num, len(new_paths))
 
1872
                    child_pb.update(gettext('adding file'), num, len(new_paths))
1658
1873
                full_path = self._tree.abspath(path)
1659
1874
                if trans_id in self._needs_rename:
1660
1875
                    try:
1665
1880
                            raise
1666
1881
                    else:
1667
1882
                        self.rename_count += 1
 
1883
                    # TODO: if trans_id in self._observed_sha1s, we should
 
1884
                    #       re-stat the final target, since ctime will be
 
1885
                    #       updated by the change.
1668
1886
                if (trans_id in self._new_contents or
1669
1887
                    self.path_changed(trans_id)):
1670
1888
                    if trans_id in self._new_contents:
1671
1889
                        modified_paths.append(full_path)
1672
1890
                if trans_id in self._new_executability:
1673
1891
                    self._set_executability(path, trans_id)
 
1892
                if trans_id in self._observed_sha1s:
 
1893
                    o_sha1, o_st_val = self._observed_sha1s[trans_id]
 
1894
                    st = osutils.lstat(full_path)
 
1895
                    self._observed_sha1s[trans_id] = (o_sha1, st)
1674
1896
        finally:
1675
1897
            child_pb.finished()
 
1898
        for path, trans_id in new_paths:
 
1899
            # new_paths includes stuff like workingtree conflicts. Only the
 
1900
            # stuff in new_contents actually comes from limbo.
 
1901
            if trans_id in self._limbo_files:
 
1902
                del self._limbo_files[trans_id]
1676
1903
        self._new_contents.clear()
1677
1904
        return modified_paths
1678
1905
 
 
1906
    def _apply_observed_sha1s(self):
 
1907
        """After we have finished renaming everything, update observed sha1s
 
1908
 
 
1909
        This has to be done after self._tree.apply_inventory_delta, otherwise
 
1910
        it doesn't know anything about the files we are updating. Also, we want
 
1911
        to do this as late as possible, so that most entries end up cached.
 
1912
        """
 
1913
        # TODO: this doesn't update the stat information for directories. So
 
1914
        #       the first 'bzr status' will still need to rewrite
 
1915
        #       .bzr/checkout/dirstate. However, we at least don't need to
 
1916
        #       re-read all of the files.
 
1917
        # TODO: If the operation took a while, we could do a time.sleep(3) here
 
1918
        #       to allow the clock to tick over and ensure we won't have any
 
1919
        #       problems. (we could observe start time, and finish time, and if
 
1920
        #       it is less than eg 10% overhead, add a sleep call.)
 
1921
        paths = FinalPaths(self)
 
1922
        for trans_id, observed in self._observed_sha1s.iteritems():
 
1923
            path = paths.get_path(trans_id)
 
1924
            # We could get the file_id, but dirstate prefers to use the path
 
1925
            # anyway, and it is 'cheaper' to determine.
 
1926
            # file_id = self._new_id[trans_id]
 
1927
            self._tree._observed_sha1(None, path, observed)
 
1928
 
1679
1929
 
1680
1930
class TransformPreview(DiskTreeTransform):
1681
1931
    """A TreeTransform for generating preview trees.
1697
1947
        path = self._tree_id_paths.get(trans_id)
1698
1948
        if path is None:
1699
1949
            return None
1700
 
        file_id = self._tree.path2id(path)
1701
 
        try:
1702
 
            return self._tree.kind(file_id)
1703
 
        except errors.NoSuchFile:
1704
 
            return None
 
1950
        kind = self._tree.path_content_summary(path)[0]
 
1951
        if kind == 'missing':
 
1952
            kind = None
 
1953
        return kind
1705
1954
 
1706
1955
    def _set_mode(self, trans_id, mode_id, typefunc):
1707
1956
        """Set the mode of new file contents.
1727
1976
            childpath = joinpath(path, child)
1728
1977
            yield self.trans_id_tree_path(childpath)
1729
1978
 
1730
 
 
1731
 
class _PreviewTree(tree.Tree):
 
1979
    def new_orphan(self, trans_id, parent_id):
 
1980
        raise NotImplementedError(self.new_orphan)
 
1981
 
 
1982
 
 
1983
class _PreviewTree(tree.InventoryTree):
1732
1984
    """Partial implementation of Tree to support show_diff_trees"""
1733
1985
 
1734
1986
    def __init__(self, transform):
1763
2015
                yield self._get_repository().revision_tree(revision_id)
1764
2016
 
1765
2017
    def _get_file_revision(self, file_id, vf, tree_revision):
1766
 
        parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
 
2018
        parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
1767
2019
                       self._iter_parent_trees()]
1768
2020
        vf.add_lines((file_id, tree_revision), parent_keys,
1769
2021
                     self.get_file_lines(file_id))
1773
2025
            vf.fallback_versionedfiles.append(base_vf)
1774
2026
        return tree_revision
1775
2027
 
1776
 
    def _stat_limbo_file(self, file_id):
1777
 
        trans_id = self._transform.trans_id_file_id(file_id)
 
2028
    def _stat_limbo_file(self, file_id=None, trans_id=None):
 
2029
        if trans_id is None:
 
2030
            trans_id = self._transform.trans_id_file_id(file_id)
1778
2031
        name = self._transform._limbo_name(trans_id)
1779
2032
        return os.lstat(name)
1780
2033
 
1995
2248
 
1996
2249
    def get_file_size(self, file_id):
1997
2250
        """See Tree.get_file_size"""
 
2251
        trans_id = self._transform.trans_id_file_id(file_id)
 
2252
        kind = self._transform.final_kind(trans_id)
 
2253
        if kind != 'file':
 
2254
            return None
 
2255
        if trans_id in self._transform._new_contents:
 
2256
            return self._stat_limbo_file(trans_id=trans_id).st_size
1998
2257
        if self.kind(file_id) == 'file':
1999
2258
            return self._transform._tree.get_file_size(file_id)
2000
2259
        else:
2001
2260
            return None
2002
2261
 
 
2262
    def get_file_verifier(self, file_id, path=None, stat_value=None):
 
2263
        trans_id = self._transform.trans_id_file_id(file_id)
 
2264
        kind = self._transform._new_contents.get(trans_id)
 
2265
        if kind is None:
 
2266
            return self._transform._tree.get_file_verifier(file_id)
 
2267
        if kind == 'file':
 
2268
            fileobj = self.get_file(file_id)
 
2269
            try:
 
2270
                return ("SHA1", sha_file(fileobj))
 
2271
            finally:
 
2272
                fileobj.close()
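
get_file_verifier returns a (kind, value) pair; for content that only exists in limbo it hashes the file object on the fly via osutils.sha_file. Roughly, that amounts to this stand-alone sketch ('somefile' is a placeholder path):

    import hashlib

    def sha_file(f):
        # hash a file object in chunks, as osutils.sha_file does
        s = hashlib.sha1()
        for chunk in iter(lambda: f.read(1 << 16), ''):
            s.update(chunk)
        return s.hexdigest()

    f = open('somefile', 'rb')   # placeholder
    try:
        verifier = ('SHA1', sha_file(f))
    finally:
        f.close()
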
 
2273
 
2003
2274
    def get_file_sha1(self, file_id, path=None, stat_value=None):
2004
2275
        trans_id = self._transform.trans_id_file_id(file_id)
2005
2276
        kind = self._transform._new_contents.get(trans_id)
2028
2299
            except errors.NoSuchId:
2029
2300
                return False
2030
2301
 
 
2302
    def has_filename(self, path):
 
2303
        trans_id = self._path2trans_id(path)
 
2304
        if trans_id in self._transform._new_contents:
 
2305
            return True
 
2306
        elif trans_id in self._transform._removed_contents:
 
2307
            return False
 
2308
        else:
 
2309
            return self._transform._tree.has_filename(path)
 
2310
 
2031
2311
    def path_content_summary(self, path):
2032
2312
        trans_id = self._path2trans_id(path)
2033
2313
        tt = self._transform
2121
2401
                                   self.get_file(file_id).readlines(),
2122
2402
                                   default_revision)
2123
2403
 
2124
 
    def get_symlink_target(self, file_id):
 
2404
    def get_symlink_target(self, file_id, path=None):
2125
2405
        """See Tree.get_symlink_target"""
2126
2406
        if not self._content_change(file_id):
2127
2407
            return self._transform._tree.get_symlink_target(file_id)
2265
2545
        if num > 0:  # more than just a root
2266
2546
            raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
2267
2547
    file_trans_id = {}
2268
 
    top_pb = bzrlib.ui.ui_factory.nested_progress_bar()
 
2548
    top_pb = ui.ui_factory.nested_progress_bar()
2269
2549
    pp = ProgressPhase("Build phase", 2, top_pb)
2270
2550
    if tree.inventory.root is not None:
2271
2551
        # This is kind of a hack: we should be altering the root
2284
2564
        pp.next_phase()
2285
2565
        file_trans_id[wt.get_root_id()] = \
2286
2566
            tt.trans_id_tree_file_id(wt.get_root_id())
2287
 
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
 
2567
        pb = ui.ui_factory.nested_progress_bar()
2288
2568
        try:
2289
2569
            deferred_contents = []
2290
2570
            num = 0
2304
2584
                    existing_files.update(f[0] for f in files)
2305
2585
            for num, (tree_path, entry) in \
2306
2586
                enumerate(tree.inventory.iter_entries_by_dir()):
2307
 
                pb.update("Building tree", num - len(deferred_contents), total)
 
2587
                pb.update(gettext("Building tree"), num - len(deferred_contents), total)
2308
2588
                if entry.parent_id is None:
2309
2589
                    continue
2310
2590
                reparent = False
2337
2617
                    executable = tree.is_executable(file_id, tree_path)
2338
2618
                    if executable:
2339
2619
                        tt.set_executability(executable, trans_id)
2340
 
                    trans_data = (trans_id, tree_path)
 
2620
                    trans_data = (trans_id, tree_path, entry.text_sha1)
2341
2621
                    deferred_contents.append((file_id, trans_data))
2342
2622
                else:
2343
2623
                    file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2359
2639
            precomputed_delta = None
2360
2640
        conflicts = cook_conflicts(raw_conflicts, tt)
2361
2641
        for conflict in conflicts:
2362
 
            warning(conflict)
 
2642
            trace.warning(unicode(conflict))
2363
2643
        try:
2364
2644
            wt.add_conflicts(conflicts)
2365
2645
        except errors.UnsupportedOperation:
2388
2668
        unchanged = dict(unchanged)
2389
2669
        new_desired_files = []
2390
2670
        count = 0
2391
 
        for file_id, (trans_id, tree_path) in desired_files:
 
2671
        for file_id, (trans_id, tree_path, text_sha1) in desired_files:
2392
2672
            accelerator_path = unchanged.get(file_id)
2393
2673
            if accelerator_path is None:
2394
 
                new_desired_files.append((file_id, (trans_id, tree_path)))
 
2674
                new_desired_files.append((file_id,
 
2675
                    (trans_id, tree_path, text_sha1)))
2395
2676
                continue
2396
 
            pb.update('Adding file contents', count + offset, total)
 
2677
            pb.update(gettext('Adding file contents'), count + offset, total)
2397
2678
            if hardlink:
2398
2679
                tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
2399
2680
                                   trans_id)
2404
2685
                    contents = filtered_output_bytes(contents, filters,
2405
2686
                        ContentFilterContext(tree_path, tree))
2406
2687
                try:
2407
 
                    tt.create_file(contents, trans_id)
 
2688
                    tt.create_file(contents, trans_id, sha1=text_sha1)
2408
2689
                finally:
2409
2690
                    try:
2410
2691
                        contents.close()
2413
2694
                        pass
2414
2695
            count += 1
2415
2696
        offset += count
2416
 
    for count, ((trans_id, tree_path), contents) in enumerate(
 
2697
    for count, ((trans_id, tree_path, text_sha1), contents) in enumerate(
2417
2698
            tree.iter_files_bytes(new_desired_files)):
2418
2699
        if wt.supports_content_filtering():
2419
2700
            filters = wt._content_filter_stack(tree_path)
2420
2701
            contents = filtered_output_bytes(contents, filters,
2421
2702
                ContentFilterContext(tree_path, tree))
2422
 
        tt.create_file(contents, trans_id)
2423
 
        pb.update('Adding file contents', count + offset, total)
 
2703
        tt.create_file(contents, trans_id, sha1=text_sha1)
 
2704
        pb.update(gettext('Adding file contents'), count + offset, total)
2424
2705
 
2425
2706
 
2426
2707
def _reparent_children(tt, old_parent, new_parent):
2427
2708
    for child in tt.iter_tree_children(old_parent):
2428
2709
        tt.adjust_path(tt.final_name(child), new_parent, child)
2429
2710
 
 
2711
 
2430
2712
def _reparent_transform_children(tt, old_parent, new_parent):
2431
2713
    by_parent = tt.by_parent()
2432
2714
    for child in by_parent[old_parent]:
2433
2715
        tt.adjust_path(tt.final_name(child), new_parent, child)
2434
2716
    return by_parent[old_parent]
2435
2717
 
 
2718
 
2436
2719
def _content_match(tree, entry, file_id, kind, target_path):
2437
2720
    if entry.kind != kind:
2438
2721
        return False
2538
2821
        tt.set_executability(entry.executable, trans_id)
2539
2822
 
2540
2823
 
 
2824
@deprecated_function(deprecated_in((2, 3, 0)))
2541
2825
def get_backup_name(entry, by_parent, parent_trans_id, tt):
2542
2826
    return _get_backup_name(entry.name, by_parent, parent_trans_id, tt)
2543
2827
 
2544
2828
 
 
2829
@deprecated_function(deprecated_in((2, 3, 0)))
2545
2830
def _get_backup_name(name, by_parent, parent_trans_id, tt):
2546
2831
    """Produce a backup-style name that appears to be available"""
2547
2832
    def name_gen():
2596
2881
                unversioned_filter=working_tree.is_ignored)
2597
2882
            delta.report_changes(tt.iter_changes(), change_reporter)
2598
2883
        for conflict in conflicts:
2599
 
            warning(conflict)
 
2884
            trace.warning(unicode(conflict))
2600
2885
        pp.next_phase()
2601
2886
        tt.apply()
2602
2887
        working_tree.set_merge_modified(merge_modified)
2610
2895
def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
2611
2896
                              backups, pp, basis_tree=None,
2612
2897
                              merge_modified=None):
2613
 
    child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
 
2898
    child_pb = ui.ui_factory.nested_progress_bar()
2614
2899
    try:
2615
2900
        if merge_modified is None:
2616
2901
            merge_modified = working_tree.merge_modified()
2619
2904
                                      merge_modified, basis_tree)
2620
2905
    finally:
2621
2906
        child_pb.finished()
2622
 
    child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
 
2907
    child_pb = ui.ui_factory.nested_progress_bar()
2623
2908
    try:
2624
2909
        raw_conflicts = resolve_conflicts(tt, child_pb,
2625
2910
            lambda t, c: conflict_pass(t, c, target_tree))
2633
2918
                 backups, merge_modified, basis_tree=None):
2634
2919
    if basis_tree is not None:
2635
2920
        basis_tree.lock_read()
2636
 
    change_list = target_tree.iter_changes(working_tree,
 
2921
    # We ask the working_tree for its changes relative to the target, rather
 
2922
    # than the target changes relative to the working tree. Because WT4 has an
 
2923
    # optimizer to compare itself to a target, but no optimizer for the
 
2924
    # reverse.
 
2925
    change_list = working_tree.iter_changes(target_tree,
2637
2926
        specific_files=specific_files, pb=pb)
2638
2927
    if target_tree.get_root_id() is None:
2639
2928
        skip_root = True
2643
2932
        deferred_files = []
2644
2933
        for id_num, (file_id, path, changed_content, versioned, parent, name,
2645
2934
                kind, executable) in enumerate(change_list):
2646
 
            if skip_root and file_id[0] is not None and parent[0] is None:
 
2935
            target_path, wt_path = path
 
2936
            target_versioned, wt_versioned = versioned
 
2937
            target_parent, wt_parent = parent
 
2938
            target_name, wt_name = name
 
2939
            target_kind, wt_kind = kind
 
2940
            target_executable, wt_executable = executable
 
2941
            if skip_root and wt_parent is None:
2647
2942
                continue
2648
2943
            trans_id = tt.trans_id_file_id(file_id)
2649
2944
            mode_id = None
2650
2945
            if changed_content:
2651
2946
                keep_content = False
2652
 
                if kind[0] == 'file' and (backups or kind[1] is None):
 
2947
                if wt_kind == 'file' and (backups or target_kind is None):
2653
2948
                    wt_sha1 = working_tree.get_file_sha1(file_id)
2654
2949
                    if merge_modified.get(file_id) != wt_sha1:
2655
2950
                        # acquire the basis tree lazily to prevent the
2658
2953
                        if basis_tree is None:
2659
2954
                            basis_tree = working_tree.basis_tree()
2660
2955
                            basis_tree.lock_read()
2661
 
                        if file_id in basis_tree:
 
2956
                        if basis_tree.has_id(file_id):
2662
2957
                            if wt_sha1 != basis_tree.get_file_sha1(file_id):
2663
2958
                                keep_content = True
2664
 
                        elif kind[1] is None and not versioned[1]:
 
2959
                        elif target_kind is None and not target_versioned:
2665
2960
                            keep_content = True
2666
 
                if kind[0] is not None:
 
2961
                if wt_kind is not None:
2667
2962
                    if not keep_content:
2668
2963
                        tt.delete_contents(trans_id)
2669
 
                    elif kind[1] is not None:
2670
 
                        parent_trans_id = tt.trans_id_file_id(parent[0])
2671
 
                        by_parent = tt.by_parent()
2672
 
                        backup_name = _get_backup_name(name[0], by_parent,
2673
 
                                                       parent_trans_id, tt)
 
2964
                    elif target_kind is not None:
 
2965
                        parent_trans_id = tt.trans_id_file_id(wt_parent)
 
2966
                        backup_name = tt._available_backup_name(
 
2967
                            wt_name, parent_trans_id)
2674
2968
                        tt.adjust_path(backup_name, parent_trans_id, trans_id)
2675
 
                        new_trans_id = tt.create_path(name[0], parent_trans_id)
2676
 
                        if versioned == (True, True):
 
2969
                        new_trans_id = tt.create_path(wt_name, parent_trans_id)
 
2970
                        if wt_versioned and target_versioned:
2677
2971
                            tt.unversion_file(trans_id)
2678
2972
                            tt.version_file(file_id, new_trans_id)
2679
2973
                        # New contents should have the same unix perms as old
2680
2974
                        # contents
2681
2975
                        mode_id = trans_id
2682
2976
                        trans_id = new_trans_id
2683
 
                if kind[1] in ('directory', 'tree-reference'):
 
2977
                if target_kind in ('directory', 'tree-reference'):
2684
2978
                    tt.create_directory(trans_id)
2685
 
                    if kind[1] == 'tree-reference':
 
2979
                    if target_kind == 'tree-reference':
2686
2980
                        revision = target_tree.get_reference_revision(file_id,
2687
 
                                                                      path[1])
 
2981
                                                                      target_path)
2688
2982
                        tt.set_tree_reference(revision, trans_id)
2689
 
                elif kind[1] == 'symlink':
 
2983
                elif target_kind == 'symlink':
2690
2984
                    tt.create_symlink(target_tree.get_symlink_target(file_id),
2691
2985
                                      trans_id)
2692
 
                elif kind[1] == 'file':
 
2986
                elif target_kind == 'file':
2693
2987
                    deferred_files.append((file_id, (trans_id, mode_id)))
2694
2988
                    if basis_tree is None:
2695
2989
                        basis_tree = working_tree.basis_tree()
2696
2990
                        basis_tree.lock_read()
2697
2991
                    new_sha1 = target_tree.get_file_sha1(file_id)
2698
 
                    if (file_id in basis_tree and new_sha1 ==
2699
 
                        basis_tree.get_file_sha1(file_id)):
 
2992
                    if (basis_tree.has_id(file_id) and
 
2993
                        new_sha1 == basis_tree.get_file_sha1(file_id)):
2700
2994
                        if file_id in merge_modified:
2701
2995
                            del merge_modified[file_id]
2702
2996
                    else:
2703
2997
                        merge_modified[file_id] = new_sha1
2704
2998
 
2705
2999
                    # preserve the execute bit when backing up
2706
 
                    if keep_content and executable[0] == executable[1]:
2707
 
                        tt.set_executability(executable[1], trans_id)
2708
 
                elif kind[1] is not None:
2709
 
                    raise AssertionError(kind[1])
2710
 
            if versioned == (False, True):
 
3000
                    if keep_content and wt_executable == target_executable:
 
3001
                        tt.set_executability(target_executable, trans_id)
 
3002
                elif target_kind is not None:
 
3003
                    raise AssertionError(target_kind)
 
3004
            if not wt_versioned and target_versioned:
2711
3005
                tt.version_file(file_id, trans_id)
2712
 
            if versioned == (True, False):
 
3006
            if wt_versioned and not target_versioned:
2713
3007
                tt.unversion_file(trans_id)
2714
 
            if (name[1] is not None and
2715
 
                (name[0] != name[1] or parent[0] != parent[1])):
2716
 
                if name[1] == '' and parent[1] is None:
 
3008
            if (target_name is not None and
 
3009
                (wt_name != target_name or wt_parent != target_parent)):
 
3010
                if target_name == '' and target_parent is None:
2717
3011
                    parent_trans = ROOT_PARENT
2718
3012
                else:
2719
 
                    parent_trans = tt.trans_id_file_id(parent[1])
2720
 
                if parent[0] is None and versioned[0]:
2721
 
                    tt.adjust_root_path(name[1], parent_trans)
 
3013
                    parent_trans = tt.trans_id_file_id(target_parent)
 
3014
                if wt_parent is None and wt_versioned:
 
3015
                    tt.adjust_root_path(target_name, parent_trans)
2722
3016
                else:
2723
 
                    tt.adjust_path(name[1], parent_trans, trans_id)
2724
 
            if executable[0] != executable[1] and kind[1] == "file":
2725
 
                tt.set_executability(executable[1], trans_id)
 
3017
                    tt.adjust_path(target_name, parent_trans, trans_id)
 
3018
            if wt_executable != target_executable and target_kind == "file":
 
3019
                tt.set_executability(target_executable, trans_id)
2726
3020
        if working_tree.supports_content_filtering():
2727
3021
            for index, ((trans_id, mode_id), bytes) in enumerate(
2728
3022
                target_tree.iter_files_bytes(deferred_files)):
2754
3048
    pb = ui.ui_factory.nested_progress_bar()
2755
3049
    try:
2756
3050
        for n in range(10):
2757
 
            pb.update('Resolution pass', n+1, 10)
 
3051
            pb.update(gettext('Resolution pass'), n+1, 10)
2758
3052
            conflicts = tt.find_conflicts()
2759
3053
            if len(conflicts) == 0:
2760
3054
                return new_conflicts
2799
3093
 
2800
3094
        elif c_type == 'missing parent':
2801
3095
            trans_id = conflict[1]
2802
 
            try:
2803
 
                tt.cancel_deletion(trans_id)
2804
 
                new_conflicts.add(('deleting parent', 'Not deleting',
2805
 
                                   trans_id))
2806
 
            except KeyError:
 
3096
            if trans_id in tt._removed_contents:
 
3097
                cancel_deletion = True
 
3098
                orphans = tt._get_potential_orphans(trans_id)
 
3099
                if orphans:
 
3100
                    cancel_deletion = False
 
3101
                    # All children are orphans
 
3102
                    for o in orphans:
 
3103
                        try:
 
3104
                            tt.new_orphan(o, trans_id)
 
3105
                        except OrphaningError:
 
3106
                            # Something bad happened so we cancel the directory
 
3107
                            # deletion which will leave it in place with a
 
3108
                            # conflict. The user can deal with it from there.
 
3109
                            # Note that this also catch the case where we don't
 
3110
                            # want to create orphans and leave the directory in
 
3111
                            # place.
 
3112
                            cancel_deletion = True
 
3113
                            break
 
3114
                if cancel_deletion:
 
3115
                    # Cancel the directory deletion
 
3116
                    tt.cancel_deletion(trans_id)
 
3117
                    new_conflicts.add(('deleting parent', 'Not deleting',
 
3118
                                       trans_id))
 
3119
            else:
2807
3120
                create = True
2808
3121
                try:
2809
3122
                    tt.final_name(trans_id)
2812
3125
                        file_id = tt.final_file_id(trans_id)
2813
3126
                        if file_id is None:
2814
3127
                            file_id = tt.inactive_file_id(trans_id)
2815
 
                        entry = path_tree.inventory[file_id]
 
3128
                        _, entry = path_tree.iter_entries_by_dir(
 
3129
                            [file_id]).next()
2816
3130
                        # special-case the other tree root (move its
2817
3131
                        # children to current root)
2818
3132
                        if entry.parent_id is None:
2833
3147
        elif c_type == 'unversioned parent':
2834
3148
            file_id = tt.inactive_file_id(conflict[1])
2835
3149
            # special-case the other tree root (move its children instead)
2836
 
            if path_tree and file_id in path_tree:
2837
 
                if path_tree.inventory[file_id].parent_id is None:
 
3150
            if path_tree and path_tree.has_id(file_id):
 
3151
                if path_tree.path2id('') == file_id:
 
3152
                    # This is the root entry, skip it
2838
3153
                    continue
2839
3154
            tt.version_file(file_id, conflict[1])
2840
3155
            new_conflicts.add((c_type, 'Versioned directory', conflict[1]))
2856
3171
 
2857
3172
def cook_conflicts(raw_conflicts, tt):
2858
3173
    """Generate a list of cooked conflicts, sorted by file path"""
2859
 
    from bzrlib.conflicts import Conflict
2860
3174
    conflict_iter = iter_cook_conflicts(raw_conflicts, tt)
2861
 
    return sorted(conflict_iter, key=Conflict.sort_key)
 
3175
    return sorted(conflict_iter, key=conflicts.Conflict.sort_key)
2862
3176
 
2863
3177
 
2864
3178
def iter_cook_conflicts(raw_conflicts, tt):
2865
 
    from bzrlib.conflicts import Conflict
2866
3179
    fp = FinalPaths(tt)
2867
3180
    for conflict in raw_conflicts:
2868
3181
        c_type = conflict[0]
2870
3183
        modified_path = fp.get_path(conflict[2])
2871
3184
        modified_id = tt.final_file_id(conflict[2])
2872
3185
        if len(conflict) == 3:
2873
 
            yield Conflict.factory(c_type, action=action, path=modified_path,
2874
 
                                     file_id=modified_id)
 
3186
            yield conflicts.Conflict.factory(
 
3187
                c_type, action=action, path=modified_path, file_id=modified_id)
2875
3188
 
2876
3189
        else:
2877
3190
            conflicting_path = fp.get_path(conflict[3])
2878
3191
            conflicting_id = tt.final_file_id(conflict[3])
2879
 
            yield Conflict.factory(c_type, action=action, path=modified_path,
2880
 
                                   file_id=modified_id,
2881
 
                                   conflict_path=conflicting_path,
2882
 
                                   conflict_file_id=conflicting_id)
 
3192
            yield conflicts.Conflict.factory(
 
3193
                c_type, action=action, path=modified_path,
 
3194
                file_id=modified_id,
 
3195
                conflict_path=conflicting_path,
 
3196
                conflict_file_id=conflicting_id)
2883
3197
 
2884
3198
 
2885
3199
class _FileMover(object):
2917
3231
            try:
2918
3232
                os.rename(to, from_)
2919
3233
            except OSError, e:
2920
 
                raise errors.TransformRenameFailed(to, from_, str(e), e.errno)                
 
3234
                raise errors.TransformRenameFailed(to, from_, str(e), e.errno)
2921
3235
        # after rollback, don't reuse _FileMover
2922
3236
        past_renames = None
2923
3237
        pending_deletions = None