~bzr-pqm/bzr/bzr.dev


Viewing changes to bzrlib/transform.py

  • Committer: Martin Packman
  • Date: 2011-10-06 16:41:45 UTC
  • mfrom: (6015.33.10 2.4)
  • mto: This revision was merged to the branch mainline in revision 6202.
  • Revision ID: martin.packman@canonical.com-20111006164145-o98oqn32440extgt
Merge 2.4 into dev

--- bzrlib/transform.py
+++ bzrlib/transform.py
@@ -19 +19 @@
 from stat import S_ISREG, S_IEXEC
 import time
 
-import bzrlib
 from bzrlib import (
     errors,
     lazy_import,
     registry,
+    trace,
     tree,
     )
 lazy_import.lazy_import(globals(), """
@@ -32 +32 @@
     bencode,
     bzrdir,
     commit,
+    conflicts,
     delta,
     errors,
     inventory,
     multiparent,
     osutils,
     revision as _mod_revision,
-    trace,
     ui,
     urlutils,
     )
+from bzrlib.i18n import gettext
 """)
 from bzrlib.errors import (DuplicateKey, MalformedTransform, NoSuchFile,
                            ReusingTransform, CantMoveRoot,
                            ExistingLimbo, ImmortalLimbo, NoFinalPath,
                            UnableCreateSymlink)
 from bzrlib.filters import filtered_output_bytes, ContentFilterContext
-from bzrlib.inventory import InventoryEntry
 from bzrlib.osutils import (
     delete_any,
     file_kind,
@@ -64 +64 @@
     deprecated_in,
     deprecated_method,
     )
-from bzrlib.trace import warning
 
 
 ROOT_PARENT = "root-parent"
@@ -105 +104 @@
         self._new_parent = {}
         # mapping of trans_id with new contents -> new file_kind
         self._new_contents = {}
+        # mapping of trans_id => (sha1 of content, stat_value)
+        self._observed_sha1s = {}
         # Set of trans_ids whose contents will be removed
         self._removed_contents = set()
         # Mapping of trans_id -> new execute-bit value
@@ -138 +139 @@
         # A counter of how many files have been renamed
         self.rename_count = 0
 
+    def __enter__(self):
+        """Support Context Manager API."""
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        """Support Context Manager API."""
+        self.finalize()
+
     def finalize(self):
         """Release the working tree lock, if held.
 
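The hunk above adds context-manager support, so a transform is finalized automatically even when an error escapes. A minimal usage sketch, assuming `wt` is an already-opened working tree (for example from WorkingTree.open('.')); TreeTransform takes and releases the tree lock itself:

    from bzrlib import transform

    with transform.TreeTransform(wt) as tt:
        # schedule a new file at the tree root and apply the transform;
        # __exit__ calls finalize() even if apply() raises
        trans_id = tt.new_file('hello.txt', tt.root, ['hello world\n'])
        tt.apply()
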
@@ -218 +227 @@
         This means that the old root trans-id becomes obsolete, so it is
         recommended only to invoke this after the root trans-id has become
         irrelevant.
+
         """
         new_roots = [k for k, v in self._new_parent.iteritems() if v is
                      ROOT_PARENT]
@@ -229 +239 @@
             self._new_root = new_roots[0]
             return
         old_new_root = new_roots[0]
-        # TODO: What to do if a old_new_root is present, but self._new_root is
-        #       not listed as being removed? This code explicitly unversions
-        #       the old root and versions it with the new file_id. Though that
-        #       seems like an incomplete delta
-
         # unversion the new root's directory.
-        file_id = self.final_file_id(old_new_root)
+        if self.final_kind(self._new_root) is None:
+            file_id = self.final_file_id(old_new_root)
+        else:
+            file_id = self.final_file_id(self._new_root)
         if old_new_root in self._new_id:
             self.cancel_versioning(old_new_root)
         else:
@@ -245 +253 @@
         if (self.tree_file_id(self._new_root) is not None and
             self._new_root not in self._removed_id):
             self.unversion_file(self._new_root)
-        self.version_file(file_id, self._new_root)
+        if file_id is not None:
+            self.version_file(file_id, self._new_root)
 
         # Now move children of new root into old root directory.
         # Ensure all children are registered with the transaction, but don't
@@ -385 +394 @@
         return sorted(FinalPaths(self).get_paths(new_ids))
 
     def _inventory_altered(self):
-        """Get the trans_ids and paths of files needing new inv entries."""
-        new_ids = set()
-        for id_set in [self._new_name, self._new_parent, self._new_id,
+        """Determine which trans_ids need new Inventory entries.
+
+        An new entry is needed when anything that would be reflected by an
+        inventory entry changes, including file name, file_id, parent file_id,
+        file kind, and the execute bit.
+
+        Some care is taken to return entries with real changes, not cases
+        where the value is deleted and then restored to its original value,
+        but some actually unchanged values may be returned.
+
+        :returns: A list of (path, trans_id) for all items requiring an
+            inventory change. Ordered by path.
+        """
+        changed_ids = set()
+        # Find entries whose file_ids are new (or changed).
+        new_file_id = set(t for t in self._new_id
+                          if self._new_id[t] != self.tree_file_id(t))
+        for id_set in [self._new_name, self._new_parent, new_file_id,
                        self._new_executability]:
-            new_ids.update(id_set)
+            changed_ids.update(id_set)
+        # removing implies a kind change
         changed_kind = set(self._removed_contents)
+        # so does adding
         changed_kind.intersection_update(self._new_contents)
-        changed_kind.difference_update(new_ids)
+        # Ignore entries that are already known to have changed.
+        changed_kind.difference_update(changed_ids)
+        #  to keep only the truly changed ones
         changed_kind = (t for t in changed_kind
                         if self.tree_kind(t) != self.final_kind(t))
-        new_ids.update(changed_kind)
-        return sorted(FinalPaths(self).get_paths(new_ids))
+        # all kind changes will alter the inventory
+        changed_ids.update(changed_kind)
+        # To find entries with changed parent_ids, find parents which existed,
+        # but changed file_id.
+        changed_file_id = set(t for t in new_file_id if t in self._removed_id)
+        # Now add all their children to the set.
+        for parent_trans_id in new_file_id:
+            changed_ids.update(self.iter_tree_children(parent_trans_id))
+        return sorted(FinalPaths(self).get_paths(changed_ids))
 
     def final_kind(self, trans_id):
         """Determine the final file kind, after any changes applied.
@@ -629 +664 @@
             if kind is None:
                 conflicts.append(('versioning no contents', trans_id))
                 continue
-            if not InventoryEntry.versionable_kind(kind):
+            if not inventory.InventoryEntry.versionable_kind(kind):
                 conflicts.append(('versioning bad kind', trans_id, kind))
         return conflicts
 
@@ -754 +789 @@
         return trans_id
 
     def new_file(self, name, parent_id, contents, file_id=None,
-                 executable=None):
+                 executable=None, sha1=None):
         """Convenience method to create files.
 
         name is the name of the file to create.
@@ -767 +802 @@
         trans_id = self._new_entry(name, parent_id, file_id)
         # TODO: rather than scheduling a set_executable call,
         # have create_file create the file with the right mode.
-        self.create_file(contents, trans_id)
+        self.create_file(contents, trans_id, sha1=sha1)
         if executable is not None:
             self.set_executability(executable, trans_id)
         return trans_id
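new_file() now accepts an optional sha1 and forwards it to create_file(), which records it in _observed_sha1s so the hash does not have to be recomputed after apply(). A hedged sketch, assuming `wt` is an open working tree; sha_strings() stands in for a hash the caller already has (for example from a repository text record):

    from bzrlib import osutils, transform

    tt = transform.TreeTransform(wt)
    try:
        content = ['known content\n']
        tt.new_file('cached.txt', tt.root, content,
                    sha1=osutils.sha_strings(content))
        tt.apply()
    finally:
        tt.finalize()
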
@@ -1155 +1190 @@
         self._deletiondir = None
         # A mapping of transform ids to their limbo filename
         self._limbo_files = {}
+        self._possibly_stale_limbo_files = set()
         # A mapping of transform ids to a set of the transform ids of children
         # that their limbo directory has
         self._limbo_children = {}
@@ -1173 +1209 @@
         if self._tree is None:
             return
         try:
-            entries = [(self._limbo_name(t), t, k) for t, k in
-                       self._new_contents.iteritems()]
-            entries.sort(reverse=True)
-            for path, trans_id, kind in entries:
-                delete_any(path)
+            limbo_paths = self._limbo_files.values() + list(
+                self._possibly_stale_limbo_files)
+            limbo_paths = sorted(limbo_paths, reverse=True)
+            for path in limbo_paths:
+                try:
+                    delete_any(path)
+                except OSError, e:
+                    if e.errno != errno.ENOENT:
+                        raise
+                    # XXX: warn? perhaps we just got interrupted at an
+                    # inconvenient moment, but perhaps files are disappearing
+                    # from under us?
             try:
                 delete_any(self._limbodir)
             except OSError:
@@ -1232 +1275 @@
         entries from _limbo_files, because they are now stale.
         """
         for trans_id in trans_ids:
-            old_path = self._limbo_files.pop(trans_id)
+            old_path = self._limbo_files[trans_id]
+            self._possibly_stale_limbo_files.add(old_path)
+            del self._limbo_files[trans_id]
             if trans_id not in self._new_contents:
                 continue
             new_path = self._limbo_name(trans_id)
             os.rename(old_path, new_path)
+            self._possibly_stale_limbo_files.remove(old_path)
             for descendant in self._limbo_descendants(trans_id):
                 desc_path = self._limbo_files[descendant]
                 desc_path = new_path + desc_path[len(old_path):]
@@ -1249 +1295 @@
             descendants.update(self._limbo_descendants(descendant))
         return descendants
 
-    def create_file(self, contents, trans_id, mode_id=None):
+    def create_file(self, contents, trans_id, mode_id=None, sha1=None):
         """Schedule creation of a new file.
 
-        See also new_file.
-
-        Contents is an iterator of strings, all of which will be written
-        to the target destination.
-
-        New file takes the permissions of any existing file with that id,
-        unless mode_id is specified.
+        :seealso: new_file.
+
+        :param contents: an iterator of strings, all of which will be written
+            to the target destination.
+        :param trans_id: TreeTransform handle
+        :param mode_id: If not None, force the mode of the target file to match
+            the mode of the object referenced by mode_id.
+            Otherwise, we will try to preserve mode bits of an existing file.
+        :param sha1: If the sha1 of this content is already known, pass it in.
+            We can use it to prevent future sha1 computations.
         """
         name = self._limbo_name(trans_id)
         f = open(name, 'wb')
         try:
-            try:
-                unique_add(self._new_contents, trans_id, 'file')
-            except:
-                # Clean up the file, it never got registered so
-                # TreeTransform.finalize() won't clean it up.
-                f.close()
-                os.unlink(name)
-                raise
-
+            unique_add(self._new_contents, trans_id, 'file')
             f.writelines(contents)
         finally:
             f.close()
         self._set_mtime(name)
         self._set_mode(trans_id, mode_id, S_ISREG)
+        # It is unfortunate we have to use lstat instead of fstat, but we just
+        # used utime and chmod on the file, so we need the accurate final
+        # details.
+        if sha1 is not None:
+            self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
 
     def _read_file_chunks(self, trans_id):
         cur_file = open(self._limbo_name(trans_id), 'rb')
@@ -1341 +1387 @@
     def cancel_creation(self, trans_id):
         """Cancel the creation of new file contents."""
         del self._new_contents[trans_id]
+        if trans_id in self._observed_sha1s:
+            del self._observed_sha1s[trans_id]
         children = self._limbo_children.get(trans_id)
         # if this is a limbo directory with children, move them before removing
         # the directory
@@ -1362 +1410 @@
         if orphan_policy is None:
             orphan_policy = default_policy
         if orphan_policy not in orphaning_registry:
-            trace.warning('%s (from %s) is not a known policy, defaulting to %s'
-                          % (orphan_policy, conf_var_name, default_policy))
+            trace.warning('%s (from %s) is not a known policy, defaulting '
+                'to %s' % (orphan_policy, conf_var_name, default_policy))
             orphan_policy = default_policy
         handle_orphan = orphaning_registry.get(orphan_policy)
         handle_orphan(self, trans_id, parent_id)
@@ -1679 +1727 @@
         child_pb = ui.ui_factory.nested_progress_bar()
         try:
             if precomputed_delta is None:
-                child_pb.update('Apply phase', 0, 2)
+                child_pb.update(gettext('Apply phase'), 0, 2)
                 inventory_delta = self._generate_inventory_delta()
                 offset = 1
             else:
@@ -1690 +1738 @@
             else:
                 mover = _mover
             try:
-                child_pb.update('Apply phase', 0 + offset, 2 + offset)
+                child_pb.update(gettext('Apply phase'), 0 + offset, 2 + offset)
                 self._apply_removals(mover)
-                child_pb.update('Apply phase', 1 + offset, 2 + offset)
+                child_pb.update(gettext('Apply phase'), 1 + offset, 2 + offset)
                 modified_paths = self._apply_insertions(mover)
             except:
                 mover.rollback()
@@ -1701 +1749 @@
                 mover.apply_deletions()
         finally:
             child_pb.finished()
+        if self.final_file_id(self.root) is None:
+            inventory_delta = [e for e in inventory_delta if e[0] != '']
         self._tree.apply_inventory_delta(inventory_delta)
+        self._apply_observed_sha1s()
         self._done = True
         self.finalize()
         return _TransformResults(modified_paths, self.rename_count)
@@ -1715 +1766 @@
         try:
             for num, trans_id in enumerate(self._removed_id):
                 if (num % 10) == 0:
-                    child_pb.update('removing file', num, total_entries)
+                    child_pb.update(gettext('removing file'), num, total_entries)
                 if trans_id == self._new_root:
                     file_id = self._tree.get_root_id()
                 else:
@@ -1733 +1784 @@
             final_kinds = {}
             for num, (path, trans_id) in enumerate(new_paths):
                 if (num % 10) == 0:
-                    child_pb.update('adding file',
+                    child_pb.update(gettext('adding file'),
                                     num + len(self._removed_id), total_entries)
                 file_id = new_path_file_ids[trans_id]
                 if file_id is None:
@@ -1779 +1830 @@
         tree_paths.sort(reverse=True)
         child_pb = ui.ui_factory.nested_progress_bar()
         try:
-            for num, data in enumerate(tree_paths):
-                path, trans_id = data
-                child_pb.update('removing file', num, len(tree_paths))
+            for num, (path, trans_id) in enumerate(tree_paths):
+                # do not attempt to move root into a subdirectory of itself.
+                if path == '':
+                    continue
+                child_pb.update(gettext('removing file'), num, len(tree_paths))
                 full_path = self._tree.abspath(path)
                 if trans_id in self._removed_contents:
                     delete_path = os.path.join(self._deletiondir, trans_id)
@@ -1816 +1869 @@
         try:
             for num, (path, trans_id) in enumerate(new_paths):
                 if (num % 10) == 0:
-                    child_pb.update('adding file', num, len(new_paths))
+                    child_pb.update(gettext('adding file'), num, len(new_paths))
                 full_path = self._tree.abspath(path)
                 if trans_id in self._needs_rename:
                     try:
@@ -1827 +1880 @@
                             raise
                     else:
                         self.rename_count += 1
+                    # TODO: if trans_id in self._observed_sha1s, we should
+                    #       re-stat the final target, since ctime will be
+                    #       updated by the change.
                 if (trans_id in self._new_contents or
                     self.path_changed(trans_id)):
                     if trans_id in self._new_contents:
                         modified_paths.append(full_path)
                 if trans_id in self._new_executability:
                     self._set_executability(path, trans_id)
+                if trans_id in self._observed_sha1s:
+                    o_sha1, o_st_val = self._observed_sha1s[trans_id]
+                    st = osutils.lstat(full_path)
+                    self._observed_sha1s[trans_id] = (o_sha1, st)
         finally:
             child_pb.finished()
+        for path, trans_id in new_paths:
+            # new_paths includes stuff like workingtree conflicts. Only the
+            # stuff in new_contents actually comes from limbo.
+            if trans_id in self._limbo_files:
+                del self._limbo_files[trans_id]
         self._new_contents.clear()
         return modified_paths
 
+    def _apply_observed_sha1s(self):
+        """After we have finished renaming everything, update observed sha1s
+
+        This has to be done after self._tree.apply_inventory_delta, otherwise
+        it doesn't know anything about the files we are updating. Also, we want
+        to do this as late as possible, so that most entries end up cached.
+        """
+        # TODO: this doesn't update the stat information for directories. So
+        #       the first 'bzr status' will still need to rewrite
+        #       .bzr/checkout/dirstate. However, we at least don't need to
+        #       re-read all of the files.
+        # TODO: If the operation took a while, we could do a time.sleep(3) here
+        #       to allow the clock to tick over and ensure we won't have any
+        #       problems. (we could observe start time, and finish time, and if
+        #       it is less than eg 10% overhead, add a sleep call.)
+        paths = FinalPaths(self)
+        for trans_id, observed in self._observed_sha1s.iteritems():
+            path = paths.get_path(trans_id)
+            # We could get the file_id, but dirstate prefers to use the path
+            # anyway, and it is 'cheaper' to determine.
+            # file_id = self._new_id[trans_id]
+            self._tree._observed_sha1(None, path, observed)
+
 
 class TransformPreview(DiskTreeTransform):
     """A TreeTransform for generating preview trees.
@@ -1859 +1947 @@
         path = self._tree_id_paths.get(trans_id)
         if path is None:
             return None
-        file_id = self._tree.path2id(path)
-        try:
-            return self._tree.kind(file_id)
-        except errors.NoSuchFile:
-            return None
+        kind = self._tree.path_content_summary(path)[0]
+        if kind == 'missing':
+            kind = None
+        return kind
 
     def _set_mode(self, trans_id, mode_id, typefunc):
         """Set the mode of new file contents.
@@ -1893 +1980 @@
         raise NotImplementedError(self.new_orphan)
 
 
-class _PreviewTree(tree.Tree):
+class _PreviewTree(tree.InventoryTree):
     """Partial implementation of Tree to support show_diff_trees"""
 
     def __init__(self, transform):
@@ -1928 +2015 @@
                 yield self._get_repository().revision_tree(revision_id)
 
     def _get_file_revision(self, file_id, vf, tree_revision):
-        parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
+        parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
                        self._iter_parent_trees()]
         vf.add_lines((file_id, tree_revision), parent_keys,
                      self.get_file_lines(file_id))
@@ -1938 +2025 @@
             vf.fallback_versionedfiles.append(base_vf)
         return tree_revision
 
-    def _stat_limbo_file(self, file_id):
-        trans_id = self._transform.trans_id_file_id(file_id)
+    def _stat_limbo_file(self, file_id=None, trans_id=None):
+        if trans_id is None:
+            trans_id = self._transform.trans_id_file_id(file_id)
         name = self._transform._limbo_name(trans_id)
         return os.lstat(name)
 
@@ -2160 +2248 @@
 
     def get_file_size(self, file_id):
        """See Tree.get_file_size"""
+        trans_id = self._transform.trans_id_file_id(file_id)
+        kind = self._transform.final_kind(trans_id)
+        if kind != 'file':
+            return None
+        if trans_id in self._transform._new_contents:
+            return self._stat_limbo_file(trans_id=trans_id).st_size
         if self.kind(file_id) == 'file':
             return self._transform._tree.get_file_size(file_id)
         else:
             return None
 
+    def get_file_verifier(self, file_id, path=None, stat_value=None):
+        trans_id = self._transform.trans_id_file_id(file_id)
+        kind = self._transform._new_contents.get(trans_id)
+        if kind is None:
+            return self._transform._tree.get_file_verifier(file_id)
+        if kind == 'file':
+            fileobj = self.get_file(file_id)
+            try:
+                return ("SHA1", sha_file(fileobj))
+            finally:
+                fileobj.close()
+
     def get_file_sha1(self, file_id, path=None, stat_value=None):
         trans_id = self._transform.trans_id_file_id(file_id)
         kind = self._transform._new_contents.get(trans_id)
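The new get_file_verifier() returns a (kind, value) pair rather than assuming SHA1, so callers can compare file content across trees without caring which hash a given tree natively stores. A hedged sketch of how a caller might use it, assuming both trees are locked, contain file_id, and implement get_file_verifier():

    def file_contents_match(tree_a, tree_b, file_id):
        # Verifiers are only comparable when they are of the same kind.
        kind_a, value_a = tree_a.get_file_verifier(file_id)
        kind_b, value_b = tree_b.get_file_verifier(file_id)
        return kind_a == kind_b and value_a == value_b
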
@@ -2193 +2299 @@
             except errors.NoSuchId:
                 return False
 
+    def has_filename(self, path):
+        trans_id = self._path2trans_id(path)
+        if trans_id in self._transform._new_contents:
+            return True
+        elif trans_id in self._transform._removed_contents:
+            return False
+        else:
+            return self._transform._tree.has_filename(path)
+
     def path_content_summary(self, path):
         trans_id = self._path2trans_id(path)
         tt = self._transform
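With has_filename() overridden, a preview tree reports files the transform will create (and hides ones it will delete) before anything touches the real tree. A minimal sketch, assuming `wt` is a tree the preview can read-lock:

    from bzrlib import transform

    tt = transform.TransformPreview(wt)
    try:
        tt.new_file('proposed.txt', tt.root, ['draft\n'])
        preview = tt.get_preview_tree()
        # The file exists in the preview even though wt is untouched.
        assert preview.has_filename('proposed.txt')
    finally:
        tt.finalize()
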
@@ -2286 +2401 @@
                                    self.get_file(file_id).readlines(),
                                    default_revision)
 
-    def get_symlink_target(self, file_id):
+    def get_symlink_target(self, file_id, path=None):
         """See Tree.get_symlink_target"""
         if not self._content_change(file_id):
             return self._transform._tree.get_symlink_target(file_id)
@@ -2469 +2584 @@
                     existing_files.update(f[0] for f in files)
             for num, (tree_path, entry) in \
                 enumerate(tree.inventory.iter_entries_by_dir()):
-                pb.update("Building tree", num - len(deferred_contents), total)
+                pb.update(gettext("Building tree"), num - len(deferred_contents), total)
                 if entry.parent_id is None:
                     continue
                 reparent = False
@@ -2502 +2617 @@
                     executable = tree.is_executable(file_id, tree_path)
                     if executable:
                         tt.set_executability(executable, trans_id)
-                    trans_data = (trans_id, tree_path)
+                    trans_data = (trans_id, tree_path, entry.text_sha1)
                     deferred_contents.append((file_id, trans_data))
                 else:
                     file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
@@ -2524 +2639 @@
             precomputed_delta = None
         conflicts = cook_conflicts(raw_conflicts, tt)
         for conflict in conflicts:
-            warning(conflict)
+            trace.warning(unicode(conflict))
         try:
             wt.add_conflicts(conflicts)
         except errors.UnsupportedOperation:
@@ -2553 +2668 @@
         unchanged = dict(unchanged)
         new_desired_files = []
         count = 0
-        for file_id, (trans_id, tree_path) in desired_files:
+        for file_id, (trans_id, tree_path, text_sha1) in desired_files:
             accelerator_path = unchanged.get(file_id)
             if accelerator_path is None:
-                new_desired_files.append((file_id, (trans_id, tree_path)))
+                new_desired_files.append((file_id,
+                    (trans_id, tree_path, text_sha1)))
                 continue
-            pb.update('Adding file contents', count + offset, total)
+            pb.update(gettext('Adding file contents'), count + offset, total)
             if hardlink:
                 tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
                                    trans_id)
@@ -2569 +2685 @@
                     contents = filtered_output_bytes(contents, filters,
                         ContentFilterContext(tree_path, tree))
                 try:
-                    tt.create_file(contents, trans_id)
+                    tt.create_file(contents, trans_id, sha1=text_sha1)
                 finally:
                     try:
                         contents.close()
@@ -2578 +2694 @@
                         pass
             count += 1
         offset += count
-    for count, ((trans_id, tree_path), contents) in enumerate(
+    for count, ((trans_id, tree_path, text_sha1), contents) in enumerate(
             tree.iter_files_bytes(new_desired_files)):
         if wt.supports_content_filtering():
             filters = wt._content_filter_stack(tree_path)
             contents = filtered_output_bytes(contents, filters,
                 ContentFilterContext(tree_path, tree))
-        tt.create_file(contents, trans_id)
-        pb.update('Adding file contents', count + offset, total)
+        tt.create_file(contents, trans_id, sha1=text_sha1)
+        pb.update(gettext('Adding file contents'), count + offset, total)
 
 
 def _reparent_children(tt, old_parent, new_parent):
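In the hunks above, build_tree() threads each inventory entry's recorded text_sha1 through deferred_contents and down to tt.create_file(), so the freshly built tree can seed its hash cache instead of re-hashing every file on its first status. A sketch of the caller-side effect, with an assumed branch location and an assumed newly created, still-empty working tree `wt`:

    from bzrlib import branch, transform

    b = branch.Branch.open('path/to/source-branch')
    rev_tree = b.repository.revision_tree(b.last_revision())
    rev_tree.lock_read()
    try:
        # Files written here carry the recorded sha1s into wt's hash cache.
        transform.build_tree(rev_tree, wt)
    finally:
        rev_tree.unlock()
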
@@ -2765 +2881 @@
                 unversioned_filter=working_tree.is_ignored)
             delta.report_changes(tt.iter_changes(), change_reporter)
         for conflict in conflicts:
-            warning(conflict)
+            trace.warning(unicode(conflict))
         pp.next_phase()
         tt.apply()
         working_tree.set_merge_modified(merge_modified)
@@ -2802 +2918 @@
                  backups, merge_modified, basis_tree=None):
     if basis_tree is not None:
         basis_tree.lock_read()
-    change_list = target_tree.iter_changes(working_tree,
+    # We ask the working_tree for its changes relative to the target, rather
+    # than the target changes relative to the working tree. Because WT4 has an
+    # optimizer to compare itself to a target, but no optimizer for the
+    # reverse.
+    change_list = working_tree.iter_changes(target_tree,
         specific_files=specific_files, pb=pb)
     if target_tree.get_root_id() is None:
         skip_root = True
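Because the comparison direction is flipped, every paired field yielded by iter_changes() now reads (value in target_tree, value in working_tree), which is why the next hunk unpacks each pair into target_*/wt_* names. A schematic reminder of the tuple shape; the helper name and local names are illustrative, not part of the API:

    def iter_revert_changes(working_tree, target_tree, specific_files=None):
        # Each pair is (target_tree value, working_tree value).
        for (file_id, (target_path, wt_path), changed_content,
                (target_versioned, wt_versioned), (target_parent, wt_parent),
                (target_name, wt_name), (target_kind, wt_kind),
                (target_executable, wt_executable)) in working_tree.iter_changes(
                    target_tree, specific_files=specific_files):
            yield file_id, target_kind, wt_kind
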
@@ -2812 +2932 @@
         deferred_files = []
         for id_num, (file_id, path, changed_content, versioned, parent, name,
                 kind, executable) in enumerate(change_list):
-            if skip_root and file_id[0] is not None and parent[0] is None:
+            target_path, wt_path = path
+            target_versioned, wt_versioned = versioned
+            target_parent, wt_parent = parent
+            target_name, wt_name = name
+            target_kind, wt_kind = kind
+            target_executable, wt_executable = executable
+            if skip_root and wt_parent is None:
                 continue
             trans_id = tt.trans_id_file_id(file_id)
             mode_id = None
             if changed_content:
                 keep_content = False
-                if kind[0] == 'file' and (backups or kind[1] is None):
+                if wt_kind == 'file' and (backups or target_kind is None):
                     wt_sha1 = working_tree.get_file_sha1(file_id)
                     if merge_modified.get(file_id) != wt_sha1:
                         # acquire the basis tree lazily to prevent the
@@ -2827 +2953 @@
                         if basis_tree is None:
                             basis_tree = working_tree.basis_tree()
                             basis_tree.lock_read()
-                        if file_id in basis_tree:
+                        if basis_tree.has_id(file_id):
                             if wt_sha1 != basis_tree.get_file_sha1(file_id):
                                 keep_content = True
-                        elif kind[1] is None and not versioned[1]:
+                        elif target_kind is None and not target_versioned:
                             keep_content = True
-                if kind[0] is not None:
+                if wt_kind is not None:
                     if not keep_content:
                         tt.delete_contents(trans_id)
-                    elif kind[1] is not None:
-                        parent_trans_id = tt.trans_id_file_id(parent[0])
+                    elif target_kind is not None:
+                        parent_trans_id = tt.trans_id_file_id(wt_parent)
                         backup_name = tt._available_backup_name(
-                            name[0], parent_trans_id)
+                            wt_name, parent_trans_id)
                         tt.adjust_path(backup_name, parent_trans_id, trans_id)
-                        new_trans_id = tt.create_path(name[0], parent_trans_id)
-                        if versioned == (True, True):
+                        new_trans_id = tt.create_path(wt_name, parent_trans_id)
+                        if wt_versioned and target_versioned:
                             tt.unversion_file(trans_id)
                             tt.version_file(file_id, new_trans_id)
                         # New contents should have the same unix perms as old
                         # contents
                         mode_id = trans_id
                         trans_id = new_trans_id
-                if kind[1] in ('directory', 'tree-reference'):
+                if target_kind in ('directory', 'tree-reference'):
                     tt.create_directory(trans_id)
-                    if kind[1] == 'tree-reference':
+                    if target_kind == 'tree-reference':
                         revision = target_tree.get_reference_revision(file_id,
-                                                                      path[1])
+                                                                      target_path)
                         tt.set_tree_reference(revision, trans_id)
-                elif kind[1] == 'symlink':
+                elif target_kind == 'symlink':
                     tt.create_symlink(target_tree.get_symlink_target(file_id),
                                       trans_id)
-                elif kind[1] == 'file':
+                elif target_kind == 'file':
                     deferred_files.append((file_id, (trans_id, mode_id)))
                     if basis_tree is None:
                         basis_tree = working_tree.basis_tree()
                         basis_tree.lock_read()
                     new_sha1 = target_tree.get_file_sha1(file_id)
-                    if (file_id in basis_tree and new_sha1 ==
-                        basis_tree.get_file_sha1(file_id)):
+                    if (basis_tree.has_id(file_id) and
+                        new_sha1 == basis_tree.get_file_sha1(file_id)):
                         if file_id in merge_modified:
                             del merge_modified[file_id]
                     else:
                         merge_modified[file_id] = new_sha1
 
                     # preserve the execute bit when backing up
-                    if keep_content and executable[0] == executable[1]:
-                        tt.set_executability(executable[1], trans_id)
-                elif kind[1] is not None:
-                    raise AssertionError(kind[1])
-            if versioned == (False, True):
+                    if keep_content and wt_executable == target_executable:
+                        tt.set_executability(target_executable, trans_id)
+                elif target_kind is not None:
+                    raise AssertionError(target_kind)
+            if not wt_versioned and target_versioned:
                 tt.version_file(file_id, trans_id)
-            if versioned == (True, False):
+            if wt_versioned and not target_versioned:
                 tt.unversion_file(trans_id)
-            if (name[1] is not None and
-                (name[0] != name[1] or parent[0] != parent[1])):
-                if name[1] == '' and parent[1] is None:
+            if (target_name is not None and
+                (wt_name != target_name or wt_parent != target_parent)):
+                if target_name == '' and target_parent is None:
                     parent_trans = ROOT_PARENT
                 else:
-                    parent_trans = tt.trans_id_file_id(parent[1])
-                if parent[0] is None and versioned[0]:
-                    tt.adjust_root_path(name[1], parent_trans)
+                    parent_trans = tt.trans_id_file_id(target_parent)
+                if wt_parent is None and wt_versioned:
+                    tt.adjust_root_path(target_name, parent_trans)
                 else:
-                    tt.adjust_path(name[1], parent_trans, trans_id)
-            if executable[0] != executable[1] and kind[1] == "file":
-                tt.set_executability(executable[1], trans_id)
+                    tt.adjust_path(target_name, parent_trans, trans_id)
+            if wt_executable != target_executable and target_kind == "file":
+                tt.set_executability(target_executable, trans_id)
         if working_tree.supports_content_filtering():
             for index, ((trans_id, mode_id), bytes) in enumerate(
                 target_tree.iter_files_bytes(deferred_files)):
@@ -2922 +3048 @@
     pb = ui.ui_factory.nested_progress_bar()
     try:
         for n in range(10):
-            pb.update('Resolution pass', n+1, 10)
+            pb.update(gettext('Resolution pass'), n+1, 10)
             conflicts = tt.find_conflicts()
             if len(conflicts) == 0:
                 return new_conflicts
@@ -2999 +3125 @@
                         file_id = tt.final_file_id(trans_id)
                         if file_id is None:
                             file_id = tt.inactive_file_id(trans_id)
-                        entry = path_tree.inventory[file_id]
+                        _, entry = path_tree.iter_entries_by_dir(
+                            [file_id]).next()
                         # special-case the other tree root (move its
                         # children to current root)
                         if entry.parent_id is None:
@@ -3020 +3147 @@
         elif c_type == 'unversioned parent':
             file_id = tt.inactive_file_id(conflict[1])
             # special-case the other tree root (move its children instead)
-            if path_tree and file_id in path_tree:
+            if path_tree and path_tree.has_id(file_id):
                 if path_tree.path2id('') == file_id:
                     # This is the root entry, skip it
                     continue
@@ -3044 +3171 @@
 
 def cook_conflicts(raw_conflicts, tt):
     """Generate a list of cooked conflicts, sorted by file path"""
-    from bzrlib.conflicts import Conflict
     conflict_iter = iter_cook_conflicts(raw_conflicts, tt)
-    return sorted(conflict_iter, key=Conflict.sort_key)
+    return sorted(conflict_iter, key=conflicts.Conflict.sort_key)
 
 
 def iter_cook_conflicts(raw_conflicts, tt):
-    from bzrlib.conflicts import Conflict
     fp = FinalPaths(tt)
     for conflict in raw_conflicts:
         c_type = conflict[0]
@@ -3058 +3183 @@
         modified_path = fp.get_path(conflict[2])
         modified_id = tt.final_file_id(conflict[2])
         if len(conflict) == 3:
-            yield Conflict.factory(c_type, action=action, path=modified_path,
-                                     file_id=modified_id)
+            yield conflicts.Conflict.factory(
+                c_type, action=action, path=modified_path, file_id=modified_id)
 
         else:
             conflicting_path = fp.get_path(conflict[3])
             conflicting_id = tt.final_file_id(conflict[3])
-            yield Conflict.factory(c_type, action=action, path=modified_path,
-                                   file_id=modified_id,
-                                   conflict_path=conflicting_path,
-                                   conflict_file_id=conflicting_id)
+            yield conflicts.Conflict.factory(
+                c_type, action=action, path=modified_path,
+                file_id=modified_id,
+                conflict_path=conflicting_path,
+                conflict_file_id=conflicting_id)
 
 
 class _FileMover(object):