~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/transform.py

(gz) Fix test failure on alpha by correcting format string for gc_chk_sha1_record (Martin [gz])

(Each diff line below is prefixed with its line number in each of the two versions of the file being compared; a blank number means the line is present in only one of them. Blank rows separate hunks.)
  19   19  from stat import S_ISREG, S_IEXEC
  20   20  import time
  21   21
       22  import bzrlib
  22   23  from bzrlib import (
  23   24      errors,
  24   25      lazy_import,
  25   26      registry,
  26             trace,
  27   27      tree,
  28   28      )
  29   29  lazy_import.lazy_import(globals(), """

  32   32      bencode,
  33   33      bzrdir,
  34   34      commit,
  35             conflicts,
  36   35      delta,
  37   36      errors,
  38   37      inventory,
  39   38      multiparent,
  40   39      osutils,
  41   40      revision as _mod_revision,
       41      trace,
  42   42      ui,
  43   43      urlutils,
  44   44      )
  45         from bzrlib.i18n import gettext
  46   45  """)
  47   46  from bzrlib.errors import (DuplicateKey, MalformedTransform, NoSuchFile,
  48   47                             ReusingTransform, CantMoveRoot,
  49   48                             ExistingLimbo, ImmortalLimbo, NoFinalPath,
  50   49                             UnableCreateSymlink)
  51   50  from bzrlib.filters import filtered_output_bytes, ContentFilterContext
       51  from bzrlib.inventory import InventoryEntry
  52   52  from bzrlib.osutils import (
  53   53      delete_any,
  54   54      file_kind,

  64   64      deprecated_in,
  65   65      deprecated_method,
  66   66      )
       67  from bzrlib.trace import warning
  67   68
  68   69
  69   70  ROOT_PARENT = "root-parent"
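
The hunk above only shuffles names between the eager from-bzrlib import block and the lazy_import block. A minimal hedged sketch of the lazy_import pattern itself (assumes bzrlib is importable under Python 2; terminal_width is just an arbitrary attribute used to trigger the real import):

    from bzrlib import lazy_import
    lazy_import.lazy_import(globals(), """
    from bzrlib import osutils
    """)

    def first_use():
        # bzrlib.osutils is only really imported when an attribute is first
        # touched; until then the name is bound to a lazy proxy object.
        return osutils.terminal_width()
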
 104  105          self._new_parent = {}
 105  106          # mapping of trans_id with new contents -> new file_kind
 106  107          self._new_contents = {}
 107               # mapping of trans_id => (sha1 of content, stat_value)
 108               self._observed_sha1s = {}
 109  108          # Set of trans_ids whose contents will be removed
 110  109          self._removed_contents = set()
 111  110          # Mapping of trans_id -> new execute-bit value

 139  138          # A counter of how many files have been renamed
 140  139          self.rename_count = 0
 141  140
 142            def __enter__(self):
 143                """Support Context Manager API."""
 144                return self
 145
 146            def __exit__(self, exc_type, exc_val, exc_tb):
 147                """Support Context Manager API."""
 148                self.finalize()
 149
 150  141      def finalize(self):
 151  142          """Release the working tree lock, if held.
 152  143
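
One side of this hunk adds __enter__ and __exit__, so a transform can be driven by a with statement instead of an explicit try/finally around finalize(). A hedged usage sketch under that assumption (Python 2, a working tree at '.'; on the side of the diff without the context-manager methods only the try/finally form applies):

    from bzrlib import workingtree
    from bzrlib.transform import TreeTransform

    wt = workingtree.WorkingTree.open('.')

    # Explicit form, valid on both sides of the diff:
    tt = TreeTransform(wt)
    try:
        tt.new_file('example.txt', tt.root, ['example contents\n'])
        tt.apply()
    finally:
        tt.finalize()

    # Context-manager form, only meaningful with the __enter__/__exit__ above:
    # with TreeTransform(wt) as tt:
    #     tt.new_file('example2.txt', tt.root, ['more contents\n'])
    #     tt.apply()
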
 227  218          This means that the old root trans-id becomes obsolete, so it is
 228  219          recommended only to invoke this after the root trans-id has become
 229  220          irrelevant.
 230
 231  221          """
 232  222          new_roots = [k for k, v in self._new_parent.iteritems() if v is
 233  223                       ROOT_PARENT]

 239  229              self._new_root = new_roots[0]
 240  230              return
 241  231          old_new_root = new_roots[0]
      232          # TODO: What to do if a old_new_root is present, but self._new_root is
      233          #       not listed as being removed? This code explicitly unversions
      234          #       the old root and versions it with the new file_id. Though that
      235          #       seems like an incomplete delta
      236
 242  237          # unversion the new root's directory.
 243                if self.final_kind(self._new_root) is None:
 244                    file_id = self.final_file_id(old_new_root)
 245                else:
 246                    file_id = self.final_file_id(self._new_root)
      238          file_id = self.final_file_id(old_new_root)
 247  239          if old_new_root in self._new_id:
 248  240              self.cancel_versioning(old_new_root)
 249  241          else:

 253  245          if (self.tree_file_id(self._new_root) is not None and
 254  246              self._new_root not in self._removed_id):
 255  247              self.unversion_file(self._new_root)
 256                if file_id is not None:
 257                    self.version_file(file_id, self._new_root)
      248          self.version_file(file_id, self._new_root)
 258  249
 259  250          # Now move children of new root into old root directory.
 260  251          # Ensure all children are registered with the transaction, but don't

 394  385          return sorted(FinalPaths(self).get_paths(new_ids))
 395  386
 396  387      def _inventory_altered(self):
 397                """Determine which trans_ids need new Inventory entries.
 398
 399                An new entry is needed when anything that would be reflected by an
 400                inventory entry changes, including file name, file_id, parent file_id,
 401                file kind, and the execute bit.
 402
 403                Some care is taken to return entries with real changes, not cases
 404                where the value is deleted and then restored to its original value,
 405                but some actually unchanged values may be returned.
 406
 407                :returns: A list of (path, trans_id) for all items requiring an
 408                    inventory change. Ordered by path.
 409                """
 410                changed_ids = set()
 411                # Find entries whose file_ids are new (or changed).
 412                new_file_id = set(t for t in self._new_id
 413                                  if self._new_id[t] != self.tree_file_id(t))
 414                for id_set in [self._new_name, self._new_parent, new_file_id,
      388          """Get the trans_ids and paths of files needing new inv entries."""
      389          new_ids = set()
      390          for id_set in [self._new_name, self._new_parent, self._new_id,
 415  391                         self._new_executability]:
 416                    changed_ids.update(id_set)
 417                # removing implies a kind change
      392              new_ids.update(id_set)
 418  393          changed_kind = set(self._removed_contents)
 419                # so does adding
 420  394          changed_kind.intersection_update(self._new_contents)
 421                # Ignore entries that are already known to have changed.
 422                changed_kind.difference_update(changed_ids)
 423                #  to keep only the truly changed ones
      395          changed_kind.difference_update(new_ids)
 424  396          changed_kind = (t for t in changed_kind
 425  397                          if self.tree_kind(t) != self.final_kind(t))
 426                # all kind changes will alter the inventory
 427                changed_ids.update(changed_kind)
 428                # To find entries with changed parent_ids, find parents which existed,
 429                # but changed file_id.
 430                changed_file_id = set(t for t in new_file_id if t in self._removed_id)
 431                # Now add all their children to the set.
 432                for parent_trans_id in new_file_id:
 433                    changed_ids.update(self.iter_tree_children(parent_trans_id))
 434                return sorted(FinalPaths(self).get_paths(changed_ids))
      398          new_ids.update(changed_kind)
      399          return sorted(FinalPaths(self).get_paths(new_ids))
 435  400
 436  401      def final_kind(self, trans_id):
 437  402          """Determine the final file kind, after any changes applied.
 664  629              if kind is None:
 665  630                  conflicts.append(('versioning no contents', trans_id))
 666  631                  continue
 667                if not inventory.InventoryEntry.versionable_kind(kind):
      632              if not InventoryEntry.versionable_kind(kind):
 668  633                  conflicts.append(('versioning bad kind', trans_id, kind))
 669  634          return conflicts
 670  635
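
Both sides call the same InventoryEntry.versionable_kind helper, one through the lazily imported inventory module and one through the direct InventoryEntry import. A hedged sketch of what it answers (Python 2 print statement):

    from bzrlib.inventory import InventoryEntry

    for kind in ('file', 'directory', 'symlink', 'tree-reference', 'unknown'):
        print kind, InventoryEntry.versionable_kind(kind)
    # 'unknown' is the only one of these reported as not versionable.
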
 789  754          return trans_id
 790  755
 791  756      def new_file(self, name, parent_id, contents, file_id=None,
 792                             executable=None, sha1=None):
      757                   executable=None):
 793  758          """Convenience method to create files.
 794  759
 795  760          name is the name of the file to create.

 802  767          trans_id = self._new_entry(name, parent_id, file_id)
 803  768          # TODO: rather than scheduling a set_executable call,
 804  769          # have create_file create the file with the right mode.
 805                self.create_file(contents, trans_id, sha1=sha1)
      770          self.create_file(contents, trans_id)
 806  771          if executable is not None:
 807  772              self.set_executability(executable, trans_id)
 808  773          return trans_id
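
The only change in this hunk is the optional sha1 argument that one side threads from new_file into create_file. A hedged sketch of a caller that already knows the content's sha1, assuming that side of the diff (osutils.sha_strings computes the hex sha1 of the joined strings):

    from bzrlib import osutils, workingtree
    from bzrlib.transform import TreeTransform

    wt = workingtree.WorkingTree.open('.')
    contents = ['line one\n', 'line two\n']
    known_sha1 = osutils.sha_strings(contents)

    tt = TreeTransform(wt)
    try:
        # Passing sha1= lets the transform record the digest up front instead
        # of re-hashing the file after it has been written out to limbo.
        tt.new_file('prehashed.txt', tt.root, contents, sha1=known_sha1)
        tt.apply()
    finally:
        tt.finalize()
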
1190 1155          self._deletiondir = None
1191 1156          # A mapping of transform ids to their limbo filename
1192 1157          self._limbo_files = {}
1193                self._possibly_stale_limbo_files = set()
1194 1158          # A mapping of transform ids to a set of the transform ids of children
1195 1159          # that their limbo directory has
1196 1160          self._limbo_children = {}

1209 1173          if self._tree is None:
1210 1174              return
1211 1175          try:
1212                    limbo_paths = self._limbo_files.values() + list(
1213                        self._possibly_stale_limbo_files)
1214                    limbo_paths = sorted(limbo_paths, reverse=True)
1215                    for path in limbo_paths:
1216                        try:
1217                            delete_any(path)
1218                        except OSError, e:
1219                            if e.errno != errno.ENOENT:
1220                                raise
1221                        # XXX: warn? perhaps we just got interrupted at an
1222                        # inconvenient moment, but perhaps files are disappearing
1223                        # from under us?
     1176              entries = [(self._limbo_name(t), t, k) for t, k in
     1177                         self._new_contents.iteritems()]
     1178              entries.sort(reverse=True)
     1179              for path, trans_id, kind in entries:
     1180                  delete_any(path)
1224 1181              try:
1225 1182                  delete_any(self._limbodir)
1226 1183              except OSError:

1275 1232          entries from _limbo_files, because they are now stale.
1276 1233          """
1277 1234          for trans_id in trans_ids:
1278                    old_path = self._limbo_files[trans_id]
1279                    self._possibly_stale_limbo_files.add(old_path)
1280                    del self._limbo_files[trans_id]
     1235              old_path = self._limbo_files.pop(trans_id)
1281 1236              if trans_id not in self._new_contents:
1282 1237                  continue
1283 1238              new_path = self._limbo_name(trans_id)
1284 1239              os.rename(old_path, new_path)
1285                    self._possibly_stale_limbo_files.remove(old_path)
1286 1240              for descendant in self._limbo_descendants(trans_id):
1287 1241                  desc_path = self._limbo_files[descendant]
1288 1242                  desc_path = new_path + desc_path[len(old_path):]

1295 1249              descendants.update(self._limbo_descendants(descendant))
1296 1250          return descendants
1297 1251
1298            def create_file(self, contents, trans_id, mode_id=None, sha1=None):
     1252      def create_file(self, contents, trans_id, mode_id=None):
1299 1253          """Schedule creation of a new file.
1300 1254
1301                :seealso: new_file.
1302
1303                :param contents: an iterator of strings, all of which will be written
1304                    to the target destination.
1305                :param trans_id: TreeTransform handle
1306                :param mode_id: If not None, force the mode of the target file to match
1307                    the mode of the object referenced by mode_id.
1308                    Otherwise, we will try to preserve mode bits of an existing file.
1309                :param sha1: If the sha1 of this content is already known, pass it in.
1310                    We can use it to prevent future sha1 computations.
     1255          See also new_file.
     1256
     1257          Contents is an iterator of strings, all of which will be written
     1258          to the target destination.
     1259
     1260          New file takes the permissions of any existing file with that id,
     1261          unless mode_id is specified.
1311 1262          """
1312 1263          name = self._limbo_name(trans_id)
1313 1264          f = open(name, 'wb')
1314 1265          try:
1315                    unique_add(self._new_contents, trans_id, 'file')
     1266              try:
     1267                  unique_add(self._new_contents, trans_id, 'file')
     1268              except:
     1269                  # Clean up the file, it never got registered so
     1270                  # TreeTransform.finalize() won't clean it up.
     1271                  f.close()
     1272                  os.unlink(name)
     1273                  raise
     1274
1316 1275              f.writelines(contents)
1317 1276          finally:
1318 1277              f.close()
1319 1278          self._set_mtime(name)
1320 1279          self._set_mode(trans_id, mode_id, S_ISREG)
1321                # It is unfortunate we have to use lstat instead of fstat, but we just
1322                # used utime and chmod on the file, so we need the accurate final
1323                # details.
1324                if sha1 is not None:
1325                    self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
1326 1280
1327 1281      def _read_file_chunks(self, trans_id):
1328 1282          cur_file = open(self._limbo_name(trans_id), 'rb')
1387 1341      def cancel_creation(self, trans_id):
1388 1342          """Cancel the creation of new file contents."""
1389 1343          del self._new_contents[trans_id]
1390                if trans_id in self._observed_sha1s:
1391                    del self._observed_sha1s[trans_id]
1392 1344          children = self._limbo_children.get(trans_id)
1393 1345          # if this is a limbo directory with children, move them before removing
1394 1346          # the directory

1410 1362          if orphan_policy is None:
1411 1363              orphan_policy = default_policy
1412 1364          if orphan_policy not in orphaning_registry:
1413                    trace.warning('%s (from %s) is not a known policy, defaulting '
1414                        'to %s' % (orphan_policy, conf_var_name, default_policy))
     1365              trace.warning('%s (from %s) is not a known policy, defaulting to %s'
     1366                            % (orphan_policy, conf_var_name, default_policy))
1415 1367              orphan_policy = default_policy
1416 1368          handle_orphan = orphaning_registry.get(orphan_policy)
1417 1369          handle_orphan(self, trans_id, parent_id)
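
The warning above fires when the configured orphan policy is unknown and the code falls back to the default before looking the handler up in orphaning_registry. A hedged sketch of that lookup; the key name 'conflict' is the policy bzrlib registers as the default, but treat the exact key names as an assumption here:

    from bzrlib import transform

    handler = transform.orphaning_registry.get('conflict')
    # handler is a callable taking (tt, orphan_trans_id, parent_trans_id),
    # matching the handle_orphan(self, trans_id, parent_id) call in this hunk.
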
1727 1679          child_pb = ui.ui_factory.nested_progress_bar()
1728 1680          try:
1729 1681              if precomputed_delta is None:
1730                        child_pb.update(gettext('Apply phase'), 0, 2)
     1682                  child_pb.update('Apply phase', 0, 2)
1731 1683                  inventory_delta = self._generate_inventory_delta()
1732 1684                  offset = 1
1733 1685              else:
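
The repeated difference in the apply() hunks is only whether progress-bar messages are wrapped in gettext. A hedged sketch of both spellings (assumes a bzrlib new enough to provide bzrlib.i18n):

    from bzrlib import ui
    from bzrlib.i18n import gettext

    pb = ui.ui_factory.nested_progress_bar()
    try:
        pb.update(gettext('Apply phase'), 0, 2)   # message routed through i18n
        pb.update('Apply phase', 1, 2)            # plain, untranslated message
    finally:
        pb.finished()
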
1738 1690              else:
1739 1691                  mover = _mover
1740 1692              try:
1741                        child_pb.update(gettext('Apply phase'), 0 + offset, 2 + offset)
     1693                  child_pb.update('Apply phase', 0 + offset, 2 + offset)
1742 1694                  self._apply_removals(mover)
1743                        child_pb.update(gettext('Apply phase'), 1 + offset, 2 + offset)
     1695                  child_pb.update('Apply phase', 1 + offset, 2 + offset)
1744 1696                  modified_paths = self._apply_insertions(mover)
1745 1697              except:
1746 1698                  mover.rollback()

1749 1701                  mover.apply_deletions()
1750 1702          finally:
1751 1703              child_pb.finished()
1752                if self.final_file_id(self.root) is None:
1753                    inventory_delta = [e for e in inventory_delta if e[0] != '']
1754 1704          self._tree.apply_inventory_delta(inventory_delta)
1755                self._apply_observed_sha1s()
1756 1705          self._done = True
1757 1706          self.finalize()
1758 1707          return _TransformResults(modified_paths, self.rename_count)

1766 1715          try:
1767 1716              for num, trans_id in enumerate(self._removed_id):
1768 1717                  if (num % 10) == 0:
1769                            child_pb.update(gettext('removing file'), num, total_entries)
     1718                      child_pb.update('removing file', num, total_entries)
1770 1719                  if trans_id == self._new_root:
1771 1720                      file_id = self._tree.get_root_id()
1772 1721                  else:

1784 1733              final_kinds = {}
1785 1734              for num, (path, trans_id) in enumerate(new_paths):
1786 1735                  if (num % 10) == 0:
1787                            child_pb.update(gettext('adding file'),
     1736                      child_pb.update('adding file',
1788 1737                                      num + len(self._removed_id), total_entries)
1789 1738                  file_id = new_path_file_ids[trans_id]
1790 1739                  if file_id is None:

1830 1779          tree_paths.sort(reverse=True)
1831 1780          child_pb = ui.ui_factory.nested_progress_bar()
1832 1781          try:
1833                    for num, (path, trans_id) in enumerate(tree_paths):
1834                        # do not attempt to move root into a subdirectory of itself.
1835                        if path == '':
1836                            continue
1837                        child_pb.update(gettext('removing file'), num, len(tree_paths))
     1782              for num, data in enumerate(tree_paths):
     1783                  path, trans_id = data
     1784                  child_pb.update('removing file', num, len(tree_paths))
1838 1785                  full_path = self._tree.abspath(path)
1839 1786                  if trans_id in self._removed_contents:
1840 1787                      delete_path = os.path.join(self._deletiondir, trans_id)

1869 1816          try:
1870 1817              for num, (path, trans_id) in enumerate(new_paths):
1871 1818                  if (num % 10) == 0:
1872                            child_pb.update(gettext('adding file'), num, len(new_paths))
     1819                      child_pb.update('adding file', num, len(new_paths))
1873 1820                  full_path = self._tree.abspath(path)
1874 1821                  if trans_id in self._needs_rename:
1875 1822                      try:

1880 1827                              raise
1881 1828                      else:
1882 1829                          self.rename_count += 1
1883                        # TODO: if trans_id in self._observed_sha1s, we should
1884                        #       re-stat the final target, since ctime will be
1885                        #       updated by the change.
1886 1830                  if (trans_id in self._new_contents or
1887 1831                      self.path_changed(trans_id)):
1888 1832                      if trans_id in self._new_contents:
1889 1833                          modified_paths.append(full_path)
1890 1834                  if trans_id in self._new_executability:
1891 1835                      self._set_executability(path, trans_id)
1892                    if trans_id in self._observed_sha1s:
1893                        o_sha1, o_st_val = self._observed_sha1s[trans_id]
1894                        st = osutils.lstat(full_path)
1895                        self._observed_sha1s[trans_id] = (o_sha1, st)
1896 1836          finally:
1897 1837              child_pb.finished()
1898                for path, trans_id in new_paths:
1899                    # new_paths includes stuff like workingtree conflicts. Only the
1900                    # stuff in new_contents actually comes from limbo.
1901                    if trans_id in self._limbo_files:
1902                        del self._limbo_files[trans_id]
1903 1838          self._new_contents.clear()
1904 1839          return modified_paths
1905 1840
1906            def _apply_observed_sha1s(self):
1907                """After we have finished renaming everything, update observed sha1s
1908
1909                This has to be done after self._tree.apply_inventory_delta, otherwise
1910                it doesn't know anything about the files we are updating. Also, we want
1911                to do this as late as possible, so that most entries end up cached.
1912                """
1913                # TODO: this doesn't update the stat information for directories. So
1914                #       the first 'bzr status' will still need to rewrite
1915                #       .bzr/checkout/dirstate. However, we at least don't need to
1916                #       re-read all of the files.
1917                # TODO: If the operation took a while, we could do a time.sleep(3) here
1918                #       to allow the clock to tick over and ensure we won't have any
1919                #       problems. (we could observe start time, and finish time, and if
1920                #       it is less than eg 10% overhead, add a sleep call.)
1921                paths = FinalPaths(self)
1922                for trans_id, observed in self._observed_sha1s.iteritems():
1923                    path = paths.get_path(trans_id)
1924                    # We could get the file_id, but dirstate prefers to use the path
1925                    # anyway, and it is 'cheaper' to determine.
1926                    # file_id = self._new_id[trans_id]
1927                    self._tree._observed_sha1(None, path, observed)
1928
1929 1841
1930 1842  class TransformPreview(DiskTreeTransform):
1931 1843      """A TreeTransform for generating preview trees.

1947 1859          path = self._tree_id_paths.get(trans_id)
1948 1860          if path is None:
1949 1861              return None
1950                kind = self._tree.path_content_summary(path)[0]
1951                if kind == 'missing':
1952                    kind = None
1953                return kind
     1862          file_id = self._tree.path2id(path)
     1863          try:
     1864              return self._tree.kind(file_id)
     1865          except errors.NoSuchFile:
     1866              return None
1954 1867
1955 1868      def _set_mode(self, trans_id, mode_id, typefunc):
1956 1869          """Set the mode of new file contents.

1980 1893          raise NotImplementedError(self.new_orphan)
1981 1894
1982 1895
1983        class _PreviewTree(tree.InventoryTree):
     1896  class _PreviewTree(tree.Tree):
1984 1897      """Partial implementation of Tree to support show_diff_trees"""
1985 1898
1986 1899      def __init__(self, transform):
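
The _PreviewTree defined above is what TransformPreview hands back for read-only inspection. A hedged usage sketch (assumes a bzr branch at '.'; error handling kept minimal):

    from bzrlib import branch
    from bzrlib.transform import TransformPreview

    b = branch.Branch.open('.')
    basis = b.basis_tree()
    preview = TransformPreview(basis)
    try:
        preview.new_file('example.txt', preview.root, ['preview contents\n'])
        preview_tree = preview.get_preview_tree()
        # preview_tree behaves like a read-only Tree (the _PreviewTree above)
        # and can be compared against basis without touching any working tree.
    finally:
        preview.finalize()
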
2015 1928                  yield self._get_repository().revision_tree(revision_id)
2016 1929
2017 1930      def _get_file_revision(self, file_id, vf, tree_revision):
2018                parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
     1931          parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
2019 1932                         self._iter_parent_trees()]
2020 1933          vf.add_lines((file_id, tree_revision), parent_keys,
2021 1934                       self.get_file_lines(file_id))

2025 1938              vf.fallback_versionedfiles.append(base_vf)
2026 1939          return tree_revision
2027 1940
2028            def _stat_limbo_file(self, file_id=None, trans_id=None):
2029                if trans_id is None:
2030                    trans_id = self._transform.trans_id_file_id(file_id)
     1941      def _stat_limbo_file(self, file_id):
     1942          trans_id = self._transform.trans_id_file_id(file_id)
2031 1943          name = self._transform._limbo_name(trans_id)
2032 1944          return os.lstat(name)
2033 1945

2248 2160
2249 2161      def get_file_size(self, file_id):
2250 2162          """See Tree.get_file_size"""
2251                trans_id = self._transform.trans_id_file_id(file_id)
2252                kind = self._transform.final_kind(trans_id)
2253                if kind != 'file':
2254                    return None
2255                if trans_id in self._transform._new_contents:
2256                    return self._stat_limbo_file(trans_id=trans_id).st_size
2257 2163          if self.kind(file_id) == 'file':
2258 2164              return self._transform._tree.get_file_size(file_id)
2259 2165          else:
2260 2166              return None
2261 2167
2262            def get_file_verifier(self, file_id, path=None, stat_value=None):
2263                trans_id = self._transform.trans_id_file_id(file_id)
2264                kind = self._transform._new_contents.get(trans_id)
2265                if kind is None:
2266                    return self._transform._tree.get_file_verifier(file_id)
2267                if kind == 'file':
2268                    fileobj = self.get_file(file_id)
2269                    try:
2270                        return ("SHA1", sha_file(fileobj))
2271                    finally:
2272                        fileobj.close()
2273
2274 2168      def get_file_sha1(self, file_id, path=None, stat_value=None):
2275 2169          trans_id = self._transform.trans_id_file_id(file_id)
2276 2170          kind = self._transform._new_contents.get(trans_id)
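
get_file_verifier, present on only one side of this hunk, returns a ('SHA1', hexdigest) pair computed with the same sha_file helper used elsewhere in the module. A hedged sketch of that shape, with StringIO standing in for a real file object:

    from StringIO import StringIO
    from bzrlib.osutils import sha_file

    fileobj = StringIO('some file content\n')
    verifier = ('SHA1', sha_file(fileobj))
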
2299 2193              except errors.NoSuchId:
2300 2194                  return False
2301 2195
2302            def has_filename(self, path):
2303                trans_id = self._path2trans_id(path)
2304                if trans_id in self._transform._new_contents:
2305                    return True
2306                elif trans_id in self._transform._removed_contents:
2307                    return False
2308                else:
2309                    return self._transform._tree.has_filename(path)
2310
2311 2196      def path_content_summary(self, path):
2312 2197          trans_id = self._path2trans_id(path)
2313 2198          tt = self._transform

2401 2286                                     self.get_file(file_id).readlines(),
2402 2287                                     default_revision)
2403 2288
2404            def get_symlink_target(self, file_id, path=None):
     2289      def get_symlink_target(self, file_id):
2405 2290          """See Tree.get_symlink_target"""
2406 2291          if not self._content_change(file_id):
2407 2292              return self._transform._tree.get_symlink_target(file_id)

2584 2469                      existing_files.update(f[0] for f in files)
2585 2470              for num, (tree_path, entry) in \
2586 2471                  enumerate(tree.inventory.iter_entries_by_dir()):
2587                        pb.update(gettext("Building tree"), num - len(deferred_contents), total)
     2472                  pb.update("Building tree", num - len(deferred_contents), total)
2588 2473                  if entry.parent_id is None:
2589 2474                      continue
2590 2475                  reparent = False

2617 2502                      executable = tree.is_executable(file_id, tree_path)
2618 2503                      if executable:
2619 2504                          tt.set_executability(executable, trans_id)
2620                        trans_data = (trans_id, tree_path, entry.text_sha1)
     2505                      trans_data = (trans_id, tree_path)
2621 2506                      deferred_contents.append((file_id, trans_data))
2622 2507                  else:
2623 2508                      file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,

2639 2524              precomputed_delta = None
2640 2525          conflicts = cook_conflicts(raw_conflicts, tt)
2641 2526          for conflict in conflicts:
2642                    trace.warning(unicode(conflict))
     2527              warning(conflict)
2643 2528          try:
2644 2529              wt.add_conflicts(conflicts)
2645 2530          except errors.UnsupportedOperation:

2668 2553          unchanged = dict(unchanged)
2669 2554          new_desired_files = []
2670 2555          count = 0
2671                for file_id, (trans_id, tree_path, text_sha1) in desired_files:
     2556          for file_id, (trans_id, tree_path) in desired_files:
2672 2557              accelerator_path = unchanged.get(file_id)
2673 2558              if accelerator_path is None:
2674                        new_desired_files.append((file_id,
2675                            (trans_id, tree_path, text_sha1)))
     2559                  new_desired_files.append((file_id, (trans_id, tree_path)))
2676 2560                  continue
2677                    pb.update(gettext('Adding file contents'), count + offset, total)
     2561              pb.update('Adding file contents', count + offset, total)
2678 2562              if hardlink:
2679 2563                  tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
2680 2564                                     trans_id)

2685 2569                      contents = filtered_output_bytes(contents, filters,
2686 2570                          ContentFilterContext(tree_path, tree))
2687 2571                  try:
2688                            tt.create_file(contents, trans_id, sha1=text_sha1)
     2572                      tt.create_file(contents, trans_id)
2689 2573                  finally:
2690 2574                      try:
2691 2575                          contents.close()

2694 2578                          pass
2695 2579              count += 1
2696 2580          offset += count
2697            for count, ((trans_id, tree_path, text_sha1), contents) in enumerate(
     2581  for count, ((trans_id, tree_path), contents) in enumerate(
2698 2582              tree.iter_files_bytes(new_desired_files)):
2699 2583          if wt.supports_content_filtering():
2700 2584              filters = wt._content_filter_stack(tree_path)
2701 2585              contents = filtered_output_bytes(contents, filters,
2702 2586                  ContentFilterContext(tree_path, tree))
2703                tt.create_file(contents, trans_id, sha1=text_sha1)
2704                pb.update(gettext('Adding file contents'), count + offset, total)
     2587      tt.create_file(contents, trans_id)
     2588      pb.update('Adding file contents', count + offset, total)
2705 2589
2706 2590
2707 2591  def _reparent_children(tt, old_parent, new_parent):
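
Both sides of the _create_files hunks rely on the same iter_files_bytes contract: you pass (file_id, identifier) pairs and get back (identifier, chunk_iterable) pairs, not necessarily in order. A hedged sketch (assumes a working tree at '.' with a versioned file named README):

    from bzrlib import workingtree

    wt = workingtree.WorkingTree.open('.')
    wt.lock_read()
    try:
        file_id = wt.path2id('README')
        for identifier, chunks in wt.iter_files_bytes([(file_id, 'my-key')]):
            text = ''.join(chunks)
    finally:
        wt.unlock()
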
2881 2765                  unversioned_filter=working_tree.is_ignored)
2882 2766              delta.report_changes(tt.iter_changes(), change_reporter)
2883 2767          for conflict in conflicts:
2884                    trace.warning(unicode(conflict))
     2768              warning(conflict)
2885 2769          pp.next_phase()
2886 2770          tt.apply()
2887 2771          working_tree.set_merge_modified(merge_modified)

2918 2802                   backups, merge_modified, basis_tree=None):
2919 2803      if basis_tree is not None:
2920 2804          basis_tree.lock_read()
2921            # We ask the working_tree for its changes relative to the target, rather
2922            # than the target changes relative to the working tree. Because WT4 has an
2923            # optimizer to compare itself to a target, but no optimizer for the
2924            # reverse.
2925            change_list = working_tree.iter_changes(target_tree,
     2805      change_list = target_tree.iter_changes(working_tree,
2926 2806          specific_files=specific_files, pb=pb)
2927 2807      if target_tree.get_root_id() is None:
2928 2808          skip_root = True

2932 2812          deferred_files = []
2933 2813          for id_num, (file_id, path, changed_content, versioned, parent, name,
2934 2814                  kind, executable) in enumerate(change_list):
2935                target_path, wt_path = path
2936                target_versioned, wt_versioned = versioned
2937                target_parent, wt_parent = parent
2938                target_name, wt_name = name
2939                target_kind, wt_kind = kind
2940                target_executable, wt_executable = executable
2941                if skip_root and wt_parent is None:
     2815              if skip_root and file_id[0] is not None and parent[0] is None:
2942 2816                  continue
2943 2817              trans_id = tt.trans_id_file_id(file_id)
2944 2818              mode_id = None
2945 2819              if changed_content:
2946 2820                  keep_content = False
2947                    if wt_kind == 'file' and (backups or target_kind is None):
     2821                  if kind[0] == 'file' and (backups or kind[1] is None):
2948 2822                      wt_sha1 = working_tree.get_file_sha1(file_id)
2949 2823                      if merge_modified.get(file_id) != wt_sha1:
2950 2824                          # acquire the basis tree lazily to prevent the

2953 2827                          if basis_tree is None:
2954 2828                              basis_tree = working_tree.basis_tree()
2955 2829                              basis_tree.lock_read()
2956                            if basis_tree.has_id(file_id):
     2830                          if file_id in basis_tree:
2957 2831                              if wt_sha1 != basis_tree.get_file_sha1(file_id):
2958 2832                                  keep_content = True
2959                            elif target_kind is None and not target_versioned:
     2833                          elif kind[1] is None and not versioned[1]:
2960 2834                              keep_content = True
2961                    if wt_kind is not None:
     2835                  if kind[0] is not None:
2962 2836                      if not keep_content:
2963 2837                          tt.delete_contents(trans_id)
2964                        elif target_kind is not None:
2965                            parent_trans_id = tt.trans_id_file_id(wt_parent)
     2838                      elif kind[1] is not None:
     2839                          parent_trans_id = tt.trans_id_file_id(parent[0])
2966 2840                          backup_name = tt._available_backup_name(
2967                                wt_name, parent_trans_id)
     2841                              name[0], parent_trans_id)
2968 2842                          tt.adjust_path(backup_name, parent_trans_id, trans_id)
2969                            new_trans_id = tt.create_path(wt_name, parent_trans_id)
2970                            if wt_versioned and target_versioned:
     2843                          new_trans_id = tt.create_path(name[0], parent_trans_id)
     2844                          if versioned == (True, True):
2971 2845                              tt.unversion_file(trans_id)
2972 2846                              tt.version_file(file_id, new_trans_id)
2973 2847                          # New contents should have the same unix perms as old
2974 2848                          # contents
2975 2849                          mode_id = trans_id
2976 2850                          trans_id = new_trans_id
2977                    if target_kind in ('directory', 'tree-reference'):
     2851                  if kind[1] in ('directory', 'tree-reference'):
2978 2852                      tt.create_directory(trans_id)
2979                        if target_kind == 'tree-reference':
     2853                      if kind[1] == 'tree-reference':
2980 2854                          revision = target_tree.get_reference_revision(file_id,
2981                                                                          target_path)
     2855                                                                        path[1])
2982 2856                          tt.set_tree_reference(revision, trans_id)
2983                    elif target_kind == 'symlink':
     2857                  elif kind[1] == 'symlink':
2984 2858                      tt.create_symlink(target_tree.get_symlink_target(file_id),
2985 2859                                        trans_id)
2986                    elif target_kind == 'file':
     2860                  elif kind[1] == 'file':
2987 2861                      deferred_files.append((file_id, (trans_id, mode_id)))
2988 2862                      if basis_tree is None:
2989 2863                          basis_tree = working_tree.basis_tree()
2990 2864                          basis_tree.lock_read()
2991 2865                      new_sha1 = target_tree.get_file_sha1(file_id)
2992                        if (basis_tree.has_id(file_id) and
2993                            new_sha1 == basis_tree.get_file_sha1(file_id)):
     2866                      if (file_id in basis_tree and new_sha1 ==
     2867                          basis_tree.get_file_sha1(file_id)):
2994 2868                          if file_id in merge_modified:
2995 2869                              del merge_modified[file_id]
2996 2870                      else:
2997 2871                          merge_modified[file_id] = new_sha1
2998 2872
2999 2873                      # preserve the execute bit when backing up
3000                        if keep_content and wt_executable == target_executable:
3001                            tt.set_executability(target_executable, trans_id)
3002                    elif target_kind is not None:
3003                        raise AssertionError(target_kind)
3004                if not wt_versioned and target_versioned:
     2874                      if keep_content and executable[0] == executable[1]:
     2875                          tt.set_executability(executable[1], trans_id)
     2876                  elif kind[1] is not None:
     2877                      raise AssertionError(kind[1])
     2878              if versioned == (False, True):
3005 2879                  tt.version_file(file_id, trans_id)
3006                if wt_versioned and not target_versioned:
     2880              if versioned == (True, False):
3007 2881                  tt.unversion_file(trans_id)
3008                if (target_name is not None and
3009                    (wt_name != target_name or wt_parent != target_parent)):
3010                    if target_name == '' and target_parent is None:
     2882              if (name[1] is not None and
     2883                  (name[0] != name[1] or parent[0] != parent[1])):
     2884                  if name[1] == '' and parent[1] is None:
3011 2885                      parent_trans = ROOT_PARENT
3012 2886                  else:
3013                        parent_trans = tt.trans_id_file_id(target_parent)
3014                    if wt_parent is None and wt_versioned:
3015                        tt.adjust_root_path(target_name, parent_trans)
     2887                      parent_trans = tt.trans_id_file_id(parent[1])
     2888                  if parent[0] is None and versioned[0]:
     2889                      tt.adjust_root_path(name[1], parent_trans)
3016 2890                  else:
3017                        tt.adjust_path(target_name, parent_trans, trans_id)
3018                if wt_executable != target_executable and target_kind == "file":
3019                    tt.set_executability(target_executable, trans_id)
     2891                      tt.adjust_path(name[1], parent_trans, trans_id)
     2892              if executable[0] != executable[1] and kind[1] == "file":
     2893                  tt.set_executability(executable[1], trans_id)
3020 2894          if working_tree.supports_content_filtering():
3021 2895              for index, ((trans_id, mode_id), bytes) in enumerate(
3022 2896                  target_tree.iter_files_bytes(deferred_files)):
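
Beyond flipping which tree drives iter_changes, the bulk of this hunk swaps tuple indexing for named unpacking of the per-tree pairs. A standalone sketch of just the unpacking pattern with made-up values (note that in the hunk itself the element order also flips, because the direction of the comparison changed):

    kind = ('file', 'symlink')
    executable = (False, True)

    # Index style:
    changed_exec_idx = executable[0] != executable[1] and kind[0] == 'file'

    # Equivalent named-unpacking style:
    target_kind, wt_kind = kind
    target_executable, wt_executable = executable
    changed_exec_named = (target_executable != wt_executable
                          and target_kind == 'file')
    assert changed_exec_idx == changed_exec_named
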
3048 2922      pb = ui.ui_factory.nested_progress_bar()
3049 2923      try:
3050 2924          for n in range(10):
3051                    pb.update(gettext('Resolution pass'), n+1, 10)
     2925              pb.update('Resolution pass', n+1, 10)
3052 2926              conflicts = tt.find_conflicts()
3053 2927              if len(conflicts) == 0:
3054 2928                  return new_conflicts
3125 2999                          file_id = tt.final_file_id(trans_id)
3126 3000                          if file_id is None:
3127 3001                              file_id = tt.inactive_file_id(trans_id)
3128                            _, entry = path_tree.iter_entries_by_dir(
3129                                [file_id]).next()
     3002                          entry = path_tree.inventory[file_id]
3130 3003                          # special-case the other tree root (move its
3131 3004                          # children to current root)
3132 3005                          if entry.parent_id is None:

3147 3020          elif c_type == 'unversioned parent':
3148 3021              file_id = tt.inactive_file_id(conflict[1])
3149 3022              # special-case the other tree root (move its children instead)
3150                if path_tree and path_tree.has_id(file_id):
     3023              if path_tree and file_id in path_tree:
3151 3024                  if path_tree.path2id('') == file_id:
3152 3025                      # This is the root entry, skip it
3153 3026                      continue

3171 3044
3172 3045  def cook_conflicts(raw_conflicts, tt):
3173 3046      """Generate a list of cooked conflicts, sorted by file path"""
     3047      from bzrlib.conflicts import Conflict
3174 3048      conflict_iter = iter_cook_conflicts(raw_conflicts, tt)
3175            return sorted(conflict_iter, key=conflicts.Conflict.sort_key)
     3049      return sorted(conflict_iter, key=Conflict.sort_key)
3176 3050
3177 3051
3178 3052  def iter_cook_conflicts(raw_conflicts, tt):
     3053      from bzrlib.conflicts import Conflict
3179 3054      fp = FinalPaths(tt)
3180 3055      for conflict in raw_conflicts:
3181 3056          c_type = conflict[0]

3183 3058          modified_path = fp.get_path(conflict[2])
3184 3059          modified_id = tt.final_file_id(conflict[2])
3185 3060          if len(conflict) == 3:
3186                    yield conflicts.Conflict.factory(
3187                        c_type, action=action, path=modified_path, file_id=modified_id)
     3061              yield Conflict.factory(c_type, action=action, path=modified_path,
     3062                                       file_id=modified_id)
3188 3063
3189 3064          else:
3190 3065              conflicting_path = fp.get_path(conflict[3])
3191 3066              conflicting_id = tt.final_file_id(conflict[3])
3192                    yield conflicts.Conflict.factory(
3193                        c_type, action=action, path=modified_path,
3194                        file_id=modified_id,
3195                        conflict_path=conflicting_path,
3196                        conflict_file_id=conflicting_id)
     3067              yield Conflict.factory(c_type, action=action, path=modified_path,
     3068                                     file_id=modified_id,
     3069                                     conflict_path=conflicting_path,
     3070                                     conflict_file_id=conflicting_id)
3197 3071
3198 3072
3199 3073  class _FileMover(object):
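
The two sides of the cook_conflicts hunks only differ in how they reach the conflict factory: a local "from bzrlib.conflicts import Conflict" versus the lazily imported conflicts module. A hedged sketch showing they resolve to the same thing ('text conflict' is one of the registered conflict type strings; treat the exact constructor keywords as an assumption):

    from bzrlib import conflicts
    from bzrlib.conflicts import Conflict

    c1 = Conflict.factory('text conflict', path='README', file_id=None)
    c2 = conflicts.Conflict.factory('text conflict', path='README', file_id=None)
    assert type(c1) is type(c2)
    cooked = sorted([c2, c1], key=conflicts.Conflict.sort_key)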