~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/transform.py

  • Committer: Canonical.com Patch Queue Manager
  • Date: 2011-05-29 16:22:46 UTC
  • mfrom: (5929.2.6 789167-test-server)
  • Revision ID: pqm@pqm.ubuntu.com-20110529162246-bwwmjj18mj717e3i
(vila) Fix a race condition in a smart server hook test (Vincent Ladeuil)

@@ -32,7 +32,6 @@
     bencode,
     bzrdir,
     commit,
-    conflicts,
     delta,
     errors,
     inventory,
@@ -42,7 +41,6 @@
     ui,
     urlutils,
     )
-from bzrlib.i18n import gettext
 """)
 from bzrlib.errors import (DuplicateKey, MalformedTransform, NoSuchFile,
                            ReusingTransform, CantMoveRoot,
@@ -139,14 +137,6 @@
         # A counter of how many files have been renamed
         self.rename_count = 0
 
-    def __enter__(self):
-        """Support Context Manager API."""
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        """Support Context Manager API."""
-        self.finalize()
-
     def finalize(self):
         """Release the working tree lock, if held.
 
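
The __enter__/__exit__ pair in the hunk above is what lets a TreeTransform be
used in a "with" statement, so finalize() releases the tree lock and the limbo
directory even if an exception escapes before apply(). A minimal usage sketch,
assuming a bzrlib that carries the context-manager support shown here (the
file name and contents are made up):

    from bzrlib import workingtree
    from bzrlib.transform import TreeTransform

    wt = workingtree.WorkingTree.open('.')
    with TreeTransform(wt) as tt:     # __enter__ simply returns the transform
        tt.new_file('example.txt', tt.root, ['hello\n'])
        tt.apply()
    # Whether apply() succeeded or raised, __exit__ has called tt.finalize().
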
@@ -227,7 +217,6 @@
         This means that the old root trans-id becomes obsolete, so it is
         recommended only to invoke this after the root trans-id has become
         irrelevant.
-
         """
         new_roots = [k for k, v in self._new_parent.iteritems() if v is
                      ROOT_PARENT]
@@ -239,11 +228,13 @@
             self._new_root = new_roots[0]
             return
         old_new_root = new_roots[0]
+        # TODO: What to do if a old_new_root is present, but self._new_root is
+        #       not listed as being removed? This code explicitly unversions
+        #       the old root and versions it with the new file_id. Though that
+        #       seems like an incomplete delta
+
         # unversion the new root's directory.
-        if self.final_kind(self._new_root) is None:
-            file_id = self.final_file_id(old_new_root)
-        else:
-            file_id = self.final_file_id(self._new_root)
+        file_id = self.final_file_id(old_new_root)
         if old_new_root in self._new_id:
             self.cancel_versioning(old_new_root)
         else:
@@ -253,8 +244,7 @@
         if (self.tree_file_id(self._new_root) is not None and
             self._new_root not in self._removed_id):
             self.unversion_file(self._new_root)
-        if file_id is not None:
-            self.version_file(file_id, self._new_root)
+        self.version_file(file_id, self._new_root)
 
         # Now move children of new root into old root directory.
         # Ensure all children are registered with the transaction, but don't
@@ -394,44 +384,18 @@
         return sorted(FinalPaths(self).get_paths(new_ids))
 
     def _inventory_altered(self):
-        """Determine which trans_ids need new Inventory entries.
-
-        An new entry is needed when anything that would be reflected by an
-        inventory entry changes, including file name, file_id, parent file_id,
-        file kind, and the execute bit.
-
-        Some care is taken to return entries with real changes, not cases
-        where the value is deleted and then restored to its original value,
-        but some actually unchanged values may be returned.
-
-        :returns: A list of (path, trans_id) for all items requiring an
-            inventory change. Ordered by path.
-        """
-        changed_ids = set()
-        # Find entries whose file_ids are new (or changed).
-        new_file_id = set(t for t in self._new_id
-                          if self._new_id[t] != self.tree_file_id(t))
-        for id_set in [self._new_name, self._new_parent, new_file_id,
+        """Get the trans_ids and paths of files needing new inv entries."""
+        new_ids = set()
+        for id_set in [self._new_name, self._new_parent, self._new_id,
                        self._new_executability]:
-            changed_ids.update(id_set)
-        # removing implies a kind change
+            new_ids.update(id_set)
         changed_kind = set(self._removed_contents)
-        # so does adding
         changed_kind.intersection_update(self._new_contents)
-        # Ignore entries that are already known to have changed.
-        changed_kind.difference_update(changed_ids)
-        #  to keep only the truly changed ones
+        changed_kind.difference_update(new_ids)
         changed_kind = (t for t in changed_kind
                         if self.tree_kind(t) != self.final_kind(t))
-        # all kind changes will alter the inventory
-        changed_ids.update(changed_kind)
-        # To find entries with changed parent_ids, find parents which existed,
-        # but changed file_id.
-        changed_file_id = set(t for t in new_file_id if t in self._removed_id)
-        # Now add all their children to the set.
-        for parent_trans_id in new_file_id:
-            changed_ids.update(self.iter_tree_children(parent_trans_id))
-        return sorted(FinalPaths(self).get_paths(changed_ids))
+        new_ids.update(changed_kind)
+        return sorted(FinalPaths(self).get_paths(new_ids))
 
     def final_kind(self, trans_id):
         """Determine the final file kind, after any changes applied.
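
Whichever variant is used, _inventory_altered is built on plain set algebra:
union the trans-ids whose name, parent, file id or executability changed, then
add the ids whose kind may have changed (content removed and re-added) unless
they are already accounted for. The bookkeeping in isolation, with toy sets
standing in for the transform's state:

    # Toy stand-ins for the transform's _new_* and *_contents collections.
    new_name = {'id1'}
    new_parent = {'id2'}
    new_file_id = {'id3'}
    new_executability = set()
    removed_contents = {'id1', 'id4', 'id5'}
    new_contents = {'id4', 'id5', 'id6'}

    changed_ids = set()
    for id_set in (new_name, new_parent, new_file_id, new_executability):
        changed_ids.update(id_set)

    # Removing and re-adding content implies a possible kind change...
    changed_kind = removed_contents & new_contents
    # ...but ids already known to have changed need no second look.
    changed_kind -= changed_ids

    changed_ids |= changed_kind
    print(sorted(changed_ids))   # ['id1', 'id2', 'id3', 'id4', 'id5']
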
@@ -1190,7 +1154,6 @@
         self._deletiondir = None
         # A mapping of transform ids to their limbo filename
         self._limbo_files = {}
-        self._possibly_stale_limbo_files = set()
         # A mapping of transform ids to a set of the transform ids of children
         # that their limbo directory has
         self._limbo_children = {}
@@ -1209,18 +1172,11 @@
         if self._tree is None:
             return
         try:
-            limbo_paths = self._limbo_files.values() + list(
-                self._possibly_stale_limbo_files)
-            limbo_paths = sorted(limbo_paths, reverse=True)
-            for path in limbo_paths:
-                try:
-                    delete_any(path)
-                except OSError, e:
-                    if e.errno != errno.ENOENT:
-                        raise
-                    # XXX: warn? perhaps we just got interrupted at an
-                    # inconvenient moment, but perhaps files are disappearing
-                    # from under us?
+            entries = [(self._limbo_name(t), t, k) for t, k in
+                       self._new_contents.iteritems()]
+            entries.sort(reverse=True)
+            for path, trans_id, kind in entries:
+                delete_any(path)
             try:
                 delete_any(self._limbodir)
             except OSError:
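
The longer finalize() variant above deliberately tolerates limbo files that
have already disappeared: ENOENT is swallowed, anything else still propagates.
The same best-effort pattern in isolation (a generic sketch, not bzrlib's
delete_any helper):

    import errno
    import os

    def remove_if_present(path):
        """Delete path, ignoring the case where it is already gone."""
        try:
            os.unlink(path)
        except OSError, e:
            if e.errno != errno.ENOENT:
                raise    # permission problems and the like still surface
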
@@ -1275,14 +1231,11 @@
         entries from _limbo_files, because they are now stale.
         """
         for trans_id in trans_ids:
-            old_path = self._limbo_files[trans_id]
-            self._possibly_stale_limbo_files.add(old_path)
-            del self._limbo_files[trans_id]
+            old_path = self._limbo_files.pop(trans_id)
             if trans_id not in self._new_contents:
                 continue
             new_path = self._limbo_name(trans_id)
             os.rename(old_path, new_path)
-            self._possibly_stale_limbo_files.remove(old_path)
             for descendant in self._limbo_descendants(trans_id):
                 desc_path = self._limbo_files[descendant]
                 desc_path = new_path + desc_path[len(old_path):]
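
The extra bookkeeping around os.rename records each limbo path as possibly
stale before the rename and clears it afterwards, so an interruption between
the two steps still leaves finalize() with a path it knows it may have to
delete. The idea in miniature, with hypothetical names rather than the real
TreeTransform attributes:

    import os

    class RenameTracker(object):
        """Track paths that could be left behind by an interrupted rename."""

        def __init__(self):
            self.possibly_stale = set()

        def rename(self, old_path, new_path):
            self.possibly_stale.add(old_path)      # recorded before touching disk
            os.rename(old_path, new_path)
            self.possibly_stale.discard(old_path)  # success: nothing is stale

    # On cleanup, anything still in possibly_stale is safe to try deleting.
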
@@ -1312,7 +1265,14 @@
         name = self._limbo_name(trans_id)
         f = open(name, 'wb')
         try:
-            unique_add(self._new_contents, trans_id, 'file')
+            try:
+                unique_add(self._new_contents, trans_id, 'file')
+            except:
+                # Clean up the file, it never got registered so
+                # TreeTransform.finalize() won't clean it up.
+                f.close()
+                os.unlink(name)
+                raise
             f.writelines(contents)
         finally:
             f.close()
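
The nested try in create_file exists so that a file already written to limbo
is closed and unlinked if it cannot be registered in _new_contents; otherwise
finalize() would never learn about it and the temporary file would leak. A
stripped-down version of the pattern, with a plain dict standing in for the
registry and for unique_add:

    import os

    def write_registered(registry, key, name, contents):
        """Create name on disk, but keep it only if it can be registered."""
        f = open(name, 'wb')
        try:
            try:
                if key in registry:
                    raise ValueError('duplicate key: %r' % (key,))
                registry[key] = 'file'
            except:
                # Registration failed: nothing else will ever clean this
                # file up, so do it here before re-raising.
                f.close()
                os.unlink(name)
                raise
            f.writelines(contents)
        finally:
            f.close()
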
@@ -1727,7 +1687,7 @@
         child_pb = ui.ui_factory.nested_progress_bar()
         try:
             if precomputed_delta is None:
-                child_pb.update(gettext('Apply phase'), 0, 2)
+                child_pb.update('Apply phase', 0, 2)
                 inventory_delta = self._generate_inventory_delta()
                 offset = 1
             else:
@@ -1738,9 +1698,9 @@
             else:
                 mover = _mover
             try:
-                child_pb.update(gettext('Apply phase'), 0 + offset, 2 + offset)
+                child_pb.update('Apply phase', 0 + offset, 2 + offset)
                 self._apply_removals(mover)
-                child_pb.update(gettext('Apply phase'), 1 + offset, 2 + offset)
+                child_pb.update('Apply phase', 1 + offset, 2 + offset)
                 modified_paths = self._apply_insertions(mover)
             except:
                 mover.rollback()
@@ -1749,8 +1709,6 @@
                 mover.apply_deletions()
         finally:
             child_pb.finished()
-        if self.final_file_id(self.root) is None:
-            inventory_delta = [e for e in inventory_delta if e[0] != '']
         self._tree.apply_inventory_delta(inventory_delta)
         self._apply_observed_sha1s()
         self._done = True
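
Most of the hunks in apply() and its helpers differ only in whether the
progress-bar text is passed through gettext from bzrlib.i18n, which marks the
string for translation before it reaches the progress bar's update call; the
counters are left alone. The general shape, falling back to the stdlib gettext
when bzrlib's helper is not importable (report is an illustrative helper, not
part of bzrlib):

    try:
        from bzrlib.i18n import gettext
    except ImportError:
        from gettext import gettext   # same calling convention

    def report(pb, step, total):
        # Only the human-readable message is translatable.
        pb.update(gettext('Apply phase'), step, total)
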
@@ -1766,7 +1724,7 @@
         try:
             for num, trans_id in enumerate(self._removed_id):
                 if (num % 10) == 0:
-                    child_pb.update(gettext('removing file'), num, total_entries)
+                    child_pb.update('removing file', num, total_entries)
                 if trans_id == self._new_root:
                     file_id = self._tree.get_root_id()
                 else:
@@ -1784,7 +1742,7 @@
             final_kinds = {}
             for num, (path, trans_id) in enumerate(new_paths):
                 if (num % 10) == 0:
-                    child_pb.update(gettext('adding file'),
+                    child_pb.update('adding file',
                                     num + len(self._removed_id), total_entries)
                 file_id = new_path_file_ids[trans_id]
                 if file_id is None:
@@ -1830,11 +1788,9 @@
         tree_paths.sort(reverse=True)
         child_pb = ui.ui_factory.nested_progress_bar()
         try:
-            for num, (path, trans_id) in enumerate(tree_paths):
-                # do not attempt to move root into a subdirectory of itself.
-                if path == '':
-                    continue
-                child_pb.update(gettext('removing file'), num, len(tree_paths))
+            for num, data in enumerate(tree_paths):
+                path, trans_id = data
+                child_pb.update('removing file', num, len(tree_paths))
                 full_path = self._tree.abspath(path)
                 if trans_id in self._removed_contents:
                     delete_path = os.path.join(self._deletiondir, trans_id)
@@ -1869,7 +1825,7 @@
         try:
             for num, (path, trans_id) in enumerate(new_paths):
                 if (num % 10) == 0:
-                    child_pb.update(gettext('adding file'), num, len(new_paths))
+                    child_pb.update('adding file', num, len(new_paths))
                 full_path = self._tree.abspath(path)
                 if trans_id in self._needs_rename:
                     try:
@@ -1895,11 +1851,6 @@
                     self._observed_sha1s[trans_id] = (o_sha1, st)
         finally:
             child_pb.finished()
-        for path, trans_id in new_paths:
-            # new_paths includes stuff like workingtree conflicts. Only the
-            # stuff in new_contents actually comes from limbo.
-            if trans_id in self._limbo_files:
-                del self._limbo_files[trans_id]
         self._new_contents.clear()
         return modified_paths
 
@@ -2259,18 +2210,6 @@
         else:
             return None
 
-    def get_file_verifier(self, file_id, path=None, stat_value=None):
-        trans_id = self._transform.trans_id_file_id(file_id)
-        kind = self._transform._new_contents.get(trans_id)
-        if kind is None:
-            return self._transform._tree.get_file_verifier(file_id)
-        if kind == 'file':
-            fileobj = self.get_file(file_id)
-            try:
-                return ("SHA1", sha_file(fileobj))
-            finally:
-                fileobj.close()
-
     def get_file_sha1(self, file_id, path=None, stat_value=None):
         trans_id = self._transform.trans_id_file_id(file_id)
         kind = self._transform._new_contents.get(trans_id)
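
get_file_verifier returns a ("SHA1", digest) pair so callers can compare file
content without hard-coding a hash algorithm; for files still in limbo it
hashes the open file object directly. A self-contained sketch of that
calculation using hashlib (bzrlib's sha_file helper plays the same role in the
hunk above):

    import hashlib

    def sha1_verifier(fileobj):
        """Return a ('SHA1', hexdigest) pair for an open file object."""
        s = hashlib.sha1()
        for chunk in iter(lambda: fileobj.read(65536), ''):
            s.update(chunk)
        return ('SHA1', s.hexdigest())

    # e.g.  f = open('setup.py', 'rb'); print(sha1_verifier(f)); f.close()
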
@@ -2584,7 +2523,7 @@
                     existing_files.update(f[0] for f in files)
             for num, (tree_path, entry) in \
                 enumerate(tree.inventory.iter_entries_by_dir()):
-                pb.update(gettext("Building tree"), num - len(deferred_contents), total)
+                pb.update("Building tree", num - len(deferred_contents), total)
                 if entry.parent_id is None:
                     continue
                 reparent = False
@@ -2674,7 +2613,7 @@
                 new_desired_files.append((file_id,
                     (trans_id, tree_path, text_sha1)))
                 continue
-            pb.update(gettext('Adding file contents'), count + offset, total)
+            pb.update('Adding file contents', count + offset, total)
             if hardlink:
                 tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
                                    trans_id)
@@ -2701,7 +2640,7 @@
             contents = filtered_output_bytes(contents, filters,
                 ContentFilterContext(tree_path, tree))
         tt.create_file(contents, trans_id, sha1=text_sha1)
-        pb.update(gettext('Adding file contents'), count + offset, total)
+        pb.update('Adding file contents', count + offset, total)
 
 
 def _reparent_children(tt, old_parent, new_parent):
@@ -2953,7 +2892,7 @@
                         if basis_tree is None:
                             basis_tree = working_tree.basis_tree()
                             basis_tree.lock_read()
-                        if basis_tree.has_id(file_id):
+                        if file_id in basis_tree:
                             if wt_sha1 != basis_tree.get_file_sha1(file_id):
                                 keep_content = True
                         elif target_kind is None and not target_versioned:
@@ -2989,8 +2928,8 @@
                         basis_tree = working_tree.basis_tree()
                         basis_tree.lock_read()
                     new_sha1 = target_tree.get_file_sha1(file_id)
-                    if (basis_tree.has_id(file_id) and
-                        new_sha1 == basis_tree.get_file_sha1(file_id)):
+                    if (file_id in basis_tree and new_sha1 ==
+                        basis_tree.get_file_sha1(file_id)):
                         if file_id in merge_modified:
                             del merge_modified[file_id]
                     else:
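
The last two hunks only swap the containment check: "file_id in basis_tree"
goes through the tree's __contains__, while basis_tree.has_id(file_id) names
the intent explicitly. Code that has to run against both API generations can
use a small shim (a hypothetical helper, not part of bzrlib):

    def tree_has_id(tree, file_id):
        """Prefer Tree.has_id() when available, else fall back to __contains__."""
        has_id = getattr(tree, 'has_id', None)
        if has_id is not None:
            return has_id(file_id)
        return file_id in tree
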
@@ -3048,7 +2987,7 @@
     pb = ui.ui_factory.nested_progress_bar()
     try:
         for n in range(10):
-            pb.update(gettext('Resolution pass'), n+1, 10)
+            pb.update('Resolution pass', n+1, 10)
             conflicts = tt.find_conflicts()
             if len(conflicts) == 0:
                 return new_conflicts
@@ -3147,7 +3086,7 @@
         elif c_type == 'unversioned parent':
             file_id = tt.inactive_file_id(conflict[1])
             # special-case the other tree root (move its children instead)
-            if path_tree and path_tree.has_id(file_id):
+            if path_tree and file_id in path_tree:
                 if path_tree.path2id('') == file_id:
                     # This is the root entry, skip it
                     continue
@@ -3171,11 +3110,13 @@
 
 def cook_conflicts(raw_conflicts, tt):
     """Generate a list of cooked conflicts, sorted by file path"""
+    from bzrlib.conflicts import Conflict
     conflict_iter = iter_cook_conflicts(raw_conflicts, tt)
-    return sorted(conflict_iter, key=conflicts.Conflict.sort_key)
+    return sorted(conflict_iter, key=Conflict.sort_key)
 
 
 def iter_cook_conflicts(raw_conflicts, tt):
+    from bzrlib.conflicts import Conflict
     fp = FinalPaths(tt)
     for conflict in raw_conflicts:
         c_type = conflict[0]
@@ -3183,17 +3124,16 @@
         modified_path = fp.get_path(conflict[2])
         modified_id = tt.final_file_id(conflict[2])
         if len(conflict) == 3:
-            yield conflicts.Conflict.factory(
-                c_type, action=action, path=modified_path, file_id=modified_id)
+            yield Conflict.factory(c_type, action=action, path=modified_path,
+                                     file_id=modified_id)
 
         else:
             conflicting_path = fp.get_path(conflict[3])
             conflicting_id = tt.final_file_id(conflict[3])
-            yield conflicts.Conflict.factory(
-                c_type, action=action, path=modified_path,
-                file_id=modified_id,
-                conflict_path=conflicting_path,
-                conflict_file_id=conflicting_id)
+            yield Conflict.factory(c_type, action=action, path=modified_path,
+                                   file_id=modified_id,
+                                   conflict_path=conflicting_path,
+                                   conflict_file_id=conflicting_id)
 
 
 class _FileMover(object):
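
Both variants of the conflict cooking code end up in the same place: build
Conflict objects through Conflict.factory() and sort them with
Conflict.sort_key, the only difference being whether the class is reached via
the lazily imported conflicts module or a local import. A small consuming
sketch, assuming a bzrlib installation (the conflict types and paths below are
made up for illustration):

    from bzrlib import conflicts

    cooked = [
        conflicts.Conflict.factory('path conflict',
                                   path='b/file.txt',
                                   conflict_path='b/file.txt.moved'),
        conflicts.Conflict.factory('text conflict', path='a/other.txt'),
    ]
    cooked.sort(key=conflicts.Conflict.sort_key)
    print([c.path for c in cooked])   # expected to come out ordered by path
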