~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/transform.py

  • Committer: Martin
  • Date: 2011-04-15 21:22:57 UTC
  • mto: This revision was merged to the branch mainline in revision 5797.
  • Revision ID: gzlist@googlemail.com-20110415212257-jgtovwwp4be7egd9
Add release notes

@@ -1,4 +1,4 @@
-# Copyright (C) 2006-2010 Canonical Ltd
+# Copyright (C) 2006-2011 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -23,6 +23,8 @@
     errors,
     lazy_import,
     registry,
+    trace,
+    tree,
     )
 lazy_import.lazy_import(globals(), """
 from bzrlib import (
@@ -36,8 +38,8 @@
     multiparent,
     osutils,
     revision as _mod_revision,
-    trace,
     ui,
+    urlutils,
     )
 """)
 from bzrlib.errors import (DuplicateKey, MalformedTransform, NoSuchFile,
@@ -45,27 +47,21 @@
                            ExistingLimbo, ImmortalLimbo, NoFinalPath,
                            UnableCreateSymlink)
 from bzrlib.filters import filtered_output_bytes, ContentFilterContext
-from bzrlib.inventory import InventoryEntry
 from bzrlib.osutils import (
     delete_any,
     file_kind,
     has_symlinks,
-    lexists,
     pathjoin,
     sha_file,
     splitpath,
     supports_executable,
-)
+    )
 from bzrlib.progress import ProgressPhase
 from bzrlib.symbol_versioning import (
-        deprecated_function,
-        deprecated_in,
-        deprecated_method,
-        )
-from bzrlib.trace import mutter, warning
-from bzrlib import tree
-import bzrlib.ui
-import bzrlib.urlutils as urlutils
+    deprecated_function,
+    deprecated_in,
+    deprecated_method,
+    )
 
 
 ROOT_PARENT = "root-parent"
@@ -106,6 +102,8 @@
         self._new_parent = {}
         # mapping of trans_id with new contents -> new file_kind
         self._new_contents = {}
+        # mapping of trans_id => (sha1 of content, stat_value)
+        self._observed_sha1s = {}
         # Set of trans_ids whose contents will be removed
         self._removed_contents = set()
         # Mapping of trans_id -> new execute-bit value
@@ -130,7 +128,7 @@
             self._new_root = self.trans_id_tree_file_id(root_id)
         else:
             self._new_root = None
-        # Indictor of whether the transform has been applied
+        # Indicator of whether the transform has been applied
         self._done = False
         # A progress bar
         self._pb = pb
@@ -528,6 +526,8 @@
         for trans_id in self._removed_id:
             file_id = self.tree_file_id(trans_id)
             if file_id is not None:
+                # XXX: This seems like something that should go via a different
+                #      indirection.
                 if self._tree.inventory[file_id].kind == 'directory':
                     parents.append(trans_id)
             elif self.tree_kind(trans_id) == 'directory':
@@ -628,7 +628,7 @@
             if kind is None:
                 conflicts.append(('versioning no contents', trans_id))
                 continue
-            if not InventoryEntry.versionable_kind(kind):
+            if not inventory.InventoryEntry.versionable_kind(kind):
                 conflicts.append(('versioning bad kind', trans_id, kind))
         return conflicts
 
@@ -666,9 +666,14 @@
         if (self._new_name, self._new_parent) == ({}, {}):
             return conflicts
         for children in by_parent.itervalues():
-            name_ids = [(self.final_name(t), t) for t in children]
-            if not self._case_sensitive_target:
-                name_ids = [(n.lower(), t) for n, t in name_ids]
+            name_ids = []
+            for child_tid in children:
+                name = self.final_name(child_tid)
+                if name is not None:
+                    # Keep children only if they still exist in the end
+                    if not self._case_sensitive_target:
+                        name = name.lower()
+                    name_ids.append((name, child_tid))
             name_ids.sort()
             last_name = None
             last_trans_id = None
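
The rewritten duplicate-name check above only registers names for children that will still exist once the transform is applied, lowercasing them first when the target filesystem is case-insensitive, and then relies on a sort to bring collisions next to each other. A minimal standalone sketch of that idea follows; it is plain Python rather than the TreeTransform API, and final_names is a hypothetical input mapping used only for illustration.

def duplicate_name_conflicts(final_names, case_sensitive=True):
    """Report entries whose final names would collide.

    final_names maps an entry id to its final name, or to None when the
    entry will no longer exist once the transform is applied (mirroring
    final_name() returning None above).
    """
    name_ids = []
    for entry_id, name in final_names.items():
        if name is None:
            # The entry goes away, so it cannot collide with anything.
            continue
        if not case_sensitive:
            name = name.lower()
        name_ids.append((name, entry_id))
    name_ids.sort()
    conflicts = []
    # After sorting, colliding names sit next to each other.
    for (prev_name, prev_id), (name, entry_id) in zip(name_ids, name_ids[1:]):
        if name == prev_name:
            conflicts.append(('duplicate', prev_id, entry_id, name))
    return conflicts

print(duplicate_name_conflicts({1: 'README', 2: 'readme', 3: None},
                               case_sensitive=False))
# [('duplicate', 1, 2, 'readme')]

Sorting the (name, id) pairs keeps the scan linear after the sort, which is the same trick the loop above sets up for the last_name/last_trans_id comparison that follows it.
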
@@ -698,27 +703,29 @@
         return conflicts
 
     def _parent_type_conflicts(self, by_parent):
-        """parents must have directory 'contents'."""
+        """Children must have a directory parent"""
         conflicts = []
         for parent_id, children in by_parent.iteritems():
             if parent_id is ROOT_PARENT:
                 continue
-            if not self._any_contents(children):
+            no_children = True
+            for child_id in children:
+                if self.final_kind(child_id) is not None:
+                    no_children = False
+                    break
+            if no_children:
                 continue
+            # There is at least a child, so we need an existing directory to
+            # contain it.
             kind = self.final_kind(parent_id)
             if kind is None:
+                # The directory will be deleted
                 conflicts.append(('missing parent', parent_id))
             elif kind != "directory":
+                # Meh, we need a *directory* to put something in it
                 conflicts.append(('non-directory parent', parent_id))
         return conflicts
 
-    def _any_contents(self, trans_ids):
-        """Return true if any of the trans_ids, will have contents."""
-        for trans_id in trans_ids:
-            if self.final_kind(trans_id) is not None:
-                return True
-        return False
-
     def _set_executability(self, path, trans_id):
         """Set the executability of versioned files """
         if supports_executable():
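
As rewritten above, _parent_type_conflicts only reports a parent when at least one of its children will still have contents and the parent itself will either be gone ('missing parent') or end up as something other than a directory ('non-directory parent'). A simplified standalone version of that rule, with hypothetical final_kinds and children_by_parent mappings standing in for the transform's internal state:

def parent_type_conflicts(final_kinds, children_by_parent):
    """final_kinds maps an id to its final kind ('file', 'directory', ...)
    or None when the entry will not exist; children_by_parent maps a
    parent id to the ids filed under it."""
    conflicts = []
    for parent_id, children in children_by_parent.items():
        if not any(final_kinds.get(c) is not None for c in children):
            # No surviving children, so the parent's final kind is moot.
            continue
        kind = final_kinds.get(parent_id)
        if kind is None:
            # Something still lives under a parent that is going away.
            conflicts.append(('missing parent', parent_id))
        elif kind != 'directory':
            conflicts.append(('non-directory parent', parent_id))
    return conflicts

print(parent_type_conflicts({'p': 'file', 'c': 'file'}, {'p': ['c']}))
# [('non-directory parent', 'p')]
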
@@ -746,7 +753,7 @@
         return trans_id
 
     def new_file(self, name, parent_id, contents, file_id=None,
-                 executable=None):
+                 executable=None, sha1=None):
         """Convenience method to create files.
 
         name is the name of the file to create.
@@ -759,7 +766,7 @@
         trans_id = self._new_entry(name, parent_id, file_id)
         # TODO: rather than scheduling a set_executable call,
         # have create_file create the file with the right mode.
-        self.create_file(contents, trans_id)
+        self.create_file(contents, trans_id, sha1=sha1)
         if executable is not None:
             self.set_executability(executable, trans_id)
         return trans_id
@@ -815,9 +822,14 @@
         """
         orphans = []
         # Find the potential orphans, stop if one item should be kept
-        for c in self.by_parent()[dir_id]:
-            if self.final_file_id(c) is None:
-                orphans.append(c)
+        for child_tid in self.by_parent()[dir_id]:
+            if child_tid in self._removed_contents:
+                # The child is removed as part of the transform. Since it was
+                # versioned before, it's not an orphan
+                continue
+            elif self.final_file_id(child_tid) is None:
+                # The child is not versioned
+                orphans.append(child_tid)
             else:
                 # We have a versioned file here, searching for orphans is
                 # meaningless.
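
The new orphan scan above skips children whose contents are being removed by the transform itself, collects the unversioned ones as orphan candidates, and gives up as soon as it meets a surviving versioned child. Roughly, as a standalone sketch with illustrative names (not the TreeTransform attributes):

def orphan_candidates(children, removed_contents, versioned_ids):
    """Return unversioned children worth orphaning, or None as soon as a
    surviving versioned child makes the search pointless."""
    orphans = []
    for child in children:
        if child in removed_contents:
            # Removed by the transform itself; it was versioned before,
            # so it is not an orphan.
            continue
        if child not in versioned_ids:
            orphans.append(child)
        else:
            # A versioned child survives; stop looking.
            return None
    return orphans

print(orphan_candidates(['a', 'b', 'c'], removed_contents={'a'},
                        versioned_ids=set()))
# ['b', 'c']
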
@@ -1236,16 +1248,19 @@
             descendants.update(self._limbo_descendants(descendant))
         return descendants
 
-    def create_file(self, contents, trans_id, mode_id=None):
+    def create_file(self, contents, trans_id, mode_id=None, sha1=None):
         """Schedule creation of a new file.
 
-        See also new_file.
-
-        Contents is an iterator of strings, all of which will be written
-        to the target destination.
-
-        New file takes the permissions of any existing file with that id,
-        unless mode_id is specified.
+        :seealso: new_file.
+
+        :param contents: an iterator of strings, all of which will be written
+            to the target destination.
+        :param trans_id: TreeTransform handle
+        :param mode_id: If not None, force the mode of the target file to match
+            the mode of the object referenced by mode_id.
+            Otherwise, we will try to preserve mode bits of an existing file.
+        :param sha1: If the sha1 of this content is already known, pass it in.
+            We can use it to prevent future sha1 computations.
         """
         name = self._limbo_name(trans_id)
         f = open(name, 'wb')
@@ -1258,12 +1273,16 @@
                 f.close()
                 os.unlink(name)
                 raise
-
             f.writelines(contents)
         finally:
             f.close()
         self._set_mtime(name)
         self._set_mode(trans_id, mode_id, S_ISREG)
+        # It is unfortunate we have to use lstat instead of fstat, but we just
+        # used utime and chmod on the file, so we need the accurate final
+        # details.
+        if sha1 is not None:
+            self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
 
     def _read_file_chunks(self, trans_id):
         cur_file = open(self._limbo_name(trans_id), 'rb')
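
When create_file is handed a sha1 it now pairs it with an lstat of the freshly written limbo file (lstat rather than fstat because utime and chmod have already run), so later passes can reuse the checksum instead of re-hashing the content. A self-contained sketch of that caching idea using only the standard library; the function and the observed dict are illustrative, not bzrlib API:

import hashlib
import os
import tempfile

def write_with_observed_sha1(path, chunks, observed, sha1=None):
    """Write chunks to path and, when the caller already knows the
    content's sha1, remember (sha1, stat) so nothing needs to re-hash
    the file later."""
    with open(path, 'wb') as out:
        out.writelines(chunks)
    if sha1 is not None:
        # Stat after the write (and after any utime/chmod the real code
        # performs), so the recorded value matches what a later scan sees.
        observed[path] = (sha1, os.lstat(path))

observed = {}
data = b'some file content\n'
target = os.path.join(tempfile.mkdtemp(), 'example.txt')
write_with_observed_sha1(target, [data], observed,
                         sha1=hashlib.sha1(data).hexdigest())
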
@@ -1328,6 +1347,8 @@
     def cancel_creation(self, trans_id):
         """Cancel the creation of new file contents."""
         del self._new_contents[trans_id]
+        if trans_id in self._observed_sha1s:
+            del self._observed_sha1s[trans_id]
         children = self._limbo_children.get(trans_id)
         # if this is a limbo directory with children, move them before removing
         # the directory
@@ -1349,8 +1370,8 @@
         if orphan_policy is None:
             orphan_policy = default_policy
         if orphan_policy not in orphaning_registry:
-            trace.warning('%s (from %s) is not a known policy, defaulting to %s'
-                          % (orphan_policy, conf_var_name, default_policy))
+            trace.warning('%s (from %s) is not a known policy, defaulting '
+                'to %s' % (orphan_policy, conf_var_name, default_policy))
             orphan_policy = default_policy
         handle_orphan = orphaning_registry.get(orphan_policy)
         handle_orphan(self, trans_id, parent_id)
@@ -1663,7 +1684,7 @@
         """
         if not no_conflicts:
             self._check_malformed()
-        child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
+        child_pb = ui.ui_factory.nested_progress_bar()
         try:
             if precomputed_delta is None:
                 child_pb.update('Apply phase', 0, 2)
@@ -1689,6 +1710,7 @@
         finally:
             child_pb.finished()
         self._tree.apply_inventory_delta(inventory_delta)
+        self._apply_observed_sha1s()
         self._done = True
         self.finalize()
         return _TransformResults(modified_paths, self.rename_count)
@@ -1696,7 +1718,7 @@
     def _generate_inventory_delta(self):
         """Generate an inventory delta for the current transform."""
         inventory_delta = []
-        child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
+        child_pb = ui.ui_factory.nested_progress_bar()
         new_paths = self._inventory_altered()
         total_entries = len(new_paths) + len(self._removed_id)
         try:
@@ -1764,7 +1786,7 @@
         """
         tree_paths = list(self._tree_path_ids.iteritems())
         tree_paths.sort(reverse=True)
-        child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
+        child_pb = ui.ui_factory.nested_progress_bar()
        try:
             for num, data in enumerate(tree_paths):
                 path, trans_id = data
@@ -1799,7 +1821,7 @@
         modified_paths = []
         new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
                                  new_paths)
-        child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
+        child_pb = ui.ui_factory.nested_progress_bar()
         try:
             for num, (path, trans_id) in enumerate(new_paths):
                 if (num % 10) == 0:
@@ -1814,17 +1836,47 @@
                             raise
                     else:
                         self.rename_count += 1
+                    # TODO: if trans_id in self._observed_sha1s, we should
+                    #       re-stat the final target, since ctime will be
+                    #       updated by the change.
                 if (trans_id in self._new_contents or
                     self.path_changed(trans_id)):
                     if trans_id in self._new_contents:
                         modified_paths.append(full_path)
                 if trans_id in self._new_executability:
                     self._set_executability(path, trans_id)
+                if trans_id in self._observed_sha1s:
+                    o_sha1, o_st_val = self._observed_sha1s[trans_id]
+                    st = osutils.lstat(full_path)
+                    self._observed_sha1s[trans_id] = (o_sha1, st)
         finally:
             child_pb.finished()
         self._new_contents.clear()
         return modified_paths
 
+    def _apply_observed_sha1s(self):
+        """After we have finished renaming everything, update observed sha1s
+
+        This has to be done after self._tree.apply_inventory_delta, otherwise
+        it doesn't know anything about the files we are updating. Also, we want
+        to do this as late as possible, so that most entries end up cached.
+        """
+        # TODO: this doesn't update the stat information for directories. So
+        #       the first 'bzr status' will still need to rewrite
+        #       .bzr/checkout/dirstate. However, we at least don't need to
+        #       re-read all of the files.
+        # TODO: If the operation took a while, we could do a time.sleep(3) here
+        #       to allow the clock to tick over and ensure we won't have any
+        #       problems. (we could observe start time, and finish time, and if
+        #       it is less than eg 10% overhead, add a sleep call.)
+        paths = FinalPaths(self)
+        for trans_id, observed in self._observed_sha1s.iteritems():
+            path = paths.get_path(trans_id)
+            # We could get the file_id, but dirstate prefers to use the path
+            # anyway, and it is 'cheaper' to determine.
+            # file_id = self._new_id[trans_id]
+            self._tree._observed_sha1(None, path, observed)
+
 
 class TransformPreview(DiskTreeTransform):
     """A TreeTransform for generating preview trees.
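
The new _apply_observed_sha1s above runs only after the inventory delta is applied and after every rename, because the rename and chmod change ctime and the tree has to know about the files before their fingerprints can be cached. The diff re-stats each file during the rename pass and then feeds the stored pairs to the tree; the sketch below collapses those two steps into one loop, with note_sha1 as a hypothetical stand-in for the dirstate update:

import os

def apply_observed_sha1s(observed, final_paths, note_sha1):
    """observed maps trans_id -> (sha1, stat taken at write time);
    final_paths maps trans_id -> the path the file ended up at;
    note_sha1 is a callable standing in for the cache update that
    self._tree._observed_sha1 performs in the diff."""
    for trans_id, (sha1, _stale_stat) in observed.items():
        path = final_paths[trans_id]
        # Re-stat at the final location: the rename (and any chmod)
        # changed ctime, so the stat captured in limbo is stale.
        note_sha1(path, sha1, os.lstat(path))
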
@@ -1880,7 +1932,7 @@
         raise NotImplementedError(self.new_orphan)
 
 
-class _PreviewTree(tree.Tree):
+class _PreviewTree(tree.InventoryTree):
     """Partial implementation of Tree to support show_diff_trees"""
 
     def __init__(self, transform):
@@ -2417,7 +2469,7 @@
         if num > 0:  # more than just a root
             raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
     file_trans_id = {}
-    top_pb = bzrlib.ui.ui_factory.nested_progress_bar()
+    top_pb = ui.ui_factory.nested_progress_bar()
     pp = ProgressPhase("Build phase", 2, top_pb)
     if tree.inventory.root is not None:
         # This is kind of a hack: we should be altering the root
@@ -2436,7 +2488,7 @@
         pp.next_phase()
         file_trans_id[wt.get_root_id()] = \
             tt.trans_id_tree_file_id(wt.get_root_id())
-        pb = bzrlib.ui.ui_factory.nested_progress_bar()
+        pb = ui.ui_factory.nested_progress_bar()
         try:
             deferred_contents = []
             num = 0
@@ -2489,7 +2541,7 @@
                     executable = tree.is_executable(file_id, tree_path)
                     if executable:
                         tt.set_executability(executable, trans_id)
-                    trans_data = (trans_id, tree_path)
+                    trans_data = (trans_id, tree_path, entry.text_sha1)
                     deferred_contents.append((file_id, trans_data))
                 else:
                     file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
@@ -2511,7 +2563,7 @@
             precomputed_delta = None
         conflicts = cook_conflicts(raw_conflicts, tt)
         for conflict in conflicts:
-            warning(conflict)
+            trace.warning(conflict)
         try:
             wt.add_conflicts(conflicts)
         except errors.UnsupportedOperation:
@@ -2540,10 +2592,11 @@
         unchanged = dict(unchanged)
         new_desired_files = []
         count = 0
-        for file_id, (trans_id, tree_path) in desired_files:
+        for file_id, (trans_id, tree_path, text_sha1) in desired_files:
             accelerator_path = unchanged.get(file_id)
             if accelerator_path is None:
-                new_desired_files.append((file_id, (trans_id, tree_path)))
+                new_desired_files.append((file_id,
+                    (trans_id, tree_path, text_sha1)))
                 continue
             pb.update('Adding file contents', count + offset, total)
             if hardlink:
@@ -2556,7 +2609,7 @@
                     contents = filtered_output_bytes(contents, filters,
                         ContentFilterContext(tree_path, tree))
                 try:
-                    tt.create_file(contents, trans_id)
+                    tt.create_file(contents, trans_id, sha1=text_sha1)
                 finally:
                     try:
                         contents.close()
@@ -2565,13 +2618,13 @@
                         pass
             count += 1
         offset += count
-    for count, ((trans_id, tree_path), contents) in enumerate(
+    for count, ((trans_id, tree_path, text_sha1), contents) in enumerate(
             tree.iter_files_bytes(new_desired_files)):
         if wt.supports_content_filtering():
             filters = wt._content_filter_stack(tree_path)
             contents = filtered_output_bytes(contents, filters,
                 ContentFilterContext(tree_path, tree))
-        tt.create_file(contents, trans_id)
+        tt.create_file(contents, trans_id, sha1=text_sha1)
         pb.update('Adding file contents', count + offset, total)
 
 
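
build_tree and its helpers now carry the source tree's recorded checksum (entry.text_sha1) alongside each deferred file, so the content written during checkout does not have to be hashed a second time. The shape of that plumbing, as a small illustrative sketch; the names and the stub create_file are assumptions, only the idea of passing sha1 through matches the diff:

import hashlib

def checkout_files(desired_files, create_file):
    """desired_files yields (trans_id, chunks, known_sha1), where
    known_sha1 is the checksum already recorded in the source tree
    (entry.text_sha1 in the diff) and may be None."""
    for trans_id, chunks, known_sha1 in desired_files:
        # Hand the recorded checksum straight through so the writer does
        # not hash the content a second time.
        create_file(chunks, trans_id, sha1=known_sha1)

def fake_create_file(chunks, trans_id, sha1=None):
    # Minimal stand-in that just shows which sha1 arrived with the content.
    print(trans_id, sha1)

data = b'release notes\n'
checkout_files([('trans-1', [data], hashlib.sha1(data).hexdigest())],
               fake_create_file)
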
 
@@ -2752,7 +2805,7 @@
                 unversioned_filter=working_tree.is_ignored)
             delta.report_changes(tt.iter_changes(), change_reporter)
         for conflict in conflicts:
-            warning(conflict)
+            trace.warning(conflict)
         pp.next_phase()
         tt.apply()
         working_tree.set_merge_modified(merge_modified)
@@ -2766,7 +2819,7 @@
 def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
                               backups, pp, basis_tree=None,
                               merge_modified=None):
-    child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
+    child_pb = ui.ui_factory.nested_progress_bar()
     try:
         if merge_modified is None:
             merge_modified = working_tree.merge_modified()
@@ -2775,7 +2828,7 @@
                                       merge_modified, basis_tree)
     finally:
         child_pb.finished()
-    child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
+    child_pb = ui.ui_factory.nested_progress_bar()
     try:
         raw_conflicts = resolve_conflicts(tt, child_pb,
             lambda t, c: conflict_pass(t, c, target_tree))
@@ -3008,7 +3061,8 @@
             file_id = tt.inactive_file_id(conflict[1])
             # special-case the other tree root (move its children instead)
             if path_tree and file_id in path_tree:
-                if path_tree.inventory[file_id].parent_id is None:
+                if path_tree.path2id('') == file_id:
+                    # This is the root entry, skip it
                     continue
             tt.version_file(file_id, conflict[1])
             new_conflicts.add((c_type, 'Versioned directory', conflict[1]))