~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/repofmt/pack_repo.py

  • Committer: Ian Clatworthy
  • Date: 2009-09-02 16:03:51 UTC
  • mto: (4634.39.1 pdf-chm-docs)
  • mto: This revision was merged to the branch mainline in revision 4689.
  • Revision ID: ian.clatworthy@canonical.com-20090902160351-sxptcz3ttc1aencw
first cut at pdf docs via sphinx

@@ -1,4 +1,4 @@
-# Copyright (C) 2005, 2006, 2007, 2008 Canonical Ltd
+# Copyright (C) 2007-2010 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -54,7 +54,7 @@
     revision as _mod_revision,
     )
 
-from bzrlib.decorators import needs_write_lock
+from bzrlib.decorators import needs_write_lock, only_raises
 from bzrlib.btree_index import (
     BTreeGraphIndex,
     BTreeBuilder,
@@ -73,6 +73,7 @@
     )
 from bzrlib.trace import (
     mutter,
+    note,
     warning,
     )
 
@@ -224,10 +225,14 @@
         return self.index_name('text', name)
 
     def _replace_index_with_readonly(self, index_type):
+        unlimited_cache = False
+        if index_type == 'chk':
+            unlimited_cache = True
         setattr(self, index_type + '_index',
             self.index_class(self.index_transport,
                 self.index_name(index_type, self.name),
-                self.index_sizes[self.index_offset(index_type)]))
+                self.index_sizes[self.index_offset(index_type)],
+                unlimited_cache=unlimited_cache))
 
 
 class ExistingPack(Pack):
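
The hunk above asks for an unbounded page cache, but only for the chk
(content-hash-key) index: chk index nodes tend to be revisited heavily
(during fetch, for example), so evicting them from a bounded cache is
counter-productive. A minimal, self-contained sketch of the same
conditional-keyword pattern; DummyIndex and make_readonly_index are
hypothetical stand-ins for bzrlib's index classes:

    class DummyIndex(object):
        def __init__(self, name, size, unlimited_cache=False):
            # A real index keeps an LRU of parsed pages; with
            # unlimited_cache=True that cache never evicts.
            self.name = name
            self.size = size
            self.unlimited_cache = unlimited_cache

    def make_readonly_index(index_type, name, size):
        # Only chk indices opt in to the unbounded cache.
        unlimited_cache = (index_type == 'chk')
        return DummyIndex(name, size, unlimited_cache=unlimited_cache)
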
@@ -1112,7 +1117,7 @@
             iterator is a tuple with:
             index, readv_vector, node_vector. readv_vector is a list ready to
             hand to the transport readv method, and node_vector is a list of
-            (key, eol_flag, references) for the the node retrieved by the
+            (key, eol_flag, references) for the node retrieved by the
             matching readv_vector.
         """
         # group by pack so we do one readv per pack
@@ -1418,6 +1423,9 @@
         # resumed packs
         self._resumed_packs = []
 
+    def __repr__(self):
+        return '%s(%r)' % (self.__class__.__name__, self.repo)
+
     def add_pack_to_memory(self, pack):
         """Make a Pack object available to the repository to satisfy queries.
 
@@ -1537,10 +1545,11 @@
                 self._remove_pack_from_memory(pack)
         # record the newly available packs and stop advertising the old
         # packs
-        result = self._save_pack_names(clear_obsolete_packs=True)
-        # Move the old packs out of the way now they are no longer referenced.
-        for revision_count, packs in pack_operations:
-            self._obsolete_packs(packs)
+        to_be_obsoleted = []
+        for _, packs in pack_operations:
+            to_be_obsoleted.extend(packs)
+        result = self._save_pack_names(clear_obsolete_packs=True,
+                                       obsolete_packs=to_be_obsoleted)
         return result
 
     def _flush_new_pack(self):
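
With this hunk, retiring packs becomes two-phase: the packs to be
obsoleted are collected first, and _save_pack_names (extended later in
this diff) renames them into obsolete_packs/ only after the new
pack-names file is safely written, instead of renaming them up front. A
reduced sketch of that flow, with the collaborators passed in as
hypothetical callables:

    def execute_pack_operations(pack_operations, save_pack_names):
        # Phase 1: collect everything that will become obsolete.
        to_be_obsoleted = []
        for _, packs in pack_operations:
            to_be_obsoleted.extend(packs)
        # Phase 2: the save step obsoletes them once 'pack-names' is
        # rewritten, so readers never see names for renamed-away packs.
        return save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=to_be_obsoleted)
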
@@ -1674,7 +1683,7 @@
             txt_index = self._make_index(name, '.tix')
             sig_index = self._make_index(name, '.six')
             if self.chk_index is not None:
-                chk_index = self._make_index(name, '.cix')
+                chk_index = self._make_index(name, '.cix', unlimited_cache=True)
             else:
                 chk_index = None
             result = ExistingPack(self._pack_transport, name, rev_index,
@@ -1699,7 +1708,8 @@
             txt_index = self._make_index(name, '.tix', resume=True)
             sig_index = self._make_index(name, '.six', resume=True)
             if self.chk_index is not None:
-                chk_index = self._make_index(name, '.cix', resume=True)
+                chk_index = self._make_index(name, '.cix', resume=True,
+                                             unlimited_cache=True)
             else:
                 chk_index = None
             result = self.resumed_pack_factory(name, rev_index, inv_index,
@@ -1735,7 +1745,7 @@
         return self._index_class(self.transport, 'pack-names', None
                 ).iter_all_entries()
 
-    def _make_index(self, name, suffix, resume=False):
+    def _make_index(self, name, suffix, resume=False, unlimited_cache=False):
         size_offset = self._suffix_offsets[suffix]
         index_name = name + suffix
         if resume:
@@ -1744,7 +1754,8 @@
         else:
             transport = self._index_transport
             index_size = self._names[name][size_offset]
-        return self._index_class(transport, index_name, index_size)
+        return self._index_class(transport, index_name, index_size,
+                                 unlimited_cache=unlimited_cache)
 
     def _max_pack_count(self, total_revisions):
         """Return the maximum number of packs to use for total revisions.
@@ -1778,8 +1789,13 @@
         :param return: None.
         """
         for pack in packs:
-            pack.pack_transport.rename(pack.file_name(),
-                '../obsolete_packs/' + pack.file_name())
+            try:
+                pack.pack_transport.rename(pack.file_name(),
+                    '../obsolete_packs/' + pack.file_name())
+            except (errors.PathError, errors.TransportError), e:
+                # TODO: Should these be warnings or mutters?
+                mutter("couldn't rename obsolete pack, skipping it:\n%s"
+                       % (e,))
             # TODO: Probably needs to know all possible indices for this pack
             # - or maybe list the directory and move all indices matching this
             # name whether we recognize it or not?
@@ -1787,8 +1803,12 @@
             if self.chk_index is not None:
                 suffixes.append('.cix')
             for suffix in suffixes:
-                self._index_transport.rename(pack.name + suffix,
-                    '../obsolete_packs/' + pack.name + suffix)
+                try:
+                    self._index_transport.rename(pack.name + suffix,
+                        '../obsolete_packs/' + pack.name + suffix)
+                except (errors.PathError, errors.TransportError), e:
+                    mutter("couldn't rename obsolete index, skipping it:\n%s"
+                           % (e,))
 
     def pack_distribution(self, total_revisions):
         """Generate a list of the number of revisions to put in each pack.
@@ -1866,6 +1886,7 @@
         disk_nodes = set()
         for index, key, value in self._iter_disk_pack_index():
             disk_nodes.add((key, value))
+        orig_disk_nodes = set(disk_nodes)
 
         # do a two-way diff against our original content
         current_nodes = set()
@@ -1884,7 +1905,7 @@
         disk_nodes.difference_update(deleted_nodes)
         disk_nodes.update(new_nodes)
 
-        return disk_nodes, deleted_nodes, new_nodes
+        return disk_nodes, deleted_nodes, new_nodes, orig_disk_nodes
 
     def _syncronize_pack_names_from_disk_nodes(self, disk_nodes):
         """Given the correct set of pack files, update our saved info.
@@ -1930,7 +1951,7 @@
                 added.append(name)
         return removed, added, modified
 
-    def _save_pack_names(self, clear_obsolete_packs=False):
+    def _save_pack_names(self, clear_obsolete_packs=False, obsolete_packs=None):
         """Save the list of packs.
 
         This will take out the mutex around the pack names list for the
@@ -1940,12 +1961,16 @@
 
         :param clear_obsolete_packs: If True, clear out the contents of the
            obsolete_packs directory.
+        :param obsolete_packs: Packs that are obsolete once the new pack-names
+            file has been written.
         :return: A list of the names saved that were not previously on disk.
         """
+        already_obsolete = []
         self.lock_names()
         try:
             builder = self._index_builder_class()
-            disk_nodes, deleted_nodes, new_nodes = self._diff_pack_names()
+            (disk_nodes, deleted_nodes, new_nodes,
+             orig_disk_nodes) = self._diff_pack_names()
             # TODO: handle same-name, index-size-changes here -
             # e.g. use the value from disk, not ours, *unless* we're the one
             # changing it.
@@ -1953,14 +1978,25 @@
                 builder.add_node(key, value)
             self.transport.put_file('pack-names', builder.finish(),
                 mode=self.repo.bzrdir._get_file_mode())
-            # move the baseline forward
             self._packs_at_load = disk_nodes
             if clear_obsolete_packs:
-                self._clear_obsolete_packs()
+                to_preserve = None
+                if obsolete_packs:
+                    to_preserve = set([o.name for o in obsolete_packs])
+                already_obsolete = self._clear_obsolete_packs(to_preserve)
        finally:
             self._unlock_names()
         # synchronise the memory packs list with what we just wrote:
         self._syncronize_pack_names_from_disk_nodes(disk_nodes)
+        if obsolete_packs:
+            # TODO: We could add one more condition here. "if o.name not in
+            #       orig_disk_nodes and o != the new_pack we haven't written to
+            #       disk yet. However, the new pack object is not easily
+            #       accessible here (it would have to be passed through the
+            #       autopacking code, etc.)
+            obsolete_packs = [o for o in obsolete_packs
+                              if o.name not in already_obsolete]
+            self._obsolete_packs(obsolete_packs)
         return [new_node[0][0] for new_node in new_nodes]
 
     def reload_pack_names(self):
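
The tail of _save_pack_names now performs the obsoleting itself:
_clear_obsolete_packs(to_preserve) reports which names were already
sitting in obsolete_packs/, and only the survivors are renamed, so no
pack is moved twice. A minimal sketch of that filtering step; Pack is a
stand-in for the real pack objects:

    from collections import namedtuple

    Pack = namedtuple('Pack', 'name')

    def obsolete_survivors(obsolete_packs, already_obsolete):
        # already_obsolete holds names (without '.pack') found on disk.
        return [o for o in obsolete_packs
                if o.name not in already_obsolete]

    # obsolete_survivors([Pack('a'), Pack('b')], ['a']) -> [Pack('b')]
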
@@ -1981,8 +2017,12 @@
         if first_read:
             return True
         # out the new value.
-        disk_nodes, _, _ = self._diff_pack_names()
-        self._packs_at_load = disk_nodes
+        (disk_nodes, deleted_nodes, new_nodes,
+         orig_disk_nodes) = self._diff_pack_names()
+        # _packs_at_load is meant to be the explicit list of names in
+        # 'pack-names' at then start. As such, it should not contain any
+        # pending names that haven't been written out yet.
+        self._packs_at_load = orig_disk_nodes
         (removed, added,
          modified) = self._syncronize_pack_names_from_disk_nodes(disk_nodes)
         if removed or added or modified:
@@ -1997,15 +2037,28 @@
             raise
         raise errors.RetryAutopack(self.repo, False, sys.exc_info())
 
-    def _clear_obsolete_packs(self):
+    def _clear_obsolete_packs(self, preserve=None):
         """Delete everything from the obsolete-packs directory.
+
+        :return: A list of pack identifiers (the filename without '.pack') that
+            were found in obsolete_packs.
         """
+        found = []
         obsolete_pack_transport = self.transport.clone('obsolete_packs')
+        if preserve is None:
+            preserve = set()
         for filename in obsolete_pack_transport.list_dir('.'):
+            name, ext = osutils.splitext(filename)
+            if ext == '.pack':
+                found.append(name)
+            if name in preserve:
+                continue
             try:
                 obsolete_pack_transport.delete(filename)
             except (errors.PathError, errors.TransportError), e:
-                warning("couldn't delete obsolete pack, skipping it:\n%s" % (e,))
+                warning("couldn't delete obsolete pack, skipping it:\n%s"
+                        % (e,))
+        return found
 
     def _start_write_group(self):
         # Do not permit preparation for writing if we're not in a 'write lock'.
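
_clear_obsolete_packs keeps its old job of emptying the obsolete_packs
directory but gains two twists: filenames whose base name is in
preserve are skipped, and every '.pack' base name found is returned so
the caller learns what was already obsolete. A self-contained sketch
over a plain filename list, with a caller-supplied delete callable
standing in for the transport:

    import os.path

    def clear_obsolete(filenames, delete, preserve=None):
        if preserve is None:
            preserve = set()
        found = []
        for filename in filenames:
            name, ext = os.path.splitext(filename)
            if ext == '.pack':
                found.append(name)
            if name in preserve:
                continue  # leave preserved packs (and indices) alone
            delete(filename)
        return found
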
@@ -2063,6 +2116,16 @@
             self._remove_pack_indices(resumed_pack)
         del self._resumed_packs[:]
 
+    def _check_new_inventories(self):
+        """Detect missing inventories in this write group.
+
+        :returns: list of strs, summarising any problems found.  If the list is
+            empty no problems were found.
+        """
+        # The base implementation does no checks.  GCRepositoryPackCollection
+        # overrides this.
+        return []
+
     def _commit_write_group(self):
         all_missing = set()
         for prefix, versioned_file in (
@@ -2077,14 +2140,19 @@
             raise errors.BzrCheckError(
                 "Repository %s has missing compression parent(s) %r "
                  % (self.repo, sorted(all_missing)))
+        problems = self._check_new_inventories()
+        if problems:
+            problems_summary = '\n'.join(problems)
+            raise errors.BzrCheckError(
+                "Cannot add revision(s) to repository: " + problems_summary)
         self._remove_pack_indices(self._new_pack)
-        should_autopack = False
+        any_new_content = False
         if self._new_pack.data_inserted():
             # get all the data to disk and read to use
             self._new_pack.finish()
             self.allocate(self._new_pack)
             self._new_pack = None
-            should_autopack = True
+            any_new_content = True
         else:
             self._new_pack.abort()
             self._new_pack = None
@@ -2095,13 +2163,15 @@
             self._remove_pack_from_memory(resumed_pack)
             resumed_pack.finish()
             self.allocate(resumed_pack)
-            should_autopack = True
+            any_new_content = True
         del self._resumed_packs[:]
-        if should_autopack:
-            if not self.autopack():
+        if any_new_content:
+            result = self.autopack()
+            if not result:
                 # when autopack takes no steps, the names list is still
                 # unsaved.
                 return self._save_pack_names()
+            return result
         return []
 
     def _suspend_write_group(self):
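
The autopack result (a "hint" describing which packs changed) is now
propagated out of _commit_write_group instead of being discarded; only
when autopack takes no steps does the method fall back to saving the
names list itself. A reduced sketch of that tail, with the
collaborators passed in as callables:

    def commit_write_group_tail(any_new_content, autopack,
                                save_pack_names):
        if any_new_content:
            result = autopack()
            if not result:
                # autopack did nothing, so the names list is unsaved.
                return save_pack_names()
            return result
        return []
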
@@ -2210,19 +2280,13 @@
         self._reconcile_fixes_text_parents = True
         self._reconcile_backsup_inventory = False
 
-    def _warn_if_deprecated(self):
+    def _warn_if_deprecated(self, branch=None):
         # This class isn't deprecated, but one sub-format is
         if isinstance(self._format, RepositoryFormatKnitPack5RichRootBroken):
-            from bzrlib import repository
-            if repository._deprecation_warning_done:
-                return
-            repository._deprecation_warning_done = True
-            warning("Format %s for %s is deprecated - please use"
-                    " 'bzr upgrade --1.6.1-rich-root'"
-                    % (self._format, self.bzrdir.transport.base))
+            super(KnitPackRepository, self)._warn_if_deprecated(branch)
 
     def _abort_write_group(self):
-        self.revisions._index._key_dependencies.refs.clear()
+        self.revisions._index._key_dependencies.clear()
         self._pack_collection._abort_write_group()
 
     def _get_source(self, to_format):
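
_warn_if_deprecated stops re-implementing the base class's
de-duplication and message formatting and simply delegates upward,
threading the new branch argument through (which lets the base
implementation consult configuration). A minimal sketch of that
delegation shape, all names hypothetical:

    class BaseRepo(object):
        _warning_done = False

        def _warn_if_deprecated(self, branch=None):
            # The base class owns de-duplication and formatting.
            if not BaseRepo._warning_done:
                BaseRepo._warning_done = True
                print 'format %r is deprecated' % (self,)

    class PackRepo(BaseRepo):
        deprecated = True  # stand-in for the isinstance() format check

        def _warn_if_deprecated(self, branch=None):
            # The subclass only decides *whether* to warn.
            if self.deprecated:
                super(PackRepo, self)._warn_if_deprecated(branch)
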
@@ -2242,13 +2306,14 @@
         self._pack_collection._start_write_group()
 
     def _commit_write_group(self):
-        self.revisions._index._key_dependencies.refs.clear()
-        return self._pack_collection._commit_write_group()
+        hint = self._pack_collection._commit_write_group()
+        self.revisions._index._key_dependencies.clear()
+        return hint
 
     def suspend_write_group(self):
         # XXX check self._write_group is self.get_transaction()?
         tokens = self._pack_collection._suspend_write_group()
-        self.revisions._index._key_dependencies.refs.clear()
+        self.revisions._index._key_dependencies.clear()
         self._write_group = None
         return tokens
 
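
Both write-group paths now clear the revision index's key-dependency
tracker through a clear() method instead of reaching into its internal
refs set, and the commit path captures the collection's hint before
clearing so it can still be returned. A reduced sketch of that
ordering; _KeyDependencies is a hypothetical stand-in:

    class _KeyDependencies(object):
        def __init__(self):
            self.refs = set()

        def clear(self):
            # Public entry point; callers no longer touch self.refs.
            self.refs = set()

    def commit_write_group(collection_commit, key_deps):
        hint = collection_commit()  # capture the hint first
        key_deps.clear()
        return hint
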
@@ -2282,6 +2347,9 @@
         if self._write_lock_count == 1:
             self._transaction = transactions.WriteTransaction()
         if not locked:
+            if 'relock' in debug.debug_flags and self._prev_lock == 'w':
+                note('%r was write locked again', self)
+            self._prev_lock = 'w'
             for repo in self._fallback_repositories:
                 # Writes don't affect fallback repos
                 repo.lock_read()
@@ -2294,6 +2362,9 @@
         else:
             self.control_files.lock_read()
         if not locked:
+            if 'relock' in debug.debug_flags and self._prev_lock == 'r':
+                note('%r was read locked again', self)
+            self._prev_lock = 'r'
             for repo in self._fallback_repositories:
                 repo.lock_read()
             self._refresh_data()
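
Both lock paths now remember the previous lock kind and, when the
'relock' debug flag is set (for example via bzr -Drelock), emit a note
whenever a repository is re-locked in the same mode, which helps spot
redundant lock round-trips. A stand-alone sketch of the check:

    debug_flags = set(['relock'])  # cf. bzrlib.debug.debug_flags

    class Repo(object):
        _prev_lock = None

        def lock_read(self):
            if 'relock' in debug_flags and self._prev_lock == 'r':
                print '%r was read locked again' % (self,)  # cf. note()
            self._prev_lock = 'r'
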
@@ -2327,6 +2398,7 @@
         packer = ReconcilePacker(collection, packs, extension, revs)
         return packer.pack(pb)
 
+    @only_raises(errors.LockNotHeld, errors.LockBroken)
     def unlock(self):
         if self._write_lock_count == 1 and self._write_group is not None:
             self.abort_write_group()
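
unlock is now wrapped in the newly imported only_raises decorator so
that only LockNotHeld and LockBroken can escape; assuming semantics
like bzrlib's only_raises, any other exception raised while unlocking
is logged and discarded rather than masking an exception already in
flight. A minimal sketch of an only_raises-style decorator under that
assumption:

    import functools
    import traceback

    def only_raises(*allowed):
        def decorator(func):
            @functools.wraps(func)
            def wrapped(*args, **kwargs):
                try:
                    return func(*args, **kwargs)
                except allowed:
                    raise
                except Exception:
                    # Stand-in for bzrlib's quiet exception logging.
                    traceback.print_exc()
            return wrapped
        return decorator
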