@@ -1 +1 @@
-# Copyright (C) 2005, 2006, 2007, 2008 Canonical Ltd
+# Copyright (C) 2007-2010 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -54 +55 @@
     revision as _mod_revision,
     )

-from bzrlib.decorators import needs_write_lock
+from bzrlib.decorators import needs_write_lock, only_raises
 from bzrlib.btree_index import (
@@ -224 +226 @@
         return self.index_name('text', name)

     def _replace_index_with_readonly(self, index_type):
+        unlimited_cache = False
+        if index_type == 'chk':
+            unlimited_cache = True
         setattr(self, index_type + '_index',
             self.index_class(self.index_transport,
                 self.index_name(index_type, self.name),
-                self.index_sizes[self.index_offset(index_type)]))
+                self.index_sizes[self.index_offset(index_type)],
+                unlimited_cache=unlimited_cache))


 class ExistingPack(Pack):
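
The unlimited_cache flag threaded through this diff exists for the chk
('.cix') indices: chk pages are keyed by content hash, so lookups have no
key locality and a bounded page cache in the btree reader thrashes on them.
A minimal sketch of the distinction, with hypothetical names (this is not
the BTreeGraphIndex implementation):

    # Hedged sketch: a page cache that is bounded by default but unbounded
    # on request, as for chk indices. All names here are illustrative.
    class NodeCache(object):

        def __init__(self, max_nodes=100, unlimited=False):
            self._max_nodes = None if unlimited else max_nodes
            self._nodes = {}    # page offset -> parsed node
            self._order = []    # crude FIFO eviction, standing in for an LRU

        def get(self, offset, read_func):
            if offset in self._nodes:
                return self._nodes[offset]
            node = read_func(offset)
            self._nodes[offset] = node
            self._order.append(offset)
            if self._max_nodes is not None and len(self._order) > self._max_nodes:
                del self._nodes[self._order.pop(0)]
            return node
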
@@ -641 +647 @@
         del self.combined_index._indices[:]
         self.add_callback = None

-    def remove_index(self, index, pack):
+    def remove_index(self, index):
         """Remove index from the indices used to answer queries.

         :param index: An index from the pack parameter.
-        :param pack: A Pack instance.
         """
         del self.index_to_pack[index]
         self.combined_index._indices.remove(index)
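
Dropping the pack argument works because AggregateIndex already records the
owning pack in index_to_pack, so the index alone identifies everything that
needs removing. Roughly, the bookkeeping looks like this (simplified sketch,
not the real class); note that del on a missing key raises KeyError, which is
exactly what the ignore_missing handling later in this diff suppresses:

    # Simplified sketch of AggregateIndex's add/remove bookkeeping:
    class MiniAggregateIndex(object):

        def __init__(self):
            self.index_to_pack = {}
            self.combined = []    # stands in for CombinedGraphIndex._indices

        def add_index(self, index, pack):
            self.index_to_pack[index] = pack
            self.combined.append(index)

        def remove_index(self, index):
            # The owning pack is recoverable from index_to_pack, so callers
            # no longer need to pass it in.
            del self.index_to_pack[index]
            self.combined.remove(index)
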
@@ -1112 +1117 @@
         iterator is a tuple with:
         index, readv_vector, node_vector. readv_vector is a list ready to
         hand to the transport readv method, and node_vector is a list of
-        (key, eol_flag, references) for the the node retrieved by the
+        (key, eol_flag, references) for the node retrieved by the
         matching readv_vector.
         """
         # group by pack so we do one readv per pack
@@ -1537 +1545 @@
                 self._remove_pack_from_memory(pack)
         # record the newly available packs and stop advertising the old
         # packs
-        result = self._save_pack_names(clear_obsolete_packs=True)
-        # Move the old packs out of the way now they are no longer referenced.
-        for revision_count, packs in pack_operations:
-            self._obsolete_packs(packs)
+        to_be_obsoleted = []
+        for _, packs in pack_operations:
+            to_be_obsoleted.extend(packs)
+        result = self._save_pack_names(clear_obsolete_packs=True,
+                                       obsolete_packs=to_be_obsoleted)
         return result

     def _flush_new_pack(self):
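
The reordering matters for concurrency: superseded packs are no longer moved
to obsolete_packs/ as a separate step after the fact, but are handed to
_save_pack_names, which renames them only once the new pack-names file is
committed and can skip packs that another process obsoleted first. A toy
outline of the intended sequence, with hypothetical helpers:

    # Toy outline of commit-then-obsolete (hypothetical helper functions):
    def finish_repack(write_pack_names, rename_to_obsolete, old_packs):
        write_pack_names()              # readers now see the new pack list
        for pack in old_packs:          # only then move the old files aside,
            rename_to_obsolete(pack)    # tolerating races (see _obsolete_packs)
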
@@ -1674 +1683 @@
         txt_index = self._make_index(name, '.tix')
         sig_index = self._make_index(name, '.six')
         if self.chk_index is not None:
-            chk_index = self._make_index(name, '.cix')
+            chk_index = self._make_index(name, '.cix', unlimited_cache=True)
         else:
             chk_index = None
         result = ExistingPack(self._pack_transport, name, rev_index,
@@ -1699 +1708 @@
         txt_index = self._make_index(name, '.tix', resume=True)
         sig_index = self._make_index(name, '.six', resume=True)
         if self.chk_index is not None:
-            chk_index = self._make_index(name, '.cix', resume=True)
+            chk_index = self._make_index(name, '.cix', resume=True,
+                                         unlimited_cache=True)
         else:
             chk_index = None
         result = self.resumed_pack_factory(name, rev_index, inv_index,
@@ -1735 +1745 @@
         return self._index_class(self.transport, 'pack-names', None
                 ).iter_all_entries()

-    def _make_index(self, name, suffix, resume=False):
+    def _make_index(self, name, suffix, resume=False, unlimited_cache=False):
         size_offset = self._suffix_offsets[suffix]
         index_name = name + suffix
@@ -1745 +1755 @@
             transport = self._index_transport
             index_size = self._names[name][size_offset]
-        return self._index_class(transport, index_name, index_size)
+        return self._index_class(transport, index_name, index_size,
+                                 unlimited_cache=unlimited_cache)

     def _max_pack_count(self, total_revisions):
         """Return the maximum number of packs to use for total revisions.
@@ -1778 +1789 @@
         :return: None.
         """
         for pack in packs:
-            pack.pack_transport.rename(pack.file_name(),
-                '../obsolete_packs/' + pack.file_name())
+            try:
+                pack.pack_transport.rename(pack.file_name(),
+                    '../obsolete_packs/' + pack.file_name())
+            except (errors.PathError, errors.TransportError), e:
+                # TODO: Should these be warnings or mutters?
+                mutter("couldn't rename obsolete pack, skipping it:\n%s"
+                       % (e,))
             # TODO: Probably needs to know all possible indices for this pack
             # - or maybe list the directory and move all indices matching this
             # name whether we recognize it or not?
             suffixes = ['.iix', '.six', '.tix', '.rix']
             if self.chk_index is not None:
                 suffixes.append('.cix')
             for suffix in suffixes:
-                self._index_transport.rename(pack.name + suffix,
-                    '../obsolete_packs/' + pack.name + suffix)
+                try:
+                    self._index_transport.rename(pack.name + suffix,
+                        '../obsolete_packs/' + pack.name + suffix)
+                except (errors.PathError, errors.TransportError), e:
+                    mutter("couldn't rename obsolete index, skipping it:\n%s"
+                           % (e,))

     def pack_distribution(self, total_revisions):
         """Generate a list of the number of revisions to put in each pack.
@@ -1820 +1840 @@
         self._remove_pack_indices(pack)
         self.packs.remove(pack)

-    def _remove_pack_indices(self, pack):
-        """Remove the indices for pack from the aggregated indices."""
-        self.revision_index.remove_index(pack.revision_index, pack)
-        self.inventory_index.remove_index(pack.inventory_index, pack)
-        self.text_index.remove_index(pack.text_index, pack)
-        self.signature_index.remove_index(pack.signature_index, pack)
-        if self.chk_index is not None:
-            self.chk_index.remove_index(pack.chk_index, pack)
+    def _remove_pack_indices(self, pack, ignore_missing=False):
+        """Remove the indices for pack from the aggregated indices.
+
+        :param ignore_missing: Suppress KeyErrors from calling remove_index.
+        """
+        for index_type in Pack.index_definitions.keys():
+            attr_name = index_type + '_index'
+            aggregate_index = getattr(self, attr_name)
+            if aggregate_index is not None:
+                pack_index = getattr(pack, attr_name)
+                try:
+                    aggregate_index.remove_index(pack_index)
+                except KeyError:
+                    if ignore_missing:
+                        continue
+                    raise

     def reset(self):
         """Clear all cached data."""
@@ -1866 +1894 @@
         disk_nodes = set()
         for index, key, value in self._iter_disk_pack_index():
             disk_nodes.add((key, value))
+        orig_disk_nodes = set(disk_nodes)

         # do a two-way diff against our original content
         current_nodes = set()
@@ -1884 +1913 @@
         disk_nodes.difference_update(deleted_nodes)
         disk_nodes.update(new_nodes)

-        return disk_nodes, deleted_nodes, new_nodes
+        return disk_nodes, deleted_nodes, new_nodes, orig_disk_nodes

     def _syncronize_pack_names_from_disk_nodes(self, disk_nodes):
         """Given the correct set of pack files, update our saved info.
@@ -1930 +1959 @@
             added.append(name)
         return removed, added, modified
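
_diff_pack_names works with three pack-name sets: the baseline captured when
pack-names was last read (_packs_at_load), the current in-memory list, and
what is on disk right now; local deletions and additions are replayed on top
of the disk content. Returning orig_disk_nodes as well lets callers see the
pre-merge disk state. The set arithmetic, reduced to a sketch:

    # Sketch of the set arithmetic (real nodes are (key, value) tuples
    # carrying index sizes; plain set elements are used here):
    def diff_pack_names(baseline, disk_nodes, current_nodes):
        orig_disk_nodes = set(disk_nodes)
        deleted_nodes = baseline - current_nodes    # removed locally
        new_nodes = current_nodes - baseline        # added locally
        disk_nodes = (orig_disk_nodes - deleted_nodes) | new_nodes
        return disk_nodes, deleted_nodes, new_nodes, orig_disk_nodes
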
@@ -1933 +1962 @@
-    def _save_pack_names(self, clear_obsolete_packs=False):
+    def _save_pack_names(self, clear_obsolete_packs=False, obsolete_packs=None):
         """Save the list of packs.

         This will take out the mutex around the pack names list for the
@@ -1941 +1970 @@
         :param clear_obsolete_packs: If True, clear out the contents of the
             obsolete_packs directory.
+        :param obsolete_packs: Packs that are obsolete once the new pack-names
+            file has been written.
         :return: A list of the names saved that were not previously on disk.
         """
+        already_obsolete = []
         self.lock_names()
         try:
             builder = self._index_builder_class()
-            disk_nodes, deleted_nodes, new_nodes = self._diff_pack_names()
+            (disk_nodes, deleted_nodes, new_nodes,
+             orig_disk_nodes) = self._diff_pack_names()
             # TODO: handle same-name, index-size-changes here -
             # e.g. use the value from disk, not ours, *unless* we're the one
             # changing it.
             for key, value in disk_nodes:
                 builder.add_node(key, value)
             self.transport.put_file('pack-names', builder.finish(),
                 mode=self.repo.bzrdir._get_file_mode())
-            # move the baseline forward
             self._packs_at_load = disk_nodes
             if clear_obsolete_packs:
-                self._clear_obsolete_packs()
+                to_preserve = set([o.name for o in obsolete_packs])
+                already_obsolete = self._clear_obsolete_packs(to_preserve)
         finally:
             self._unlock_names()
         # synchronise the memory packs list with what we just wrote:
         self._syncronize_pack_names_from_disk_nodes(disk_nodes)
+        if obsolete_packs:
+            # TODO: We could add one more condition here. "if o.name not in
+            #       orig_disk_nodes and o != the new_pack we haven't written to
+            #       disk yet. However, the new pack object is not easily
+            #       accessible here (it would have to be passed through the
+            #       autopacking code, etc.)
+            obsolete_packs = [o for o in obsolete_packs
+                              if o.name not in already_obsolete]
+            self._obsolete_packs(obsolete_packs)
         return [new_node[0][0] for new_node in new_nodes]

     def reload_pack_names(self):
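
The already_obsolete plumbing closes a race: _clear_obsolete_packs (changed
below) reports every pack name it found in obsolete_packs/, so once the names
lock is released, packs that another process has already moved are filtered
out before _obsolete_packs tries to rename them again. For example:

    # Toy demonstration of the double-obsolete guard (plain names, where the
    # real code filters Pack objects by their .name attribute):
    def packs_still_to_obsolete(obsolete_names, already_obsolete):
        return [n for n in obsolete_names if n not in already_obsolete]

    assert packs_still_to_obsolete(['p1', 'p2'], set(['p2'])) == ['p1']
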
@@ -1983 +2027 @@
         # out the new value.
-        disk_nodes, _, _ = self._diff_pack_names()
-        self._packs_at_load = disk_nodes
+        (disk_nodes, deleted_nodes, new_nodes,
+         orig_disk_nodes) = self._diff_pack_names()
+        # _packs_at_load is meant to be the explicit list of names in
+        # 'pack-names' at the start. As such, it should not contain any
+        # pending names that haven't been written out yet.
+        self._packs_at_load = orig_disk_nodes
         (removed, added,
          modified) = self._syncronize_pack_names_from_disk_nodes(disk_nodes)
         if removed or added or modified:
@@ -1998 +2046 @@
         raise errors.RetryAutopack(self.repo, False, sys.exc_info())

-    def _clear_obsolete_packs(self):
+    def _clear_obsolete_packs(self, preserve=None):
         """Delete everything from the obsolete-packs directory.
+
+        :return: A list of pack identifiers (the filename without '.pack') that
+            were found in obsolete_packs.
         """
+        found = []
         obsolete_pack_transport = self.transport.clone('obsolete_packs')
+        if preserve is None:
+            preserve = set()
         for filename in obsolete_pack_transport.list_dir('.'):
+            name, ext = osutils.splitext(filename)
+            if ext == '.pack':
+                found.append(name)
+            if name in preserve:
+                continue
             try:
                 obsolete_pack_transport.delete(filename)
             except (errors.PathError, errors.TransportError), e:
-                warning("couldn't delete obsolete pack, skipping it:\n%s" % (e,))
+                warning("couldn't delete obsolete pack, skipping it:\n%s"
+                        % (e,))
+        return found

     def _start_write_group(self):
         # Do not permit preparation for writing if we're not in a 'write lock'.
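
_clear_obsolete_packs now does double duty: it still sweeps the
obsolete_packs/ directory, but it skips anything named in preserve (the packs
the caller is about to move there itself) and returns the '.pack' stems it
saw. A worked example of that bookkeeping, using os.path in place of bzrlib
transports:

    import os

    # Worked example of the sweep-with-preserve bookkeeping (illustrative):
    def clear_obsolete(filenames, preserve):
        found, deleted = [], []
        for filename in filenames:
            name, ext = os.path.splitext(filename)
            if ext == '.pack':
                found.append(name)
            if name in preserve:
                continue
            deleted.append(filename)    # stands in for transport.delete()
        return found, deleted

    assert clear_obsolete(['a.pack', 'a.rix', 'b.pack'], set(['b'])) == \
        (['a', 'b'], ['a.pack', 'a.rix'])
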
@@ -2038 +2099 @@
         # FIXME: just drop the transient index.
         # forget what names there are
         if self._new_pack is not None:
-            try:
-                self._new_pack.abort()
-            finally:
-                # XXX: If we aborted while in the middle of finishing the write
-                # group, _remove_pack_indices can fail because the indexes are
-                # already gone. If they're not there we shouldn't fail in this
-                # case. -- mbp 20081113
-                self._remove_pack_indices(self._new_pack)
-                self._new_pack = None
+            operation = cleanup.OperationWithCleanups(self._new_pack.abort)
+            operation.add_cleanup(setattr, self, '_new_pack', None)
+            # If we aborted while in the middle of finishing the write
+            # group, _remove_pack_indices could fail because the indexes are
+            # already gone. But if they're not there we shouldn't fail in
+            # this case, so we pass ignore_missing=True.
+            operation.add_cleanup(self._remove_pack_indices, self._new_pack,
+                ignore_missing=True)
+            operation.run_simple()
         for resumed_pack in self._resumed_packs:
-            try:
-                resumed_pack.abort()
-            finally:
-                # See comment in previous finally block.
-                try:
-                    self._remove_pack_indices(resumed_pack)
-                except KeyError:
-                    pass
+            operation = cleanup.OperationWithCleanups(resumed_pack.abort)
+            # See comment in previous finally block.
+            operation.add_cleanup(self._remove_pack_indices, resumed_pack,
+                ignore_missing=True)
+            operation.run_simple()
         del self._resumed_packs[:]
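
cleanup.OperationWithCleanups replaces the nested try/finally blocks: the
primary callable runs first, then every registered cleanup runs even if an
earlier one fails, so a failing cleanup can no longer mask the others or the
original exception. In outline (simplified; the real class in bzrlib.cleanup
reports failing cleanups rather than silently swallowing them):

    # Simplified outline of the OperationWithCleanups pattern:
    class SimpleOperation(object):

        def __init__(self, func):
            self.func = func
            self.cleanups = []

        def add_cleanup(self, func, *args, **kwargs):
            self.cleanups.append((func, args, kwargs))

        def run_simple(self):
            try:
                return self.func()
            finally:
                for func, args, kwargs in self.cleanups:
                    try:
                        func(*args, **kwargs)
                    except Exception:
                        pass    # the real version logs these failures
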
@@ -2061 +2119 @@
     def _remove_resumed_pack_indices(self):
@@ -2066 +2124 @@
     def _check_new_inventories(self):
         """Detect missing inventories in this write group.

-        :returns: set of missing keys. Note that not every missing key is
-            guaranteed to be reported.
+        :returns: list of strs, summarising any problems found. If the list is
+            empty no problems were found.
         """
         # The base implementation does no checks. GCRepositoryPackCollection
         # overrides this.
@@ -2076 +2134 @@
     def _commit_write_group(self):
         all_missing = set()
@@ -2087 +2145 @@
             raise errors.BzrCheckError(
                 "Repository %s has missing compression parent(s) %r "
                 % (self.repo, sorted(all_missing)))
-        all_missing = self._check_new_inventories()
-        if all_missing:
+        problems = self._check_new_inventories()
+        if problems:
+            problems_summary = '\n'.join(problems)
             raise errors.BzrCheckError(
-                "Repository %s missing keys for new revisions %r "
-                % (self.repo, sorted(all_missing)))
+                "Cannot add revision(s) to repository: " + problems_summary)
         self._remove_pack_indices(self._new_pack)
         any_new_content = False
         if self._new_pack.data_inserted():
@@ -2227 +2285 @@
         self._reconcile_fixes_text_parents = True
         self._reconcile_backsup_inventory = False

-    def _warn_if_deprecated(self):
+    def _warn_if_deprecated(self, branch=None):
         # This class isn't deprecated, but one sub-format is
         if isinstance(self._format, RepositoryFormatKnitPack5RichRootBroken):
-            from bzrlib import repository
-            if repository._deprecation_warning_done:
-                return
-            repository._deprecation_warning_done = True
-            warning("Format %s for %s is deprecated - please use"
-                " 'bzr upgrade --1.6.1-rich-root'"
-                % (self._format, self.bzrdir.transport.base))
+            super(KnitPackRepository, self)._warn_if_deprecated(branch)

     def _abort_write_group(self):
         self.revisions._index._key_dependencies.clear()
@@ -2300 +2352 @@
         if self._write_lock_count == 1:
             self._transaction = transactions.WriteTransaction()
+        if 'relock' in debug.debug_flags and self._prev_lock == 'w':
+            note('%r was write locked again', self)
+        self._prev_lock = 'w'
         for repo in self._fallback_repositories:
             # Writes don't affect fallback repos
             repo.lock_read()
@@ -2313 +2368 @@
         self.control_files.lock_read()
+        if 'relock' in debug.debug_flags and self._prev_lock == 'r':
+            note('%r was read locked again', self)
+        self._prev_lock = 'r'
         for repo in self._fallback_repositories:
             repo.lock_read()
         self._refresh_data()
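
The _prev_lock bookkeeping added to lock_write and lock_read exists purely
for the 'relock' debug flag: running bzr with -Drelock notes every time the
same repository object is locked twice in a row in the same mode, which helps
track down redundant lock/unlock churn. Conceptually:

    # Conceptual sketch of the -Drelock check (not the bzrlib classes):
    def record_lock(tracker, mode, debug_flags, log):
        # mode is 'r' or 'w'; fires only on two same-mode locks in a row
        if 'relock' in debug_flags and tracker.prev_lock == mode:
            log('%r was locked again in mode %s' % (tracker, mode))
        tracker.prev_lock = mode
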
@@ -2345 +2403 @@
         packer = ReconcilePacker(collection, packs, extension, revs)
         return packer.pack(pb)

+    @only_raises(errors.LockNotHeld, errors.LockBroken)
     def unlock(self):
         if self._write_lock_count == 1 and self._write_group is not None:
             self.abort_write_group()
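
only_raises(errors.LockNotHeld, errors.LockBroken) narrows what unlock may
propagate: unlock frequently runs while another exception is already in
flight, and an unrelated error escaping from it would mask the original
failure. A sketch of such a decorator (the real one is
bzrlib.decorators.only_raises, which also logs whatever it suppresses):

    # Sketch of an only_raises-style decorator (illustrative only):
    def only_raises(*allowed):
        def decorator(unbound):
            def wrapped(*args, **kwargs):
                try:
                    return unbound(*args, **kwargs)
                except allowed:
                    raise
                except Exception:
                    pass    # the real decorator mutters the traceback first
            wrapped.__name__ = unbound.__name__
            wrapped.__doc__ = unbound.__doc__
            return wrapped
        return decorator
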
@@ -2834 +2894 @@
     repository_class = KnitPackRepository
     _commit_builder_class = PackRootCommitBuilder
     rich_root_data = True
     supports_tree_reference = True
     supports_external_lookups = True
     # What index classes to use