@@ -1 +1 @@
-# Copyright (C) 2005, 2006, 2007, 2008 Canonical Ltd
+# Copyright (C) 2007-2010 Canonical Ltd
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -646 +647 @@
         del self.combined_index._indices[:]
         self.add_callback = None
-    def remove_index(self, index, pack):
+    def remove_index(self, index):
         """Remove index from the indices used to answer queries.
         :param index: An index from the pack parameter.
-        :param pack: A Pack instance.
         del self.index_to_pack[index]
         self.combined_index._indices.remove(index)
@@ -1542 +1545 @@
                 self._remove_pack_from_memory(pack)
         # record the newly available packs and stop advertising the old
-        result = self._save_pack_names(clear_obsolete_packs=True)
-        # Move the old packs out of the way now they are no longer referenced.
-        for revision_count, packs in pack_operations:
-            self._obsolete_packs(packs)
+        to_be_obsoleted = []
+        for _, packs in pack_operations:
+            to_be_obsoleted.extend(packs)
+        result = self._save_pack_names(clear_obsolete_packs=True,
+                                       obsolete_packs=to_be_obsoleted)
     def _flush_new_pack(self):
@@ -1785 +1789 @@
         :return: None.
         for pack in packs:
-            pack.pack_transport.rename(pack.file_name(),
-                '../obsolete_packs/' + pack.file_name())
+            try:
+                pack.pack_transport.rename(pack.file_name(),
+                    '../obsolete_packs/' + pack.file_name())
+            except (errors.PathError, errors.TransportError), e:
+                # TODO: Should these be warnings or mutters?
+                mutter("couldn't rename obsolete pack, skipping it:\n%s"
+                       % (e,))
             # TODO: Probably needs to know all possible indices for this pack
             # - or maybe list the directory and move all indices matching this
             # name whether we recognize it or not?
             if self.chk_index is not None:
                 suffixes.append('.cix')
             for suffix in suffixes:
-                self._index_transport.rename(pack.name + suffix,
-                    '../obsolete_packs/' + pack.name + suffix)
+                try:
+                    self._index_transport.rename(pack.name + suffix,
+                        '../obsolete_packs/' + pack.name + suffix)
+                except (errors.PathError, errors.TransportError), e:
+                    mutter("couldn't rename obsolete index, skipping it:\n%s"
+                           % (e,))
     def pack_distribution(self, total_revisions):
         """Generate a list of the number of revisions to put in each pack.
@@ -1827 +1840 @@
         self._remove_pack_indices(pack)
         self.packs.remove(pack)
-    def _remove_pack_indices(self, pack):
-        """Remove the indices for pack from the aggregated indices."""
-        self.revision_index.remove_index(pack.revision_index, pack)
-        self.inventory_index.remove_index(pack.inventory_index, pack)
-        self.text_index.remove_index(pack.text_index, pack)
-        self.signature_index.remove_index(pack.signature_index, pack)
-        if self.chk_index is not None:
-            self.chk_index.remove_index(pack.chk_index, pack)
+    def _remove_pack_indices(self, pack, ignore_missing=False):
+        """Remove the indices for pack from the aggregated indices.
+        :param ignore_missing: Suppress KeyErrors from calling remove_index.
+        for index_type in Pack.index_definitions.keys():
+            attr_name = index_type + '_index'
+            aggregate_index = getattr(self, attr_name)
+            if aggregate_index is not None:
+                pack_index = getattr(pack, attr_name)
+                aggregate_index.remove_index(pack_index)
     def reset(self):
         """Clear all cached data."""
@@ -1873 +1894 @@
         disk_nodes = set()
         for index, key, value in self._iter_disk_pack_index():
             disk_nodes.add((key, value))
+        orig_disk_nodes = set(disk_nodes)
         # do a two-way diff against our original content
         current_nodes = set()
         disk_nodes.difference_update(deleted_nodes)
         disk_nodes.update(new_nodes)
-        return disk_nodes, deleted_nodes, new_nodes
+        return disk_nodes, deleted_nodes, new_nodes, orig_disk_nodes
     def _syncronize_pack_names_from_disk_nodes(self, disk_nodes):
         """Given the correct set of pack files, update our saved info.
@@ -1937 +1959 @@
                 added.append(name)
         return removed, added, modified
-    def _save_pack_names(self, clear_obsolete_packs=False):
+    def _save_pack_names(self, clear_obsolete_packs=False, obsolete_packs=None):
         """Save the list of packs.
         This will take out the mutex around the pack names list for the
         :param clear_obsolete_packs: If True, clear out the contents of the
             obsolete_packs directory.
+        :param obsolete_packs: Packs that are obsolete once the new pack-names
+            file has been written.
         :return: A list of the names saved that were not previously on disk.
+        already_obsolete = []
         self.lock_names()
             builder = self._index_builder_class()
-            disk_nodes, deleted_nodes, new_nodes = self._diff_pack_names()
+            (disk_nodes, deleted_nodes, new_nodes,
+             orig_disk_nodes) = self._diff_pack_names()
             # TODO: handle same-name, index-size-changes here -
             # e.g. use the value from disk, not ours, *unless* we're the one
                 builder.add_node(key, value)
             self.transport.put_file('pack-names', builder.finish(),
                 mode=self.repo.bzrdir._get_file_mode())
-            # move the baseline forward
             self._packs_at_load = disk_nodes
             if clear_obsolete_packs:
-                self._clear_obsolete_packs()
+                to_preserve = set([o.name for o in obsolete_packs])
+                already_obsolete = self._clear_obsolete_packs(to_preserve)
             self._unlock_names()
         # synchronise the memory packs list with what we just wrote:
         self._syncronize_pack_names_from_disk_nodes(disk_nodes)
+        # TODO: We could add one more condition here. "if o.name not in
+        # orig_disk_nodes and o != the new_pack we haven't written to
+        # disk yet. However, the new pack object is not easily
+        # accessible here (it would have to be passed through the
+        # autopacking code, etc.)
+        obsolete_packs = [o for o in obsolete_packs
+                          if o.name not in already_obsolete]
+        self._obsolete_packs(obsolete_packs)
         return [new_node[0][0] for new_node in new_nodes]
     def reload_pack_names(self):
@@ -1990 +2027 @@
         # out the new value.
-        disk_nodes, _, _ = self._diff_pack_names()
-        self._packs_at_load = disk_nodes
+        (disk_nodes, deleted_nodes, new_nodes,
+         orig_disk_nodes) = self._diff_pack_names()
+        # _packs_at_load is meant to be the explicit list of names in
+        # 'pack-names' at the start. As such, it should not contain any
+        # pending names that haven't been written out yet.
+        self._packs_at_load = orig_disk_nodes
         (removed, added,
          modified) = self._syncronize_pack_names_from_disk_nodes(disk_nodes)
         if removed or added or modified:
@@ -2005 +2046 @@
         raise errors.RetryAutopack(self.repo, False, sys.exc_info())
-    def _clear_obsolete_packs(self):
+    def _clear_obsolete_packs(self, preserve=None):
         """Delete everything from the obsolete-packs directory.
+        :return: A list of pack identifiers (the filename without '.pack') that
+            were found in obsolete_packs.
         obsolete_pack_transport = self.transport.clone('obsolete_packs')
+        if preserve is None:
+            preserve = set()
         for filename in obsolete_pack_transport.list_dir('.'):
+            name, ext = osutils.splitext(filename)
+            if name in preserve:
+                continue
             try:
                 obsolete_pack_transport.delete(filename)
             except (errors.PathError, errors.TransportError), e:
-                warning("couldn't delete obsolete pack, skipping it:\n%s" % (e,))
+                warning("couldn't delete obsolete pack, skipping it:\n%s"
+                        % (e,))
     def _start_write_group(self):
         # Do not permit preparation for writing if we're not in a 'write lock'.
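
Pulling this hunk together, here is a self-contained sketch of what the reworked `_clear_obsolete_packs` appears to do, based on the lines shown and the new docstring; the standalone-function form is mine, and the per-file error handling is omitted:

    from bzrlib import osutils

    # Sketch only: the real method lives on the pack collection and wraps the
    # delete in try/except, logging a warning instead of failing.
    def clear_obsolete_packs(obsolete_pack_transport, preserve=None):
        if preserve is None:
            preserve = set()
        found = []
        for filename in obsolete_pack_transport.list_dir('.'):
            name, ext = osutils.splitext(filename)
            if ext == '.pack':
                found.append(name)  # report packs already in obsolete_packs/
            if name in preserve:
                continue  # keep packs the caller is about to obsolete itself
            obsolete_pack_transport.delete(filename)
        return found
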
@@ -2045 +2099 @@
         # FIXME: just drop the transient index.
         # forget what names there are
         if self._new_pack is not None:
-                self._new_pack.abort()
-                # XXX: If we aborted while in the middle of finishing the write
-                # group, _remove_pack_indices can fail because the indexes are
-                # already gone. If they're not there we shouldn't fail in this
-                # case. -- mbp 20081113
-                self._remove_pack_indices(self._new_pack)
-                self._new_pack = None
+            operation = cleanup.OperationWithCleanups(self._new_pack.abort)
+            operation.add_cleanup(setattr, self, '_new_pack', None)
+            # If we aborted while in the middle of finishing the write
+            # group, _remove_pack_indices could fail because the indexes are
+            # already gone. But if they're not there we shouldn't fail in this
+            # case, so we pass ignore_missing=True.
+            operation.add_cleanup(self._remove_pack_indices, self._new_pack,
+                                  ignore_missing=True)
+            operation.run_simple()
         for resumed_pack in self._resumed_packs:
-                resumed_pack.abort()
-                # See comment in previous finally block.
-                    self._remove_pack_indices(resumed_pack)
+            operation = cleanup.OperationWithCleanups(resumed_pack.abort)
+            # See comment in previous finally block.
+            operation.add_cleanup(self._remove_pack_indices, resumed_pack,
+                                  ignore_missing=True)
+            operation.run_simple()
         del self._resumed_packs[:]
     def _remove_resumed_pack_indices(self):
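
The new `_abort_write_group` above replaces the old try/finally cleanup with `bzrlib.cleanup.OperationWithCleanups`: the abort call is wrapped, the follow-up steps are registered with `add_cleanup`, and `run_simple()` runs the wrapped callable and then the registered cleanups, which is what lets the cleanups still happen when the abort itself raises. A tiny self-contained usage sketch, assuming only the three calls that appear in the hunk (`OperationWithCleanups`, `add_cleanup`, `run_simple`); the toy functions are hypothetical:

    from bzrlib import cleanup

    def abort():
        # Stand-in for self._new_pack.abort(); simulate it failing.
        raise RuntimeError('abort failed')

    def note_cleanup(label):
        print 'cleanup ran for %s' % label

    operation = cleanup.OperationWithCleanups(abort)
    operation.add_cleanup(note_cleanup, 'new pack')
    # run_simple() invokes abort(); the registered cleanup still runs, and the
    # RuntimeError is then propagated to the caller.
    operation.run_simple()
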
@@ -2234 +2285 @@
         self._reconcile_fixes_text_parents = True
         self._reconcile_backsup_inventory = False
-    def _warn_if_deprecated(self):
+    def _warn_if_deprecated(self, branch=None):
         # This class isn't deprecated, but one sub-format is
         if isinstance(self._format, RepositoryFormatKnitPack5RichRootBroken):
-            from bzrlib import repository
-            if repository._deprecation_warning_done:
-                return
-            repository._deprecation_warning_done = True
-            warning("Format %s for %s is deprecated - please use"
-                " 'bzr upgrade --1.6.1-rich-root'"
-                % (self._format, self.bzrdir.transport.base))
+            super(KnitPackRepository, self)._warn_if_deprecated(branch)
     def _abort_write_group(self):
         self.revisions._index._key_dependencies.clear()
@@ -2848 +2894 @@
     repository_class = KnitPackRepository
     _commit_builder_class = PackRootCommitBuilder
     rich_root_data = True
     supports_tree_reference = True
     supports_external_lookups = True
     # What index classes to use