@@ -224,10 +225,14 @@
         return self.index_name('text', name)
 
     def _replace_index_with_readonly(self, index_type):
+        unlimited_cache = False
+        if index_type == 'chk':
+            unlimited_cache = True
         setattr(self, index_type + '_index',
             self.index_class(self.index_transport,
                 self.index_name(index_type, self.name),
-                self.index_sizes[self.index_offset(index_type)]))
+                self.index_sizes[self.index_offset(index_type)],
+                unlimited_cache=unlimited_cache))
 
 
 class ExistingPack(Pack):
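
Note: the new `unlimited_cache` flag exists because chk index lookups are keyed by content hash, so accesses are effectively random and a bounded LRU page cache just thrashes. A minimal standalone sketch of the two cache policies (the class and helper below are illustrative, not bzrlib's):

    from collections import OrderedDict

    class BoundedCache(object):
        """A tiny LRU-ish cache: evicts the oldest entry past max_items."""

        def __init__(self, max_items=100):
            self._data = OrderedDict()
            self._max_items = max_items

        def __setitem__(self, key, value):
            self._data.pop(key, None)
            self._data[key] = value
            if len(self._data) > self._max_items:
                self._data.popitem(last=False)

        def get(self, key, default=None):
            return self._data.get(key, default)

    def make_page_cache(unlimited_cache=False):
        # chk pages are addressed by hash, so there is no locality for an
        # LRU bound to exploit; a plain dict keeps every page ever read.
        if unlimited_cache:
            return {}
        return BoundedCache(max_items=100)
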
@@ -1105,7 +1117,7 @@
         iterator is a tuple with:
         index, readv_vector, node_vector. readv_vector is a list ready to
         hand to the transport readv method, and node_vector is a list of
-        (key, eol_flag, references) for the the node retrieved by the
+        (key, eol_flag, references) for the node retrieved by the
         matching readv_vector.
         """
         # group by pack so we do one readv per pack
@@ -1530,10 +1545,11 @@
             self._remove_pack_from_memory(pack)
         # record the newly available packs and stop advertising the old
         # packs
-        result = self._save_pack_names(clear_obsolete_packs=True)
-        # Move the old packs out of the way now they are no longer referenced.
-        for revision_count, packs in pack_operations:
-            self._obsolete_packs(packs)
+        to_be_obsoleted = []
+        for _, packs in pack_operations:
+            to_be_obsoleted.extend(packs)
+        result = self._save_pack_names(clear_obsolete_packs=True,
+                                       obsolete_packs=to_be_obsoleted)
         return result
 
     def _flush_new_pack(self):
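
Note: the reordering above matters for crash safety. The packs to obsolete are collected first, the new pack-names file is written (the commit point), and only then does `_save_pack_names` move the old packs aside. A failure between the two steps leaves redundant but still-readable packs rather than missing data. A schematic of that ordering, with stand-in callables:

    def swap_in_new_packs(save_pack_names, obsolete_pack, old_packs):
        # Step 1 (commit point): persist the authoritative pack list.
        save_pack_names()
        # Step 2 (best-effort cleanup): the old packs are now unreferenced,
        # so a crash or rename failure here costs disk space, not data.
        for pack in old_packs:
            try:
                obsolete_pack(pack)
            except EnvironmentError:
                pass  # logged and skipped in the real code
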
@@ -1567,7 +1583,9 @@
         # determine which packs need changing
         pack_operations = [[0, []]]
         for pack in self.all_packs():
-            if not hint or pack.name in hint:
+            if hint is None or pack.name in hint:
+                # Either no hint was provided (so we are packing everything),
+                # or this pack was included in the hint.
                 pack_operations[-1][0] += pack.get_revision_count()
                 pack_operations[-1][1].append(pack)
         self._execute_pack_operations(pack_operations, OptimisingPacker)
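
Note: the `hint is None` test fixes a real edge case: `hint=None` means "repack everything", while an empty hint list is a valid selection of nothing, which the old `not hint` conflated with None. A self-contained illustration:

    def select_packs(all_pack_names, hint=None):
        # hint=None: no restriction, repack everything.
        # hint=[]: an explicit empty selection, repack nothing.
        return [name for name in all_pack_names
                if hint is None or name in hint]

    assert select_packs(['a', 'b']) == ['a', 'b']         # None: everything
    assert select_packs(['a', 'b'], hint=[]) == []        # []: nothing
    assert select_packs(['a', 'b'], hint=['b']) == ['b']  # subset
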
@@ -1665,7 +1683,7 @@
         txt_index = self._make_index(name, '.tix')
         sig_index = self._make_index(name, '.six')
         if self.chk_index is not None:
-            chk_index = self._make_index(name, '.cix')
+            chk_index = self._make_index(name, '.cix', unlimited_cache=True)
         else:
             chk_index = None
         result = ExistingPack(self._pack_transport, name, rev_index,
@@ -1690,7 +1708,8 @@
         txt_index = self._make_index(name, '.tix', resume=True)
         sig_index = self._make_index(name, '.six', resume=True)
         if self.chk_index is not None:
-            chk_index = self._make_index(name, '.cix', resume=True)
+            chk_index = self._make_index(name, '.cix', resume=True,
+                                         unlimited_cache=True)
         else:
             chk_index = None
         result = self.resumed_pack_factory(name, rev_index, inv_index,
@@ -1726,16 +1745,17 @@
         return self._index_class(self.transport, 'pack-names', None
                 ).iter_all_entries()
 
-    def _make_index(self, name, suffix, resume=False):
+    def _make_index(self, name, suffix, resume=False, unlimited_cache=False):
         size_offset = self._suffix_offsets[suffix]
         index_name = name + suffix
         if resume:
             transport = self._upload_transport
             index_size = transport.stat(index_name).st_size
         else:
             transport = self._index_transport
             index_size = self._names[name][size_offset]
-        return self._index_class(transport, index_name, index_size)
+        return self._index_class(transport, index_name, index_size,
+                                 unlimited_cache=unlimited_cache)
 
     def _max_pack_count(self, total_revisions):
         """Return the maximum number of packs to use for total revisions.
@@ -1769,17 +1789,26 @@
         :param return: None.
         """
         for pack in packs:
-            pack.pack_transport.rename(pack.file_name(),
-                '../obsolete_packs/' + pack.file_name())
+            try:
+                pack.pack_transport.rename(pack.file_name(),
+                    '../obsolete_packs/' + pack.file_name())
+            except (errors.PathError, errors.TransportError), e:
+                # TODO: Should these be warnings or mutters?
+                mutter("couldn't rename obsolete pack, skipping it:\n%s"
+                       % (e,))
             # TODO: Probably needs to know all possible indices for this pack
             # - or maybe list the directory and move all indices matching this
            # name whether we recognize it or not?
             suffixes = ['.iix', '.six', '.tix', '.rix']
             if self.chk_index is not None:
                 suffixes.append('.cix')
             for suffix in suffixes:
-                self._index_transport.rename(pack.name + suffix,
-                    '../obsolete_packs/' + pack.name + suffix)
+                try:
+                    self._index_transport.rename(pack.name + suffix,
+                        '../obsolete_packs/' + pack.name + suffix)
+                except (errors.PathError, errors.TransportError), e:
+                    mutter("couldn't rename obsolete index, skipping it:\n%s"
+                           % (e,))
 
     def pack_distribution(self, total_revisions):
         """Generate a list of the number of revisions to put in each pack.
@@ -1875,7 +1905,7 @@
         disk_nodes.difference_update(deleted_nodes)
         disk_nodes.update(new_nodes)
 
-        return disk_nodes, deleted_nodes, new_nodes
+        return disk_nodes, deleted_nodes, new_nodes, orig_disk_nodes
 
     def _syncronize_pack_names_from_disk_nodes(self, disk_nodes):
         """Given the correct set of pack files, update our saved info.
@@ -1932,26 +1962,41 @@
         :param clear_obsolete_packs: If True, clear out the contents of the
             obsolete_packs directory.
+        :param obsolete_packs: Packs that are obsolete once the new pack-names
+            file has been written.
         :return: A list of the names saved that were not previously on disk.
         """
+        already_obsolete = []
         self.lock_names()
         try:
             builder = self._index_builder_class()
-            disk_nodes, deleted_nodes, new_nodes = self._diff_pack_names()
+            (disk_nodes, deleted_nodes, new_nodes,
+             orig_disk_nodes) = self._diff_pack_names()
             # TODO: handle same-name, index-size-changes here -
             # e.g. use the value from disk, not ours, *unless* we're the one
             # changing it.
             for key, value in disk_nodes:
                 builder.add_node(key, value)
             self.transport.put_file('pack-names', builder.finish(),
                 mode=self.repo.bzrdir._get_file_mode())
-            # move the baseline forward
             self._packs_at_load = disk_nodes
             if clear_obsolete_packs:
-                self._clear_obsolete_packs()
+                to_preserve = None
+                if obsolete_packs:
+                    to_preserve = set([o.name for o in obsolete_packs])
+                already_obsolete = self._clear_obsolete_packs(to_preserve)
         finally:
             self._unlock_names()
         # synchronise the memory packs list with what we just wrote:
         self._syncronize_pack_names_from_disk_nodes(disk_nodes)
+        if obsolete_packs:
+            # TODO: We could add one more condition here. "if o.name not in
+            #       orig_disk_nodes and o != the new_pack we haven't written to
+            #       disk yet. However, the new pack object is not easily
+            #       accessible here (it would have to be passed through the
+            #       autopacking code, etc.)
+            obsolete_packs = [o for o in obsolete_packs
+                              if o.name not in already_obsolete]
+            self._obsolete_packs(obsolete_packs)
         return [new_node[0][0] for new_node in new_nodes]
 
     def reload_pack_names(self):
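
Note: `_save_pack_names` now receives the packs being obsoleted, and `_clear_obsolete_packs` reports what was already sitting in obsolete_packs/; filtering one list by the other avoids renaming a pack onto a name that is already there when two processes race to obsolete the same packs. The filtering step in isolation:

    def packs_still_to_obsolete(obsolete_packs, already_obsolete):
        # already_obsolete: names found in obsolete_packs/ before this
        # save; renaming those again would collide, so skip them.
        return [o for o in obsolete_packs if o.name not in already_obsolete]
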
@@ -1974,6 +2019,10 @@
         # out the new value.
-        disk_nodes, _, _ = self._diff_pack_names()
-        self._packs_at_load = disk_nodes
+        (disk_nodes, deleted_nodes, new_nodes,
+         orig_disk_nodes) = self._diff_pack_names()
+        # _packs_at_load is meant to be the explicit list of names in
+        # 'pack-names' at the start. As such, it should not contain any
+        # pending names that haven't been written out yet.
+        self._packs_at_load = orig_disk_nodes
         (removed, added,
          modified) = self._syncronize_pack_names_from_disk_nodes(disk_nodes)
         if removed or added or modified:
@@ -1989,14 +2038,27 @@
             raise errors.RetryAutopack(self.repo, False, sys.exc_info())
 
-    def _clear_obsolete_packs(self):
+    def _clear_obsolete_packs(self, preserve=None):
         """Delete everything from the obsolete-packs directory.
+
+        :return: A list of pack identifiers (the filename without '.pack') that
+            were found in obsolete_packs.
         """
+        found = []
         obsolete_pack_transport = self.transport.clone('obsolete_packs')
+        if preserve is None:
+            preserve = set()
         for filename in obsolete_pack_transport.list_dir('.'):
+            name, ext = osutils.splitext(filename)
+            if ext == '.pack':
+                found.append(name)
+            if name in preserve:
+                continue
             try:
                 obsolete_pack_transport.delete(filename)
             except (errors.PathError, errors.TransportError), e:
-                warning("couldn't delete obsolete pack, skipping it:\n%s" % (e,))
+                warning("couldn't delete obsolete pack, skipping it:\n%s"
+                        % (e,))
+        return found
 
     def _start_write_group(self):
         # Do not permit preparation for writing if we're not in a 'write lock'.
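
Note: the `preserve`/`found` contract of `_clear_obsolete_packs` is worth spelling out: every `.pack` name seen is reported back (so the caller can avoid re-obsoleting it), while files named in `preserve` survive the sweep. A standalone equivalent over a local directory (illustrative, not the bzrlib transport API):

    import os

    def clear_obsolete_dir(dirpath, preserve=None, warn=lambda msg: None):
        if preserve is None:
            preserve = set()
        found = []
        for filename in os.listdir(dirpath):
            name, ext = os.path.splitext(filename)
            if ext == '.pack':
                found.append(name)      # report even if preserved
            if name in preserve:
                continue                # keep just-obsoleted packs
            try:
                os.unlink(os.path.join(dirpath, filename))
            except OSError as e:
                warn("couldn't delete obsolete pack, skipping it:\n%s" % (e,))
        return found
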
@@ -2054,6 +2116,16 @@
             self._remove_pack_indices(resumed_pack)
         del self._resumed_packs[:]
 
+    def _check_new_inventories(self):
+        """Detect missing inventories in this write group.
+
+        :returns: list of strs, summarising any problems found. If the list is
+            empty no problems were found.
+        """
+        # The base implementation does no checks. GCRepositoryPackCollection
+        # overrides this.
+        return []
+
     def _commit_write_group(self):
         all_missing = set()
         for prefix, versioned_file in (
@@ -2068,14 +2140,19 @@
             raise errors.BzrCheckError(
                 "Repository %s has missing compression parent(s) %r "
                 % (self.repo, sorted(all_missing)))
+        problems = self._check_new_inventories()
+        if problems:
+            problems_summary = '\n'.join(problems)
+            raise errors.BzrCheckError(
+                "Cannot add revision(s) to repository: " + problems_summary)
         self._remove_pack_indices(self._new_pack)
-        should_autopack = False
+        any_new_content = False
         if self._new_pack.data_inserted():
             # get all the data to disk and read to use
             self._new_pack.finish()
             self.allocate(self._new_pack)
             self._new_pack = None
-            should_autopack = True
+            any_new_content = True
         else:
             self._new_pack.abort()
             self._new_pack = None
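
Note: `_check_new_inventories` is a template-method hook: the base collection validates nothing, and (per the comment) GCRepositoryPackCollection overrides it, so `_commit_write_group` can refuse to commit whenever an override reports problems. The shape of the pattern, reduced to its essentials:

    class BaseCollection(object):
        def _check_new_inventories(self):
            return []  # base class: nothing to check

        def commit(self):
            problems = self._check_new_inventories()
            if problems:
                raise RuntimeError(
                    "Cannot add revision(s) to repository: "
                    + "\n".join(problems))

    class CheckingCollection(BaseCollection):
        def _check_new_inventories(self):
            # A real subclass would walk the inventories added in this
            # write group and return one message per missing text or
            # chk page; this stand-in just shows the contract.
            return []
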
@@ -2200,67 +2280,15 @@
         self._reconcile_fixes_text_parents = True
         self._reconcile_backsup_inventory = False
 
-    def _warn_if_deprecated(self):
+    def _warn_if_deprecated(self, branch=None):
         # This class isn't deprecated, but one sub-format is
         if isinstance(self._format, RepositoryFormatKnitPack5RichRootBroken):
-            from bzrlib import repository
-            if repository._deprecation_warning_done:
-                return
-            repository._deprecation_warning_done = True
-            warning("Format %s for %s is deprecated - please use"
-                    " 'bzr upgrade --1.6.1-rich-root'"
-                    % (self._format, self.bzrdir.transport.base))
+            super(KnitPackRepository, self)._warn_if_deprecated(branch)
 
     def _abort_write_group(self):
-        self.revisions._index._key_dependencies.refs.clear()
+        self.revisions._index._key_dependencies.clear()
         self._pack_collection._abort_write_group()
 
-    def _find_inconsistent_revision_parents(self):
-        """Find revisions with incorrectly cached parents.
-
-        :returns: an iterator yielding tuples of (revison-id, parents-in-index,
-            parents-in-revision).
-        """
-        if not self.is_locked():
-            raise errors.ObjectNotLocked(self)
-        pb = ui.ui_factory.nested_progress_bar()
-        result = []
-        try:
-            revision_nodes = self._pack_collection.revision_index \
-                .combined_index.iter_all_entries()
-            index_positions = []
-            # Get the cached index values for all revisions, and also the
-            # location in each index of the revision text so we can perform
-            # linear IO.
-            for index, key, value, refs in revision_nodes:
-                node = (index, key, value, refs)
-                index_memo = self.revisions._index._node_to_position(node)
-                if index_memo[0] != index:
-                    raise AssertionError('%r != %r' % (index_memo[0], index))
-                index_positions.append((index_memo, key[0],
-                    tuple(parent[0] for parent in refs[0])))
-                pb.update("Reading revision index", 0, 0)
-            index_positions.sort()
-            batch_size = 1000
-            pb.update("Checking cached revision graph", 0,
-                      len(index_positions))
-            for offset in xrange(0, len(index_positions), 1000):
-                pb.update("Checking cached revision graph", offset)
-                to_query = index_positions[offset:offset + batch_size]
-                if not to_query:
-                    break
-                rev_ids = [item[1] for item in to_query]
-                revs = self.get_revisions(rev_ids)
-                for revision, item in zip(revs, to_query):
-                    index_parents = item[2]
-                    rev_parents = tuple(revision.parent_ids)
-                    if index_parents != rev_parents:
-                        result.append((revision.revision_id, index_parents,
-                            rev_parents))
-        finally:
-            pb.finished()
-        return result
-
     def _get_source(self, to_format):
         if to_format.network_name() == self._format.network_name():
             return KnitPackStreamSource(self, to_format)
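
Note: `_warn_if_deprecated` now takes an optional `branch` and defers to the base class instead of hand-rolling the once-only warning. Presumably (this diff does not show the base implementation) the shared version can consult branch-level configuration before nagging; a duck-typed sketch of that idea, in which `suppresses` is a hypothetical method, not bzrlib's actual API:

    def warn_if_deprecated(format_name, branch=None, warn=None):
        # Hypothetical: a branch-level setting can silence the warning.
        if branch is not None and branch.suppresses('format_deprecation'):
            return
        if warn is not None:
            warn("Format %s is deprecated" % (format_name,))
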
@@ -2278,11 +2306,12 @@
         self._pack_collection._start_write_group()
 
     def _commit_write_group(self):
-        self.revisions._index._key_dependencies.refs.clear()
-        return self._pack_collection._commit_write_group()
+        hint = self._pack_collection._commit_write_group()
+        self.revisions._index._key_dependencies.clear()
+        return hint
 
     def suspend_write_group(self):
         # XXX check self._write_group is self.get_transaction()?
         tokens = self._pack_collection._suspend_write_group()
-        self.revisions._index._key_dependencies.refs.clear()
+        self.revisions._index._key_dependencies.clear()
         self._write_group = None
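
Note: two things changed here: `.refs.clear()` became `.clear()` (callers no longer reach into the tracker's internal dict), and in `_commit_write_group` the dependency map is now cleared only after the pack collection commits, so a failure during commit leaves the state intact for abort handling. A minimal tracker showing the encapsulation (illustrative, not bzrlib's class):

    class KeyDependencies(object):
        """Tracks keys whose referenced parents are not yet present."""

        def __init__(self):
            self._refs = {}

        def add_references(self, key, parents):
            self._refs.setdefault(key, set()).update(parents)

        def clear(self):
            # Callers use this instead of poking at the internal dict
            # (the old `tracker.refs.clear()` spelling).
            self._refs.clear()
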
@@ -2600,13 +2633,5 @@
     _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
 
-    def check_conversion_target(self, target_format):
-        if not target_format.rich_root_data:
-            raise errors.BadConversionTarget(
-                'Does not support rich root data.', target_format)
-        if not getattr(target_format, 'supports_tree_reference', False):
-            raise errors.BadConversionTarget(
-                'Does not support nested trees', target_format)
-
     def get_format_string(self):
         """See RepositoryFormat.get_format_string()."""
         return "Bazaar pack repository format 1 with subtree support (needs bzr 0.92)\n"
@@ -2646,10 +2671,5 @@
     _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
 
-    def check_conversion_target(self, target_format):
-        if not target_format.rich_root_data:
-            raise errors.BadConversionTarget(
-                'Does not support rich root data.', target_format)
-
     def get_format_string(self):
         """See RepositoryFormat.get_format_string()."""
         return ("Bazaar pack repository format 1 with rich root"
@@ -2732,10 +2749,5 @@
     _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
 
-    def check_conversion_target(self, target_format):
-        if not target_format.rich_root_data:
-            raise errors.BadConversionTarget(
-                'Does not support rich root data.', target_format)
-
     def get_format_string(self):
         """See RepositoryFormat.get_format_string()."""
         return "Bazaar RepositoryFormatKnitPack5RichRoot (bzr 1.6.1)\n"
@@ -2783,10 +2795,5 @@
     _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
 
-    def check_conversion_target(self, target_format):
-        if not target_format.rich_root_data:
-            raise errors.BadConversionTarget(
-                'Does not support rich root data.', target_format)
-
     def get_format_string(self):
         """See RepositoryFormat.get_format_string()."""
         return "Bazaar RepositoryFormatKnitPack5RichRoot (bzr 1.6)\n"
@@ -2863,10 +2867,5 @@
     _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
 
-    def check_conversion_target(self, target_format):
-        if not target_format.rich_root_data:
-            raise errors.BadConversionTarget(
-                'Does not support rich root data.', target_format)
-
     def get_format_string(self):
         """See RepositoryFormat.get_format_string()."""
         return "Bazaar RepositoryFormatKnitPack6RichRoot (bzr 1.9)\n"
@@ -2909,13 +2908,5 @@
     _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
 
-    def check_conversion_target(self, target_format):
-        if not target_format.rich_root_data:
-            raise errors.BadConversionTarget(
-                'Does not support rich root data.', target_format)
-        if not getattr(target_format, 'supports_tree_reference', False):
-            raise errors.BadConversionTarget(
-                'Does not support nested trees', target_format)
-
     def get_format_string(self):
         """See RepositoryFormat.get_format_string()."""
         return ("Bazaar development format 2 with subtree support "