# Copyright (C) 2004, 2005 by Martin Pool
# Copyright (C) 2005, 2006 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

######################################################################

def check():
    """Consistency check of tree."""
    mutter("checking tree")
    check_patch_chaining()
    check_patch_uniqueness()
    mutter("tree looks OK")

## TODO: Check that previous-inventory and previous-manifest
## are the same as those stored in the previous changeset.

## TODO: Check all patches present in patch directory are
## mentioned in patch history; having an orphaned patch only gives
## a warning.

## TODO: Check cached data is consistent with data reconstructed
## from scratch.

## TODO: Check no control files are versioned.

## TODO: Check that the before-hash of each file in a later
## revision matches the after-hash of that file in the previous
## revision.


def check_inventory():
    mutter("checking inventory file and ids...")
    seen_ids = set()
    seen_names = set()
    # each inventory line is assumed to hold "file_id name"
    for l in controlfile('inventory').readlines():
        fields = l.rstrip('\n').split(' ', 1)
        if len(fields) != 2:
            bailout("malformed inventory line: " + `l`)
        file_id, name = fields
        if file_id in seen_ids:
            bailout("duplicated file id " + file_id)
        seen_ids.add(file_id)
        if name in seen_names:
            bailout("duplicated file name in inventory: " + quotefn(name))
        seen_names.add(name)
        if is_control_file(name):
            raise BzrError("control file %s present in inventory" % quotefn(name))


def check_patches_exist():
    """Check constraint of current version: all patches exist"""
    mutter("checking all patches are present...")
    for pid in revision_history():
        read_patch_header(pid)


def check_patch_chaining():
    """Check ancestry of patches and history file is consistent"""
    mutter("checking patch chaining...")
    prev = None
    for pid in revision_history():
        log_prev = read_patch_header(pid).precursor
        if log_prev != prev:
            bailout("inconsistent precursor links on " + pid)
        prev = pid


def check_patch_uniqueness():
    """Make sure no patch is listed twice in the history.

    This should be implied by having correct ancestry but I'll check it
    anyhow."""
    mutter("checking history for duplicates...")
    seen = set()
    for pid in revision_history():
        if pid in seen:
            bailout("patch " + pid + " appears twice in history")
        seen.add(pid)


# TODO: Check ancestries are correct for every revision: includes
# every one committed so far, and in a reasonable order.

# TODO: Also check non-mainline revisions mentioned as parents.

# TODO: Check for extra files in the control directory.

# TODO: Check revision, inventory and entry objects have all
# required fields.

# TODO: Get every revision in the revision-store even if they're not
# referenced by history and make sure they're all valid.

# TODO: Perhaps have a way to record errors other than by raising exceptions;
# would perhaps be enough to accumulate exception objects in a list without
# raising them.  If there's more than one exception it'd be good to see them
# all.

"""Checking of bzr objects.

check_refs is a concept used for optimising check. Objects that depend on other
objects (e.g. tree on repository) can list the objects they would be requesting
so that when the dependent object is checked, matches can be pulled out and
evaluated in-line rather than re-reading the same data many times.
check_refs are tuples (kind, value). Currently defined kinds are:

* 'trees', where value is a revid and the looked up objects are revision trees.
* 'lefthand-distance', where value is a revid and the looked up object is the
  distance along the lefthand path to NULL for that revid.
* 'revision-existence', where value is a revid, and the result is True or False
  indicating that the revision was found/not found.
"""

from bzrlib import (
    errors,
    ui,
    )
from bzrlib.branch import Branch
from bzrlib.bzrdir import BzrDir
from bzrlib.revision import NULL_REVISION
from bzrlib.symbol_versioning import deprecated_function, deprecated_in
from bzrlib.trace import mutter, note
from bzrlib.workingtree import WorkingTree
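

# Illustrative sketch only -- not part of the bzrlib API.  It shows how a
# caller might accumulate the callback_refs mapping described in the module
# docstring before passing it to Check.check(); the helper name and argument
# names are assumptions made for this example.
def _example_callback_refs(wanting_object, revid):
    """Request the revision tree, lefthand distance and existence of ``revid``
    on behalf of ``wanting_object`` (e.g. a Branch or WorkingTree)."""
    needed_refs = {}
    for ref in [('trees', revid),
                ('lefthand-distance', revid),
                ('revision-existence', revid)]:
        # each ref maps to the list of objects that asked for it
        needed_refs.setdefault(ref, []).append(wanting_object)
    return needed_refs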


class Check(object):
    """Check a repository"""

    # The Check object interacts with InventoryEntry.check, etc.

    def __init__(self, repository, check_repo=True):
        self.repository = repository
        self.checked_rev_cnt = 0
        self.ghosts = set()
        self.missing_parent_links = {}
        self.missing_inventory_sha_cnt = 0
        self.missing_revision_cnt = 0
        self.checked_weaves = set()
        self.unreferenced_versions = set()
        self.inconsistent_parents = []
        self.rich_roots = repository.supports_rich_root()
        self.text_key_references = {}
        self.check_repo = check_repo
        self.other_results = []
        # Plain text lines to include in the report
        self._report_items = []
        # Keys we are looking for; may be large and need spilling to disk.
        # key->(type(revision/inventory/text/signature/map), sha1, first-referer)
        self.pending_keys = {}
        # Ancestors map for all of revisions being checked; while large helper
        # functions we call would create it anyway, so better to have once and
        # keep it.
        self.ancestors = {}
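
    # Illustrative example (assumed revid/sha1 values): after a revision
    # 'rev-1' has been scanned, pending_keys could contain
    #     ('inventories', 'rev-1'): ('inventory', '<hex sha1>', 'rev-1')
    # meaning that revision 'rev-1' expects the text stored under the key
    # ('inventories', 'rev-1') to have the given sha1.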

    def check(self, callback_refs=None, check_repo=True):
        if callback_refs is None:
            callback_refs = {}
        self.repository.lock_read()
        self.progress = ui.ui_factory.nested_progress_bar()
        try:
            self.progress.update('check', 0, 4)
            if self.check_repo:
                self.progress.update('checking revisions', 0)
                self.check_revisions()
                self.progress.update('checking commit contents', 1)
                self.repository._check_inventories(self)
                self.progress.update('checking file graphs', 2)
                # check_weaves is done after the revision scan so that
                # revision index is known to be valid.
                self.check_weaves()
            self.progress.update('checking branches and trees', 3)
            if callback_refs:
                repo = self.repository
                # calculate all refs, and callback the objects requesting them.
                refs = {}
                wanting_items = set()
                # Current crude version calculates everything and calls
                # everything at once. Doing a queue and popping as things are
                # satisfied would be cheaper on memory [but few people have
                # huge numbers of working trees today. TODO: fix before this
                # becomes a problem].
                distances = set()
                existences = set()
                for ref, wantlist in callback_refs.iteritems():
                    wanting_items.update(wantlist)
                    kind, value = ref
                    if kind == 'trees':
                        refs[ref] = repo.revision_tree(value)
                    elif kind == 'lefthand-distance':
                        distances.add(value)
                    elif kind == 'revision-existence':
                        existences.add(value)
                    else:
                        raise AssertionError(
                            'unknown ref kind for ref %s' % ref)
                node_distances = repo.get_graph().find_lefthand_distances(distances)
                for key, distance in node_distances.iteritems():
                    refs[('lefthand-distance', key)] = distance
                    if key in existences and distance > 0:
                        refs[('revision-existence', key)] = True
                        existences.remove(key)
                parent_map = repo.get_graph().get_parent_map(existences)
                for key in parent_map:
                    refs[('revision-existence', key)] = True
                    existences.remove(key)
                for key in existences:
                    refs[('revision-existence', key)] = False
                for item in wanting_items:
                    if isinstance(item, WorkingTree):
                        item._check(refs)
                    if isinstance(item, Branch):
                        self.other_results.append(item.check(refs))
        finally:
            self.progress.finished()
            self.repository.unlock()

    def _check_revisions(self, revisions_iterator):
        """Check revision objects by decorating a generator.

        :param revisions_iterator: An iterator of (revid, Revision-or-None).
        :return: A generator of the contents of revisions_iterator.
        """
        self.planned_revisions = set()
        for revid, revision in revisions_iterator:
            yield revid, revision
            self._check_one_rev(revid, revision)
        # Flatten the revisions we found to guarantee consistent later
        # iteration.
        self.planned_revisions = list(self.planned_revisions)
        # TODO: extract digital signatures as items to callback on too.

    def check_revisions(self):
        """Scan revisions, checking data directly available as we go."""
        revision_iterator = self.repository._iter_revisions(None)
        revision_iterator = self._check_revisions(revision_iterator)
        # We read all the revisions here:
        # - doing this allows later code to depend on the revision index.
        # - we can fill out existence flags at this point
        # - we can read the revision inventory sha at this point
        # - we can check properties and serialisers etc.
        if not self.repository._format.revision_graph_can_have_wrong_parents:
            # The check against the index isn't needed.
            self.revs_with_bad_parents_in_index = None
            for thing in revision_iterator:
                pass
        else:
            bad_revisions = self.repository._find_inconsistent_revision_parents(
                revision_iterator)
            self.revs_with_bad_parents_in_index = list(bad_revisions)

    def report_results(self, verbose):
        if self.check_repo:
            self._report_repo_results(verbose)
        for result in self.other_results:
            result.report_results(verbose)

    def _report_repo_results(self, verbose):
        note('checked repository %s format %s',
             self.repository.user_url,
             self.repository._format)
        note('%6d revisions', self.checked_rev_cnt)
        note('%6d file-ids', len(self.checked_weaves))
        note('%6d unreferenced text versions',
             len(self.unreferenced_versions))
        if verbose and len(self.unreferenced_versions):
            for file_id, revision_id in self.unreferenced_versions:
                note('unreferenced version: {%s} in %s', revision_id,
                     file_id)
        if self.missing_inventory_sha_cnt:
            note('%6d revisions are missing inventory_sha1',
                 self.missing_inventory_sha_cnt)
        if self.missing_revision_cnt:
            note('%6d revisions are mentioned but not present',
                 self.missing_revision_cnt)
        if len(self.ghosts):
            note('%6d ghost revisions', len(self.ghosts))
            if verbose:
                for ghost in self.ghosts:
                    note(' %s', ghost)
        if len(self.missing_parent_links):
            note('%6d revisions missing parents in ancestry',
                 len(self.missing_parent_links))
            if verbose:
                for link, linkers in self.missing_parent_links.items():
                    note(' %s should be in the ancestry for:', link)
                    for linker in linkers:
                        note(' * %s', linker)
        if len(self.inconsistent_parents):
            note('%6d inconsistent parents', len(self.inconsistent_parents))
            if verbose:
                for info in self.inconsistent_parents:
                    revision_id, file_id, found_parents, correct_parents = info
                    note(' * %s version %s has parents %r '
                         'but should have %r'
                         % (file_id, revision_id, found_parents,
                            correct_parents))
        if self.revs_with_bad_parents_in_index:
            note('%6d revisions have incorrect parents in the revision index',
                 len(self.revs_with_bad_parents_in_index))
            if verbose:
                for item in self.revs_with_bad_parents_in_index:
                    revision_id, index_parents, actual_parents = item
                    note(' %s has wrong parents in index: '
                         '%r should be %r',
                         revision_id, index_parents, actual_parents)
        for item in self._report_items:
            note(item)

    def _check_one_rev(self, rev_id, rev):
        """Cross-check one revision.

        :param rev_id: A revision id to check.
        :param rev: A revision or None to indicate a missing revision.
        """
        if rev.revision_id != rev_id:
            self._report_items.append(
                'Mismatched internal revid {%s} and index revid {%s}' % (
                rev.revision_id, rev_id))
            rev_id = rev.revision_id
        # Check this revision tree etc, and count as seen when we encounter a
        # reference to it.
        self.planned_revisions.add(rev_id)
        # It is not a ghost.
        self.ghosts.discard(rev_id)
        # Count all parents as ghosts if we haven't seen them yet.
        for parent in rev.parent_ids:
            if not parent in self.planned_revisions:
                self.ghosts.add(parent)

        self.ancestors[rev_id] = tuple(rev.parent_ids) or (NULL_REVISION,)
        self.add_pending_item(rev_id, ('inventories', rev_id), 'inventory',
            rev.inventory_sha1)
        self.checked_rev_cnt += 1

    def add_pending_item(self, referer, key, kind, sha1):
        """Add a reference to a sha1 to be cross checked against a key.

        :param referer: The referer that expects key to have sha1.
        :param key: A storage key e.g. ('texts', 'foo@bar-20040504-1234')
        :param kind: revision/inventory/text/map/signature
        :param sha1: A hex sha1 or None if no sha1 is known.
        """
        existing = self.pending_keys.get(key)
        if existing:
            if sha1 != existing[1]:
                self._report_items.append('Multiple expected sha1s for %s. {%s}'
                    ' expects {%s}, {%s} expects {%s}' % (
                    key, referer, sha1, existing[1], existing[0]))
        else:
            self.pending_keys[key] = (kind, sha1, referer)
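
    # For example, _check_one_rev registers the expected inventory sha1 of
    # each scanned revision with:
    #     self.add_pending_item(rev_id, ('inventories', rev_id), 'inventory',
    #         rev.inventory_sha1)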

    def check_weaves(self):
        """Check all the weaves we can get our hands on."""
        storebar = ui.ui_factory.nested_progress_bar()
        try:
            self._check_weaves(storebar)
        finally:
            storebar.finished()

    def _check_weaves(self, storebar):
        storebar.update('text-index', 0, 2)
        if self.repository._format.fast_deltas:
            # We haven't considered every fileid instance so far.
            weave_checker = self.repository._get_versioned_file_checker(
                ancestors=self.ancestors)
        else:
            weave_checker = self.repository._get_versioned_file_checker(
                text_key_references=self.text_key_references,
                ancestors=self.ancestors)
        storebar.update('file-graph', 1)
        result = weave_checker.check_file_version_parents(
            self.repository.texts)
        self.checked_weaves = weave_checker.file_ids
        bad_parents, unused_versions = result
        bad_parents = bad_parents.items()
        for text_key, (stored_parents, correct_parents) in bad_parents:
            # XXX not ready for id join/split operations.
            weave_id = text_key[0]
            revision_id = text_key[-1]
            weave_parents = tuple([parent[-1] for parent in stored_parents])
            correct_parents = tuple([parent[-1] for parent in correct_parents])
            self.inconsistent_parents.append(
                (revision_id, weave_id, weave_parents, correct_parents))
        self.unreferenced_versions.update(unused_versions)
324
def _add_entry_to_text_key_references(self, inv, entry):
325
if not self.rich_roots and entry.name == '':
327
key = (entry.file_id, entry.revision)
328
self.text_key_references.setdefault(key, False)
329
if entry.revision == inv.revision_id:
330
self.text_key_references[key] = True


def scan_branch(branch, needed_refs, to_unlock):
    """Scan a branch for refs.

    :param branch: The branch to schedule for checking.
    :param needed_refs: Refs we are accumulating.
    :param to_unlock: The unlock list accumulating.
    """
    note("Checking branch at '%s'." % (branch.base,))
    branch.lock_read()
    to_unlock.append(branch)
    branch_refs = branch._get_check_refs()
    for ref in branch_refs:
        reflist = needed_refs.setdefault(ref, [])
        reflist.append(branch)


def scan_tree(base_tree, tree, needed_refs, to_unlock):
    """Scan a tree for refs.

    :param base_tree: The original tree check opened, used to detect duplicate
        tree checks.
    :param tree: The tree to schedule for checking.
    :param needed_refs: Refs we are accumulating.
    :param to_unlock: The unlock list accumulating.
    """
    if base_tree is not None and tree.basedir == base_tree.basedir:
        return
    note("Checking working tree at '%s'." % (tree.basedir,))
    tree.lock_read()
    to_unlock.append(tree)
    tree_refs = tree._get_check_refs()
    for ref in tree_refs:
        reflist = needed_refs.setdefault(ref, [])
        reflist.append(tree)


def check_dwim(path, verbose, do_branch=False, do_repo=False, do_tree=False):
    """Check multiple objects.

    If errors occur they are accumulated and reported as far as possible, and
    an exception raised at the end of the process.
    """
    try:
        base_tree, branch, repo, relpath = \
            BzrDir.open_containing_tree_branch_or_repository(path)
    except errors.NotBranchError:
        base_tree = branch = repo = None

    to_unlock = []
    needed_refs = {}
    try:
        if base_tree is not None:
            # If the tree is a lightweight checkout we won't see it in
            # repo.find_branches - add now.
            scan_tree(None, base_tree, needed_refs, to_unlock)
            branch = base_tree.branch
        if branch is not None:
            if repo is None:
                # The branch is in a shared repository
                repo = branch.repository
        if repo is not None:
            repo.lock_read()
            to_unlock.append(repo)
            branches = repo.find_branches(using=True)
            saw_tree = False
            if do_branch or do_tree:
                for branch in branches:
                    if do_tree:
                        try:
                            tree = branch.bzrdir.open_workingtree()
                            saw_tree = True
                        except (errors.NotLocalUrl, errors.NoWorkingTree):
                            pass
                        else:
                            scan_tree(base_tree, tree, needed_refs, to_unlock)
                    if do_branch:
                        scan_branch(branch, needed_refs, to_unlock)
            if do_branch and not branches:
                note("No branch found at specified location.")
            if do_tree and base_tree is None and not saw_tree:
                note("No working tree found at specified location.")
            if do_repo or do_branch or do_tree:
                if do_repo:
                    note("Checking repository at '%s'."
                         % (repo.user_url,))
                result = repo.check(None, callback_refs=needed_refs,
                    check_repo=do_repo)
                result.report_results(verbose)
        else:
            if do_tree:
                note("No working tree found at specified location.")
            if do_branch:
                note("No branch found at specified location.")
            if do_repo:
                note("No repository found at specified location.")
    finally:
        for thing in to_unlock:
            thing.unlock()
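

# A minimal usage sketch (illustrative; the path is hypothetical).  This is
# roughly what the ``bzr check`` command does with its --branch, --repo and
# --tree options:
#
#     check_dwim('/path/to/branch', verbose=True,
#                do_branch=True, do_repo=True, do_tree=True)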