######################################################################
# consistency checks

# These helpers lean on names defined elsewhere in the tree: mutter and
# bailout are imported further down in this file, while controlfile,
# revision_history, read_patch_header, quotefn, is_control_file and
# BzrError come from elsewhere in the tree (not shown in this excerpt).

def check_tree():   # driver name assumed; the original def line is not in this excerpt
    """Consistency check of tree."""
    mutter("checking tree")
    check_inventory()
    check_patches_exist()
    check_patch_chaining()
    check_patch_uniqueness()
    mutter("tree looks OK")

    ## TODO: Check that previous-inventory and previous-manifest
    ## are the same as those stored in the previous changeset.

    ## TODO: Check all patches present in patch directory are
    ## mentioned in patch history; having an orphaned patch only gives
    ## a warning.  (A sketch of this check follows the function.)

    ## TODO: Check cached data is consistent with data reconstructed
    ## from scratch.

    ## TODO: Check no control files are versioned.

    ## TODO: Check that the before-hash of each file in a later
    ## revision matches the after-hash in the previous revision to
    ## touch it.
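

# Sketch of the orphaned-patch TODO above, not part of the original checks:
# stored_patch_ids() is a hypothetical helper standing in for "list every
# patch id present in the patch directory"; revision_history() and mutter
# are the same names used by the checks above.
def check_no_orphaned_patches():
    in_history = {}
    for pid in revision_history():
        in_history[pid] = True
    for pid in stored_patch_ids():
        if pid not in in_history:
            # an orphaned patch only rates a warning, not a bailout
            mutter("warning: orphaned patch " + pid + " not mentioned in history")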


def check_inventory():
    mutter("checking inventory file and ids...")
    seen_ids = {}
    seen_names = {}
    for l in controlfile('inventory').readlines():
        # format assumed: each line holds a file id and a name
        fields = l.split()
        if len(fields) != 2:
            bailout("malformed inventory line: " + `l`)
        file_id, name = fields
        if file_id in seen_ids:
            bailout("duplicated file id " + file_id)
        seen_ids[file_id] = True
        if name in seen_names:
            bailout("duplicated file name in inventory: " + quotefn(name))
        seen_names[name] = True
        if is_control_file(name):
            raise BzrError("control file %s present in inventory" % quotefn(name))


def check_patches_exist():
    """Check constraint of current version: all patches exist"""
    mutter("checking all patches are present...")
    for pid in revision_history():
        read_patch_header(pid)


def check_patch_chaining():
    """Check ancestry of patches and history file is consistent"""
    mutter("checking patch chaining...")
    prev = None
    for pid in revision_history():
        log_prev = read_patch_header(pid).precursor
        if log_prev != prev:
            bailout("inconsistent precursor links on " + pid)
        prev = pid


def check_patch_uniqueness():
    """Make sure no patch is listed twice in the history.

    This should be implied by having correct ancestry but I'll check it
    anyway."""
    mutter("checking history for duplicates...")
    seen = {}
    for pid in revision_history():
        if pid in seen:
            bailout("patch " + pid + " appears twice in history")
        seen[pid] = True


######################################################################
# whole-branch check, driven from a Branch object

import sys
import osutils

from trace import mutter
from errors import bailout


def check(branch, progress=True):
    out = sys.stdout

    # only show progress when writing to a terminal
    if not (hasattr(out, 'isatty') and out.isatty()):
        progress = False

    if progress:
        def p(m):
            mutter('checking ' + m)
            out.write('\rchecking: %-50.50s' % m)
            out.flush()
    else:
        def p(m):
            mutter('checking ' + m)

    p('history of %r' % branch.base)
    last_ptr = None
    checked_revs = {}
    checked_texts = {}

    history = branch.revision_history()
    revno = 0
    revcount = len(history)

    for rid in history:
        revno += 1
        p('revision %d/%d' % (revno, revcount))
        mutter('    revision {%s}' % rid)
        rev = branch.get_revision(rid)
        if rev.revision_id != rid:
            bailout('wrong internal revision id in revision {%s}' % rid)
        if rev.precursor != last_ptr:
            bailout('mismatched precursor in revision {%s}' % rid)
        last_ptr = rid
        if rid in checked_revs:
            bailout('repeated revision {%s}' % rid)
        checked_revs[rid] = True

        ## TODO: Check all the required fields are present on the revision.

        inv = branch.get_inventory(rev.inventory_id)
        seen_ids = {}
        seen_names = {}

        p('revision %d/%d file ids' % (revno, revcount))
        for file_id in inv:
            if file_id in seen_ids:
                bailout('duplicated file_id {%s} in inventory for revision {%s}'
                        % (file_id, rid))
            seen_ids[file_id] = True

        i = 0
        len_inv = len(inv)
        for file_id in inv:
            i += 1
            p('revision %d/%d file text %d/%d' % (revno, revcount, i, len_inv))
            ie = inv[file_id]

            if ie.parent_id != None:
                if ie.parent_id not in seen_ids:
                    bailout('missing parent {%s} in inventory for revision {%s}'
                            % (ie.parent_id, rid))

            if ie.kind == 'file':
                if ie.text_id in checked_texts:
                    fp = checked_texts[ie.text_id]
                else:
                    if not ie.text_id in branch.text_store:
                        bailout('text {%s} not in text_store' % ie.text_id)
                    tf = branch.text_store[ie.text_id]
                    fp = osutils.fingerprint_file(tf)
                    checked_texts[ie.text_id] = fp
                if ie.text_size != fp['size']:
                    bailout('text {%s} wrong size' % ie.text_id)
                if ie.text_sha1 != fp['sha1']:
                    bailout('text {%s} wrong sha1' % ie.text_id)
            elif ie.kind == 'directory':
                if ie.text_sha1 != None or ie.text_size != None or ie.text_id != None:
                    bailout('directory {%s} has text in revision {%s}'
                            % (ie.file_id, rid))

        p('revision %d/%d file paths' % (revno, revcount))
        for path, ie in inv.iter_entries():
            if path in seen_names:
                bailout('duplicated path %r in inventory for revision {%s}'
                        % (path, rid))
            seen_names[path] = True

    if progress:
        print     # terminate the \r progress line
    print 'checked %d revisions, %d file texts' % (revcount, len(checked_texts))
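

if __name__ == '__main__':
    # Minimal usage sketch, not part of the original module: it assumes the
    # flat import layout used above and that Branch(dir) opens the branch in
    # the given directory.
    from branch import Branch
    check(Branch('.'), progress=True)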