    Directly using Reconciler is recommended for library users that
    desire fine grained control or analysis of the found issues.

    :param canonicalize_chks: Make sure CHKs are in canonical form.
    """
    reconciler = Reconciler(dir, canonicalize_chks=canonicalize_chks)
    reconciler.reconcile()
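
# A minimal usage sketch (illustrative only, assuming `a_bzrdir` is an
# already-opened BzrDir): driving Reconciler directly, as recommended above,
# keeps the per-issue counters available to the caller, which the reconcile()
# convenience wrapper does not return:
#
#   reconciler = Reconciler(a_bzrdir)
#   reconciler.reconcile()
#   if reconciler.inconsistent_parents or reconciler.garbage_inventories:
#       ...  # analyse or report the issues that were found and repaired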


class Reconciler(object):
    """Reconcilers are used to reconcile existing data."""

    def __init__(self, dir, other=None, canonicalize_chks=False):
        """Create a Reconciler."""
        self.bzrdir = dir
        self.canonicalize_chks = canonicalize_chks

    def reconcile(self):
        """Perform reconciliation.

        After reconciliation the following attributes document found issues:

        * `inconsistent_parents`: The number of revisions in the repository
          whose ancestry was being reported incorrectly.
        * `garbage_inventories`: The number of inventory objects without
          revisions that were garbage collected.
        * `fixed_branch_history`: None if there was no branch, False if the
          branch history was correct, True if the branch history needed to be
          re-normalized.
        """
        self.pb = ui.ui_factory.nested_progress_bar()
            # Nothing to check here
            self.fixed_branch_history = None
            return
        ui.ui_factory.note(gettext('Reconciling branch %s') % self.branch.base)
        branch_reconciler = self.branch.reconcile(thorough=True)
        self.fixed_branch_history = branch_reconciler.fixed_history
    def _reconcile_repository(self):
        self.repo = self.bzrdir.find_repository()
        ui.ui_factory.note(gettext('Reconciling repository %s') %
            self.repo.bzrdir.root_transport.base)
        self.pb.update(gettext("Reconciling repository"), 0, 1)
        if self.canonicalize_chks:
            try:
                self.repo.reconcile_canonicalize_chks
            except AttributeError:
                raise errors.BzrError(
                    gettext("%s cannot canonicalize CHKs.") % (self.repo,))
            repo_reconciler = self.repo.reconcile_canonicalize_chks()
        else:
            repo_reconciler = self.repo.reconcile(thorough=True)
        self.inconsistent_parents = repo_reconciler.inconsistent_parents
        self.garbage_inventories = repo_reconciler.garbage_inventories
        if repo_reconciler.aborted:
            ui.ui_factory.note(gettext(
                'Reconcile aborted: revision index has inconsistent parents.'))
            ui.ui_factory.note(gettext(
                'Run "bzr check" for more details.'))
        else:
            ui.ui_factory.note(gettext('Reconciliation complete.'))


class BranchReconciler(object):
        self.branch = a_branch

    def reconcile(self):
        operation = cleanup.OperationWithCleanups(self._reconcile)
        self.add_cleanup = operation.add_cleanup
        operation.run_simple()
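        # OperationWithCleanups runs self._reconcile and afterwards invokes
        # every callable registered via self.add_cleanup (the branch unlock
        # and progress-bar finish below), so _reconcile itself needs no
        # try/finally nesting.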

    def _reconcile(self):
        self.branch.lock_write()
        self.add_cleanup(self.branch.unlock)
        self.pb = ui.ui_factory.nested_progress_bar()
        self.add_cleanup(self.pb.finished)
        self._reconcile_steps()

    def _reconcile_steps(self):
        self._reconcile_revision_history()

    def _reconcile_revision_history(self):
        last_revno, last_revision_id = self.branch.last_revision_info()
        real_history = []
        graph = self.branch.repository.get_graph()
        try:
            for revid in graph.iter_lefthand_ancestry(
                    last_revision_id, (_mod_revision.NULL_REVISION,)):
                real_history.append(revid)
        except errors.RevisionNotPresent:
            pass  # Hit a ghost left hand parent
        real_history.reverse()
        if last_revno != len(real_history):
            self.fixed_history = True
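            # The branch metadata claims last_revno revisions, but walking the
            # left-hand ancestry (which stops at ghosts) found a different
            # number, so the recorded revno/history has to be rebuilt from
            # real_history.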
            # put a revision into the graph.
            self._graph_revision(rev_id)
        self._check_garbage_inventories()
        # if there are no inconsistent_parents and
        # (no garbage inventories or we are not doing a thorough check)
        if (not self.inconsistent_parents and
            (not self.garbage_inventories or not self.thorough)):
            ui.ui_factory.note(gettext('Inventory ok.'))
            return
        self.pb.update(gettext('Backing up inventory'), 0, 0)
        self.repo._backup_inventory()
        ui.ui_factory.note(gettext('Backup inventory created.'))
        new_inventories = self.repo._temp_inventories()

        # we have topological order of revisions and non ghost parents ready.
        self._setup_steps(len(self._rev_graph))
        revision_keys = [(rev_id,) for rev_id in topo_sort(self._rev_graph)]
        stream = self._change_inv_parents(
            self.inventory.get_record_stream(revision_keys, 'unordered', True),
            self._new_inv_parents,
            set(revision_keys))
        new_inventories.insert_record_stream(stream)
        # if this worked, the set of new_inventories.keys should equal
        # self.pending
        if not (set(new_inventories.keys()) ==
            set([(revid,) for revid in self.pending])):
            raise AssertionError()
        self.pb.update(gettext('Writing weave'))
        self.repo._activate_new_inventory()
        self.inventory = None
        ui.ui_factory.note(gettext('Inventory regenerated.'))
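        # Summary of the reweave above: the existing inventory texts are
        # backed up, every inventory record is re-streamed with its parent
        # list corrected by _new_inv_parents/_change_inv_parents, and the
        # rebuilt texts are activated in place of the old ones by
        # _activate_new_inventory().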

    def _new_inv_parents(self, revision_key):
        """Lookup ghost-filtered parents for revision_key."""
        # Use the filtered ghostless parents list:
        return tuple([(revid,) for revid in self._rev_graph[revision_key[-1]]])

    def _change_inv_parents(self, stream, get_parents, all_revision_keys):
        """Adapt a record stream to reconcile the parents."""
        for record in stream:
            wanted_parents = get_parents(record.key)
            if wanted_parents and wanted_parents[0] not in all_revision_keys:
                # The check for the left most parent only handles knit
                # compressors, but this code only applies to knit and weave
                # repositories anyway.
                bytes = record.get_bytes_as('fulltext')
                yield FulltextContentFactory(record.key, wanted_parents, record.sha1, bytes)
            else:
                adapted_record = AdapterFactory(record.key, wanted_parents, record)
                yield adapted_record
            self._reweave_step('adding inventories')

    def _setup_steps(self, new_total):
        """Setup the markers we need to control the progress bar."""
                mutter('found ghost %s', parent)
        self._rev_graph[rev_id] = parents

    def _check_garbage_inventories(self):
        """Check for garbage inventories which we cannot trust
    def _load_indexes(self):
        """Load indexes for the reconciliation."""
        self.transaction = self.repo.get_transaction()
        self.pb.update(gettext('Reading indexes'), 0, 2)
        self.inventory = self.repo.inventories
        self.pb.update(gettext('Reading indexes'), 1, 2)
        self.repo._check_for_inconsistent_revision_parents()
        self.revisions = self.repo.revisions
        self.pb.update(gettext('Reading indexes'), 2, 2)
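        # self.inventory and self.revisions are the repository-level
        # VersionedFiles objects; their records are addressed by
        # (revision_id,) key tuples, which is why later methods build keys as
        # (rev_id,) and unpack them with key[-1].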

    def _gc_inventory(self):
        """Remove inventories that are not referenced from the revision store."""
        self.pb.update(gettext('Checking unused inventories'), 0, 1)
        self._check_garbage_inventories()
        self.pb.update(gettext('Checking unused inventories'), 1, 3)
        if not self.garbage_inventories:
            ui.ui_factory.note(gettext('Inventory ok.'))
            return
        self.pb.update(gettext('Backing up inventory'), 0, 0)
        self.repo._backup_inventory()
        ui.ui_factory.note(gettext('Backup Inventory created'))
        # asking for '' should never return a non-empty weave
        new_inventories = self.repo._temp_inventories()
        # we have topological order of revisions and non ghost parents ready.
        graph = self.revisions.get_parent_map(self.revisions.keys())
        revision_keys = topo_sort(graph)
        revision_ids = [key[-1] for key in revision_keys]
        self._setup_steps(len(revision_keys))
        stream = self._change_inv_parents(
            self.inventory.get_record_stream(revision_keys, 'unordered', True),
            graph.__getitem__,
            set(revision_keys))
        new_inventories.insert_record_stream(stream)
        # if this worked, the set of new_inventories.keys() should equal
        # the revision keys list
        if not(set(new_inventories.keys()) == set(revision_keys)):
            raise AssertionError()
        self.pb.update(gettext('Writing weave'))
        self.repo._activate_new_inventory()
        self.inventory = None
        ui.ui_factory.note(gettext('Inventory regenerated.'))

    def _fix_text_parents(self):
        """Fix bad versionedfile parent entries.

        This method finds entries with inconsistent parents, corrects their
        parent lists, and replaces the versionedfile with a corrected version.
        """
        transaction = self.repo.get_transaction()
        versions = [key[-1] for key in self.revisions.keys()]
        mutter('Prepopulating revision text cache with %d revisions',
               len(versions))
        vf_checker = self.repo._get_versioned_file_checker()
        bad_parents, unused_versions = vf_checker.check_file_version_parents(
            self.repo.texts, self.pb)
        text_index = vf_checker.text_index
        per_id_bad_parents = {}
        for key in unused_versions:
            # Ensure that every file with unused versions gets rewritten.
            # NB: This is really not needed, reconcile != pack.
            per_id_bad_parents[key[0]] = {}
        # Generate per-knit/weave data.
        for key, details in bad_parents.iteritems():
            file_id = key[0]
            rev_id = key[1]
            knit_parents = tuple([parent[-1] for parent in details[0]])
            correct_parents = tuple([parent[-1] for parent in details[1]])
            file_details = per_id_bad_parents.setdefault(file_id, {})
            file_details[rev_id] = (knit_parents, correct_parents)
        file_id_versions = {}
        for text_key in text_index:
            versions_list = file_id_versions.setdefault(text_key[0], [])
            versions_list.append(text_key[1])
        # Do the reconcile of individual weaves.
        for num, file_id in enumerate(per_id_bad_parents):
            self.pb.update(gettext('Fixing text parents'), num,
                           len(per_id_bad_parents))
            versions_with_bad_parents = per_id_bad_parents[file_id]
            id_unused_versions = set(key[-1] for key in unused_versions
                if key[0] == file_id)
            if file_id in file_id_versions:
                file_versions = file_id_versions[file_id]
            else:
                # This id was present in the disk store but is not referenced
                # by any revision at all.
                file_versions = []
            self._fix_text_parent(file_id, versions_with_bad_parents,
                id_unused_versions, file_versions)

    def _fix_text_parent(self, file_id, versions_with_bad_parents,
            unused_versions, all_versions):
        """Fix bad versionedfile entries in a single versioned file."""
        mutter('fixing text parent: %r (%d versions)', file_id,
               len(versions_with_bad_parents))
        mutter('(%d are unused)', len(unused_versions))
        new_file_id = 'temp:%s' % file_id
        new_parents = {}
        needed_keys = set()
        for version in all_versions:
            if version in unused_versions:
                continue
            elif version in versions_with_bad_parents:
                parents = versions_with_bad_parents[version][1]
            else:
                pmap = self.repo.texts.get_parent_map([(file_id, version)])
                parents = [key[-1] for key in pmap[(file_id, version)]]
            new_parents[(new_file_id, version)] = [
                (new_file_id, parent) for parent in parents]
            needed_keys.add((file_id, version))
        def fix_parents(stream):
            for record in stream:
                bytes = record.get_bytes_as('fulltext')
                new_key = (new_file_id, record.key[-1])
                parents = new_parents[new_key]
                yield FulltextContentFactory(new_key, parents, record.sha1, bytes)
        stream = self.repo.texts.get_record_stream(needed_keys, 'topological', True)
        self.repo._remove_file_id(new_file_id)
        self.repo.texts.insert_record_stream(fix_parents(stream))
        self.repo._remove_file_id(file_id)
        if len(new_parents):
            self.repo._move_file_id(new_file_id, file_id)
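        # Ordering of the rewrite above: stale 'temp:<file_id>' texts are
        # cleared first, the corrected records are inserted under that
        # temporary id, the damaged texts for the original file_id are
        # removed, and the temporary id is then moved back over file_id
        # (skipped when no versions remain in use, which drops the file
        # entirely).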


class PackReconciler(RepoReconciler):
        collection = self.repo._pack_collection
        collection.ensure_loaded()
        collection.lock_names()
        self.add_cleanup(collection._unlock_names)
        packs = collection.all_packs()
        all_revisions = self.repo.all_revision_ids()
        total_inventories = len(list(
            collection.inventory_index.combined_index.iter_all_entries()))
        if len(all_revisions):
            if self.canonicalize_chks:
                reconcile_meth = self.repo._canonicalize_chks_pack
            else:
                reconcile_meth = self.repo._reconcile_pack
            new_pack = reconcile_meth(collection, packs, ".reconcile",
                all_revisions, self.pb)
            if new_pack is not None:
                self._discard_and_save(packs)
        else:
            # only make a new pack when there is data to copy.
            self._discard_and_save(packs)
        self.garbage_inventories = total_inventories - len(list(
            collection.inventory_index.combined_index.iter_all_entries()))

    def _discard_and_save(self, packs):
        """Discard some packs from the repository.