# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Reconcilers are able to fix some potential data errors in a branch."""
from bzrlib import (
    cleanup,
    errors,
    ui,
    )
from bzrlib.trace import mutter
from bzrlib.tsort import topo_sort
from bzrlib.versionedfile import AdapterFactory, FulltextContentFactory
def reconcile(dir, canonicalize_chks=False):
    """Reconcile the data in dir.

    Currently this is limited to an inventory 'reweave'.

    Directly using Reconciler is recommended for library users that
    desire fine grained control or analysis of the found issues.

    :param canonicalize_chks: Make sure CHKs are in canonical form.
    """
    reconciler = Reconciler(dir, canonicalize_chks=canonicalize_chks)
    reconciler.reconcile()
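# A usage sketch for library callers who want the per-run details rather than
# the convenience wrapper above. `a_bzrdir` is an assumed name for an
# already-opened bzrdir; the attributes checked are the ones documented on
# Reconciler.reconcile() below.
#
#   reconciler = Reconciler(a_bzrdir)
#   reconciler.reconcile()
#   if reconciler.inconsistent_parents or reconciler.garbage_inventories:
#       pass  # inspect or report the issues that were repaired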
class Reconciler(object):
    """Reconcilers are used to reconcile existing data."""

    def __init__(self, dir, other=None, canonicalize_chks=False):
        """Create a Reconciler."""
        self.canonicalize_chks = canonicalize_chks

    def reconcile(self):
        """Perform reconciliation.

        After reconciliation the following attributes document found issues:

        inconsistent_parents: The number of revisions in the repository whose
                              ancestry was being reported incorrectly.
            # Nothing to check here
            self.fixed_branch_history = None
            return
        ui.ui_factory.note('Reconciling branch %s' % self.branch.base)
        branch_reconciler = self.branch.reconcile(thorough=True)
        self.fixed_branch_history = branch_reconciler.fixed_history
    def _reconcile_repository(self):
        self.repo = self.bzrdir.find_repository()
        ui.ui_factory.note('Reconciling repository %s' %
            self.repo.bzrdir.root_transport.base)
        self.pb.update("Reconciling repository", 0, 1)
        if self.canonicalize_chks:
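            # The bare attribute access inside the try below is a capability
            # probe: repositories without CHK canonicalization raise
            # AttributeError and get a clear error up front instead of failing
            # partway through reconciliation.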
            try:
                self.repo.reconcile_canonicalize_chks
            except AttributeError:
                raise errors.BzrError(
                    "%s cannot canonicalize CHKs." % (self.repo,))
            repo_reconciler = self.repo.reconcile_canonicalize_chks()
        else:
            repo_reconciler = self.repo.reconcile(thorough=True)
        self.inconsistent_parents = repo_reconciler.inconsistent_parents
        self.garbage_inventories = repo_reconciler.garbage_inventories
        if repo_reconciler.aborted:
            ui.ui_factory.note(
                'Reconcile aborted: revision index has inconsistent parents.')
            ui.ui_factory.note(
                'Run "bzr check" for more details.')
        else:
            ui.ui_factory.note('Reconciliation complete.')
class BranchReconciler(object):
        self.branch = a_branch

    def reconcile(self):
        operation = cleanup.OperationWithCleanups(self._reconcile)
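        # The OperationWithCleanups helper runs self._reconcile via
        # run_simple() and afterwards invokes everything registered through
        # add_cleanup (lock releases, progress bar teardown), so the method
        # body below does not need explicit try/finally blocks.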
        self.add_cleanup = operation.add_cleanup
        operation.run_simple()

    def _reconcile(self):
        self.branch.lock_write()
        self.add_cleanup(self.branch.unlock)
        self.pb = ui.ui_factory.nested_progress_bar()
        self.add_cleanup(self.pb.finished)
        self._reconcile_steps()
    def _reconcile_steps(self):
        self._reconcile_revision_history()

    def _reconcile_revision_history(self):
        repo = self.branch.repository
        last_revno, last_revision_id = self.branch.last_revision_info()
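        # Rebuild the left-hand (mainline) history by walking parents back
        # from the branch tip; a ghost left hand parent simply truncates the
        # walk (handled by the except clause below).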
        real_history = []
        try:
            for revid in repo.iter_reverse_revision_history(
                    last_revision_id):
                real_history.append(revid)
        except errors.RevisionNotPresent:
            pass # Hit a ghost left hand parent
        real_history.reverse()
        if last_revno != len(real_history):
            self.fixed_history = True
            # Technically for Branch5 formats, it is more efficient to use
            # set_revision_history, as this will regenerate it again.
            # Not really worth a whole BranchReconciler class just for this,
            # though.
            ui.ui_factory.note('Fixing last revision info %s => %s' % (
                last_revno, len(real_history)))
            self.branch.set_last_revision_info(len(real_history),
                last_revision_id)
        else:
            self.fixed_history = False
            ui.ui_factory.note('revision_history ok.')
class RepoReconciler(object):
    """Reconciler that reconciles a repository.

    The goal of repository reconciliation is to make any derived data
    consistent with the core data committed by a user. This can involve
    reindexing, or removing unreferenced data if that can interfere with
    queries in a given repository.
    def reconcile(self):
        """Perform reconciliation.

        After reconciliation the following attributes document found issues:

        inconsistent_parents: The number of revisions in the repository whose
                              ancestry was being reported incorrectly.
        garbage_inventories: The number of inventory objects without revisions
                             that were garbage collected.
        """
        operation = cleanup.OperationWithCleanups(self._reconcile)
        self.add_cleanup = operation.add_cleanup
        operation.run_simple()
    def _reconcile(self):
        self.repo.lock_write()
        self.add_cleanup(self.repo.unlock)
        self.pb = ui.ui_factory.nested_progress_bar()
        self.add_cleanup(self.pb.finished)
        self._reconcile_steps()
    def _reconcile_steps(self):
        """Perform the steps to reconcile this repository."""

    def _reweave_inventory(self):
        """Regenerate the inventory weave for the repository from scratch.

        This is a smart function: it will only do the reweave if doing it
        will correct data issues. The self.thorough flag controls whether
        only data-loss causing issues (!self.thorough) or all issues
        (self.thorough) are treated as requiring the reweave.
        """
        # local because needing to know about WeaveFile is a wart we want to hide
        from bzrlib.weave import WeaveFile, Weave
        transaction = self.repo.get_transaction()
        self.pb.update('Reading inventory data')
        self.inventory = self.repo.inventories
        self.revisions = self.repo.revisions
        # the total set of revisions to process
            # put a revision into the graph.
            self._graph_revision(rev_id)
        self._check_garbage_inventories()
        # if there are no inconsistent_parents and
        # (no garbage inventories or we are not doing a thorough check)
        if (not self.inconsistent_parents and
            (not self.garbage_inventories or not self.thorough)):
            ui.ui_factory.note('Inventory ok.')
            return
        self.pb.update('Backing up inventory', 0, 0)
        self.repo._backup_inventory()
        ui.ui_factory.note('Backup inventory created.')
        new_inventories = self.repo._temp_inventories()
        # we have topological order of revisions and non ghost parents ready.
        self._setup_steps(len(self._rev_graph))
        revision_keys = [(rev_id,) for rev_id in topo_sort(self._rev_graph)]
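        # topo_sort flattens self._rev_graph into a list in which every
        # revision id appears after all of its recorded parents.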
        stream = self._change_inv_parents(
            self.inventory.get_record_stream(revision_keys, 'unordered', True),
            self._new_inv_parents,
        self.pb.update('Writing weave')
        self.repo._activate_new_inventory()
        self.inventory = None
        ui.ui_factory.note('Inventory regenerated.')

    def _new_inv_parents(self, revision_key):
        """Lookup ghost-filtered parents for revision_key."""
    def _load_indexes(self):
        """Load indexes for the reconciliation."""
        self.transaction = self.repo.get_transaction()
        self.pb.update('Reading indexes', 0, 2)
        self.inventory = self.repo.inventories
        self.pb.update('Reading indexes', 1, 2)
        self.repo._check_for_inconsistent_revision_parents()
        self.revisions = self.repo.revisions
        self.pb.update('Reading indexes', 2, 2)
    def _gc_inventory(self):
        """Remove inventories that are not referenced from the revision store."""
        self.pb.update('Checking unused inventories', 0, 1)
        self._check_garbage_inventories()
        self.pb.update('Checking unused inventories', 1, 3)
        if not self.garbage_inventories:
            ui.ui_factory.note('Inventory ok.')
            return
        self.pb.update('Backing up inventory', 0, 0)
        self.repo._backup_inventory()
        ui.ui_factory.note('Backup Inventory created')
        # asking for '' should never return a non-empty weave
        new_inventories = self.repo._temp_inventories()
        # we have topological order of revisions and non ghost parents ready.
        graph = self.revisions.get_parent_map(self.revisions.keys())
        revision_keys = topo_sort(graph)
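        # Only inventories named by one of these revision keys are copied to
        # the temporary store below; unreferenced (garbage) inventories are
        # left behind and dropped when the new store is activated.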
        revision_ids = [key[-1] for key in revision_keys]
        self._setup_steps(len(revision_keys))
        stream = self._change_inv_parents(
        self.pb.update('Writing weave')
        self.repo._activate_new_inventory()
        self.inventory = None
        ui.ui_factory.note('Inventory regenerated.')

    def _fix_text_parents(self):
        """Fix bad versionedfile parent entries.
    # - lock the names list
    # - perform a customised pack() that regenerates data as needed
    # - unlock the names list
    # https://bugs.launchpad.net/bzr/+bug/154173

    def __init__(self, repo, other=None, thorough=False,
            canonicalize_chks=False):
        super(PackReconciler, self).__init__(repo, other=other,
            thorough=thorough)
        self.canonicalize_chks = canonicalize_chks
    def _reconcile_steps(self):
        """Perform the steps to reconcile this repository."""
        collection = self.repo._pack_collection
        collection.ensure_loaded()
        collection.lock_names()
        self.add_cleanup(collection._unlock_names)
        packs = collection.all_packs()
        all_revisions = self.repo.all_revision_ids()
        total_inventories = len(list(
            collection.inventory_index.combined_index.iter_all_entries()))
        if len(all_revisions):
            if self.canonicalize_chks:
                reconcile_meth = self.repo._canonicalize_chks_pack
            else:
                reconcile_meth = self.repo._reconcile_pack
            new_pack = reconcile_meth(collection, packs, ".reconcile",
                all_revisions, self.pb)
            if new_pack is not None:
                self._discard_and_save(packs)
        else:
            # only make a new pack when there is data to copy.
            self._discard_and_save(packs)
        self.garbage_inventories = total_inventories - len(list(
            collection.inventory_index.combined_index.iter_all_entries()))
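        # The difference between the inventory count taken before the repack
        # and the recount here is the number of unreferenced inventories that
        # were dropped.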
    def _discard_and_save(self, packs):
        """Discard some packs from the repository.