# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""bzr upgrade logic."""

from bzrlib import (
    errors,
    trace,
    ui,
    )
from bzrlib.bzrdir import (
    BzrDir,
    format_registry,
    )
from bzrlib.remote import RemoteBzrDir
"""Experiment in converting existing bzr branches to weaves."""
19
# To make this properly useful
21
# 1. assign text version ids, and put those text versions into
22
# the inventory as they're converted.
24
# 2. keep track of the previous version of each file, rather than
25
# just using the last one imported
27
# 3. assign entry versions when files are added, renamed or moved.
29
# 4. when merged-in versions are observed, walk down through them
30
# to discover everything, then commit bottom-up
32
# 5. track ancestry as things are merged in, and commit that in each
35
# Perhaps it's best to first walk the whole graph and make a plan for
36
# what should be imported in what order? Need a kind of topological
37
# sort of all revisions. (Or do we, can we just before doing a revision
38
# see that all its parents have either been converted or abandoned?)
41
# Cannot import a revision until all its parents have been
42
# imported. in other words, we can only import revisions whose
43
# parents have all been imported. the first step must be to
44
# import a revision with no parents, of which there must be at
45
# least one. (So perhaps it's useful to store forward pointers
46
# from a list of parents to their children?)
48
# Another (equivalent?) approach is to build up the ordered
49
# ancestry list for the last revision, and walk through that. We
50
# are going to need that.
52
# We don't want to have to recurse all the way back down the list.
54
# Suppose we keep a queue of the revisions able to be processed at
55
# any point. This starts out with all the revisions having no
58
# This seems like a generally useful algorithm...
60
# The current algorithm is dumb (O(n**2)?) but will do the job, and
61
# takes less than a second on the bzr.dev branch.
63
# This currently does a kind of lazy conversion of file texts, where a
64
# new text is written in every version. That's unnecessary but for
65
# the moment saves us having to worry about when files need new
69
# TODO: Don't create a progress bar here, have it passed by the caller.
70
# At least do it from the UI factory.
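
# Illustrative sketch (not part of bzrlib): the queue-based ordering the
# comments above describe.  Given a mapping of revision id -> list of parent
# ids, start from the revisions whose parents are already "done" (absent or
# previously converted) and release children as their parents complete.  The
# helper name and its arguments are invented for this example.
def _example_import_order(parents_of, done=()):
    """Return revision ids ordered so every revision follows its parents."""
    done = set(done)
    # children_of is the "forward pointers from parents to children" idea.
    children_of = {}
    pending = {}          # revision id -> number of parents still to convert
    queue = []
    for rev_id, parent_ids in parents_of.items():
        remaining = [p for p in parent_ids
                     if p not in done and p in parents_of]
        pending[rev_id] = len(remaining)
        for parent_id in remaining:
            children_of.setdefault(parent_id, []).append(rev_id)
        if not remaining:
            queue.append(rev_id)
    order = []
    while queue:
        rev_id = queue.pop()
        order.append(rev_id)
        for child_id in children_of.get(rev_id, []):
            pending[child_id] -= 1
            if pending[child_id] == 0:
                queue.append(child_id)
    return order
# For example, _example_import_order({'a': [], 'b': ['a'], 'c': ['a', 'b']})
# yields 'a' before 'b' before 'c'.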

import os
import shutil

from bzrlib.branch import Branch, find_branch
from bzrlib.branch import BZR_BRANCH_FORMAT_5, BZR_BRANCH_FORMAT_6
from bzrlib.errors import BzrError
import bzrlib.hashcache as hashcache
from bzrlib.revfile import Revfile
from bzrlib.weave import Weave
from bzrlib.weavefile import read_weave, write_weave
from bzrlib.ui import ui_factory
from bzrlib.atomicfile import AtomicFile
from bzrlib.xml4 import serializer_v4
from bzrlib.xml5 import serializer_v5
from bzrlib.trace import mutter, note, warning, enable_default_logging
from bzrlib.osutils import sha_strings, sha_string

class Convert(object):

    def __init__(self, url=None, format=None, control_dir=None):
        """Convert a Bazaar control directory to a given format.

        Either the url or control_dir parameter must be given.

        :param url: the URL of the control directory or None if the
          control_dir is explicitly given instead
        :param format: the format to convert to or None for the default
        :param control_dir: the control directory or None if it is
          specified via the URL parameter instead
        """
        self.format = format
        # XXX: Change to cleanup
        warning_id = 'cross_format_fetch'
        saved_warning = warning_id in ui.ui_factory.suppressed_warnings
        if url is None and control_dir is None:
            raise AssertionError(
                "either the url or control_dir parameter must be set.")
        if control_dir is not None:
            self.bzrdir = control_dir
        else:
            self.bzrdir = BzrDir.open_unsupported(url)
        if isinstance(self.bzrdir, RemoteBzrDir):
            self.bzrdir._ensure_real()
            self.bzrdir = self.bzrdir._real_bzrdir
        if self.bzrdir.root_transport.is_readonly():
            raise errors.UpgradeReadonly
        self.transport = self.bzrdir.root_transport
        ui.ui_factory.suppressed_warnings.add(warning_id)
        try:
            self.convert()
        finally:
            if not saved_warning:
                ui.ui_factory.suppressed_warnings.remove(warning_id)

    def __init__(self, base_dir):
        self.base = base_dir
        self.converted_revs = set()
        self.absent_revisions = set()
        self.text_count = 0
        self.revisions = {}
110
def convert(self):
71
branch = self.bzrdir.open_branch()
72
if branch.user_url != self.bzrdir.user_url:
74
'This is a checkout. The branch (%s) needs to be upgraded'
75
' separately.' % (branch.user_url,))
77
except (errors.NotBranchError, errors.IncompatibleRepositories):
78
# might not be a format we can open without upgrading; see e.g.
79
# https://bugs.launchpad.net/bzr/+bug/253891
81
if self.format is None:
83
rich_root = self.bzrdir.find_repository()._format.rich_root_data
84
except errors.NoRepositoryPresent:
85
rich_root = False # assume no rich roots
87
format_name = "default-rich-root"
89
format_name = "default"
90
format = format_registry.make_bzrdir(format_name)
93
if not self.bzrdir.needs_format_conversion(format):
94
raise errors.UpToDateFormat(self.bzrdir._format)
95
if not self.bzrdir.can_convert_format():
96
raise errors.BzrError("cannot upgrade from bzrdir format %s" %
98
self.bzrdir.check_conversion_target(format)
99
ui.ui_factory.note('starting upgrade of %s' % self.transport.base)
101
self.backup_oldpath, self.backup_newpath = self.bzrdir.backup_bzrdir()
102
while self.bzrdir.needs_format_conversion(format):
103
converter = self.bzrdir._format.get_converter(format)
104
self.bzrdir = converter.convert(self.bzrdir, None)
105
ui.ui_factory.note('finished')
108
"""Clean-up after a conversion.
110
This removes the backup.bzr directory.
112
transport = self.transport
113
backup_relpath = transport.relpath(self.backup_newpath)
114
child_pb = ui.ui_factory.nested_progress_bar()
115
child_pb.update('Deleting backup.bzr')
117
transport.delete_tree(backup_relpath)

def upgrade(url, format=None, clean_up=False, dry_run=False):
    """Upgrade locations to format.

    This routine wraps the smart_upgrade() routine with a nicer UI.
    In particular, it ensures all URLs can be opened before starting
    and reports a summary at the end if more than one upgrade was attempted.
    This routine is useful for command line tools. Other bzrlib clients
    probably ought to use smart_upgrade() instead.

    :param url: a URL of the locations to upgrade.
    :param format: the format to convert to or None for the best default
    :param clean_up: if True, the backup.bzr directory is removed if the
      upgrade succeeded for a given repo/branch/tree
    :param dry_run: show what would happen but don't actually do any upgrades
    :return: the list of exceptions encountered
    """
    control_dirs = [BzrDir.open_unsupported(url)]
    attempted, succeeded, exceptions = smart_upgrade(control_dirs,
        format, clean_up=clean_up, dry_run=dry_run)
    if len(attempted) > 1:
        attempted_count = len(attempted)
        succeeded_count = len(succeeded)
        failed_count = attempted_count - succeeded_count
        ui.ui_factory.note(
            '\nSUMMARY: %d upgrades attempted, %d succeeded, %d failed'
            % (attempted_count, succeeded_count, failed_count))
    return exceptions
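
# Illustrative only (not part of the bzrlib API): a tiny driver showing how a
# script might call upgrade().  The function name and default path are
# placeholders invented for this example.
def _example_upgrade_one_location(url='/path/to/branch'):
    """Upgrade a single location and report how many problems occurred."""
    exceptions = upgrade(url, clean_up=True, dry_run=False)
    if exceptions:
        ui.ui_factory.note('upgrade finished with %d problem(s)'
                           % len(exceptions))
    return exceptions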

def smart_upgrade(control_dirs, format, clean_up=False,
    dry_run=False):
    """Convert control directories to a new format intelligently.

    If the control directory is a shared repository, dependent branches
    are also converted provided the repository converted successfully.
    If the conversion of a branch fails, remaining branches are still tried.

    :param control_dirs: the BzrDirs to upgrade
    :param format: the format to convert to or None for the best default
    :param clean_up: if True, the backup.bzr directory is removed if the
      upgrade succeeded for a given repo/branch/tree
    :param dry_run: show what would happen but don't actually do any upgrades
    :return: attempted-control-dirs, succeeded-control-dirs, exceptions
    """
    all_attempted = []
    all_succeeded = []
    all_exceptions = []
    for control_dir in control_dirs:
        attempted, succeeded, exceptions = _smart_upgrade_one(control_dir,
            format, clean_up=clean_up, dry_run=dry_run)
        all_attempted.extend(attempted)
        all_succeeded.extend(succeeded)
        all_exceptions.extend(exceptions)
    return all_attempted, all_succeeded, all_exceptions
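
# Illustrative only (not part of the bzrlib API): smart_upgrade() on a shared
# repository also converts the branches that live inside it.  The helper name
# and path below are invented for this example.
def _example_upgrade_shared_repository(url='/path/to/shared-repo'):
    """Upgrade a shared repository and everything that lives in it."""
    control_dirs = [BzrDir.open_unsupported(url)]
    attempted, succeeded, exceptions = smart_upgrade(
        control_dirs, format=None, clean_up=True)
    return len(succeeded) == len(attempted), exceptions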

def _smart_upgrade_one(control_dir, format, clean_up=False,
    dry_run=False):
    """Convert a control directory to a new format intelligently.

    See smart_upgrade for parameter details.
    """
    # If the URL is a shared repository, find the dependent branches
    dependents = None
    try:
        repo = control_dir.open_repository()
    except errors.NoRepositoryPresent:
        # A branch or checkout using a shared repository higher up
        pass
    else:
        # The URL is a repository. If it successfully upgrades,
        # then upgrade the dependent branches as well.
        dependents = repo.find_branches(using=True)

    attempted = [control_dir]
    succeeded, exceptions = _convert_items([control_dir], format, clean_up,
        dry_run)
    if succeeded and dependents:
        ui.ui_factory.note('Found %d dependent branches - upgrading ...'
                           % (len(dependents),))
        # Convert dependent branches
        branch_cdirs = [b.bzrdir for b in dependents]
        successes, problems = _convert_items(branch_cdirs, format, clean_up,
            dry_run, label="branch")
        attempted.extend(branch_cdirs)
        succeeded.extend(successes)
        exceptions.extend(problems)

    return attempted, succeeded, exceptions

# FIXME: There are several problems below:
# - RemoteRepository doesn't support _unsupported (really ?)
# - raising AssertionError is rude and may not be necessary
# - the only caller uses only the label
def _get_object_and_label(control_dir):
    """Return the primary object and type label for a control directory.

    :return: object, label where
      object is a Branch, Repository or WorkingTree and
      label is one of:
        branch - a branch
        repository - a repository
        tree - a lightweight checkout
    """
    try:
        br = control_dir.open_branch(unsupported=True,
                                     ignore_fallbacks=True)
    except NotImplementedError:
        # RemoteRepository doesn't support the unsupported parameter
        br = control_dir.open_branch(ignore_fallbacks=True)
    except errors.NotBranchError:
        pass
    else:
        return br, "branch"
    try:
        repo = control_dir.open_repository()
    except errors.NoRepositoryPresent:
        pass
    else:
        return repo, "repository"
    try:
        wt = control_dir.open_workingtree()
    except (errors.NoWorkingTree, errors.NotLocalUrl):
        pass
    else:
        return wt, "tree"
    raise AssertionError("unknown type of control directory %s", control_dir)

def _convert_items(items, format, clean_up, dry_run, label=None):
    """Convert a sequence of control directories to the given format.

    :param items: the control directories to upgrade
    :param format: the format to convert to or None for the best default
    :param clean_up: if True, the backup.bzr directory is removed if the
      upgrade succeeded for a given repo/branch/tree
    :param dry_run: show what would happen but don't actually do any upgrades
    :param label: the label for these items or None to calculate one
    :return: items successfully upgraded, exceptions
    """
    succeeded = []
    exceptions = []
    child_pb = ui.ui_factory.nested_progress_bar()
    child_pb.update('Upgrading bzrdirs', 0, len(items))
    for i, control_dir in enumerate(items):
        location = control_dir.root_transport.base
        bzr_object, bzr_label = _get_object_and_label(control_dir)
        type_label = label or bzr_label
        child_pb.update("Upgrading %s" % (type_label), i+1, len(items))
        ui.ui_factory.note('Upgrading %s %s ...' % (type_label, location,))
        try:
            if not dry_run:
                cv = Convert(control_dir=control_dir, format=format)
        except Exception, ex:
            trace.warning('conversion error: %s' % ex)
            exceptions.append(ex)
            continue

        # Do any required post processing
        succeeded.append(control_dir)
        if clean_up:
            try:
                ui.ui_factory.note('Removing backup ...')
                if not dry_run:
                    cv.clean_up()
            except Exception, ex:
                trace.warning('failed to clean-up %s: %s' % (location, ex))
                exceptions.append(ex)
    child_pb.finished()
    return succeeded, exceptions

        if not self._open_branch():
            return
        note('starting upgrade of %s', os.path.abspath(self.base))
        self._backup_control_dir()
        self.pb = ui_factory.progress_bar()
        if self.old_format == 4:
            note('starting upgrade from format 4 to 5')
            self._convert_to_weaves()
        if self.old_format == 5:
            note('starting upgrade from format 5 to 6')
            self._convert_to_prefixed()
        cache = hashcache.HashCache(os.path.abspath(self.base))

    def _convert_to_prefixed(self):
        from bzrlib.store import hash_prefix
        for store_name in ["weaves", "revision-store"]:
            note("adding prefixes to %s" % store_name)
            store_dir = os.path.join(self.base, ".bzr", store_name)
            for filename in os.listdir(store_dir):
                if filename.endswith(".weave") or filename.endswith(".gz"):
                    file_id = os.path.splitext(filename)[0]
                else:
                    file_id = filename
                prefix_dir = os.path.join(store_dir, hash_prefix(file_id))
                if not os.path.isdir(prefix_dir):
                    os.mkdir(prefix_dir)
                os.rename(os.path.join(store_dir, filename),
                          os.path.join(prefix_dir, filename))
        self._set_new_format(BZR_BRANCH_FORMAT_6)
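
    # Illustrative sketch (hypothetical, not bzrlib.store.hash_prefix): the
    # point of the prefixes is to spread store files across subdirectories,
    # e.g. "weaves/a3/foo-20051012.weave" instead of one flat directory.  A
    # prefix derived from a cheap hash of the file id would look like this.
    def _example_hash_prefix(self, file_id):
        from zlib import adler32
        return "%02x" % (adler32(file_id) & 0xff)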

    def _convert_to_weaves(self):
        note('note: upgrade may be faster if all store files are ungzipped first')
        if not os.path.isdir(self.base + '/.bzr/weaves'):
            os.mkdir(self.base + '/.bzr/weaves')
        self.inv_weave = Weave('inventory')
        # holds in-memory weaves for all files
        self.text_weaves = {}
        os.remove(self.branch.controlfilename('branch-format'))
        self._convert_working_inv()
        rev_history = self.branch.revision_history()
        # to_read is a stack holding the revisions we still need to process;
        # appending to it adds new highest-priority revisions
        self.known_revisions = set(rev_history)
        self.to_read = rev_history[-1:]
        while self.to_read:
            rev_id = self.to_read.pop()
            if (rev_id not in self.revisions
                and rev_id not in self.absent_revisions):
                self._load_one_rev(rev_id)
        to_import = self._make_order()
        for i, rev_id in enumerate(to_import):
            self.pb.update('converting revision', i, len(to_import))
            self._convert_one_rev(rev_id)
        note('upgraded to weaves:')
        note('  %6d revisions and inventories' % len(self.revisions))
        note('  %6d revisions not present' % len(self.absent_revisions))
        note('  %6d texts' % self.text_count)
        self._write_all_weaves()
        self._write_all_revs()
        self._cleanup_spare_files()
        self._set_new_format(BZR_BRANCH_FORMAT_5)

    def _open_branch(self):
        self.branch = Branch.open_downlevel(self.base)
        self.old_format = self.branch._branch_format
        if self.old_format == 6:
            note('this branch is in the most current format')
            return False
        if self.old_format not in (4, 5):
            raise BzrError("cannot upgrade from branch format %r" %
                           self.branch._branch_format)
        return True

    def _set_new_format(self, format):
        self.branch.put_controlfile('branch-format', format)

    def _cleanup_spare_files(self):
        for n in 'merged-patches', 'pending-merged-patches':
            p = self.branch.controlfilename(n)
            if not os.path.exists(p):
                continue
            ## assert os.path.getsize(p) == 0
            os.remove(p)
        shutil.rmtree(self.base + '/.bzr/inventory-store')
        shutil.rmtree(self.base + '/.bzr/text-store')

    def _backup_control_dir(self):
        orig = self.base + '/.bzr'
        backup = orig + '.backup'
        note('making backup of tree history')
        shutil.copytree(orig, backup)
        note('%s has been backed up to %s', orig, backup)
        note('if conversion fails, you can move this directory back to .bzr')
        note('if it succeeds, you can remove this directory if you wish')

    def _convert_working_inv(self):
        branch = self.branch
        inv = serializer_v4.read_inventory(branch.controlfile('inventory', 'rb'))
        new_inv_xml = serializer_v5.write_inventory_to_string(inv)
        branch.put_controlfile('inventory', new_inv_xml)

    def _write_all_weaves(self):
        write_a_weave(self.inv_weave, self.base + '/.bzr/inventory.weave')
        i = 0
        for file_id, file_weave in self.text_weaves.items():
            self.pb.update('writing weave', i, len(self.text_weaves))
            write_a_weave(file_weave, self.base + '/.bzr/weaves/%s.weave' % file_id)
            i += 1

    def _write_all_revs(self):
        """Write all revisions out in new form."""
        shutil.rmtree(self.base + '/.bzr/revision-store')
        os.mkdir(self.base + '/.bzr/revision-store')
        for i, rev_id in enumerate(self.converted_revs):
            self.pb.update('write revision', i, len(self.converted_revs))
            f = file(self.base + '/.bzr/revision-store/%s' % rev_id, 'wb')
            try:
                serializer_v5.write_revision(self.revisions[rev_id], f)
            finally:
                f.close()

    def _load_one_rev(self, rev_id):
        """Load a revision object into memory.

        Any parents not either loaded or abandoned get queued to be
        loaded."""
        self.pb.update('loading revision',
                       len(self.revisions),
                       len(self.known_revisions))
        if not self.branch.revision_store.has_id(rev_id):
            note('revision {%s} not present in branch; '
                 'will be converted as a ghost',
                 rev_id)
            self.absent_revisions.add(rev_id)
        else:
            rev_xml = self.branch.revision_store.get(rev_id).read()
            rev = serializer_v4.read_revision_from_string(rev_xml)
            for parent_id in rev.parent_ids:
                self.known_revisions.add(parent_id)
                self.to_read.append(parent_id)
            self.revisions[rev_id] = rev

    def _load_old_inventory(self, rev_id):
        assert rev_id not in self.converted_revs
        old_inv_xml = self.branch.inventory_store.get(rev_id).read()
        inv = serializer_v4.read_inventory_from_string(old_inv_xml)
        rev = self.revisions[rev_id]
        if rev.inventory_sha1:
            assert rev.inventory_sha1 == sha_string(old_inv_xml), \
                'inventory sha mismatch for {%s}' % rev_id
        return inv

    def _load_updated_inventory(self, rev_id):
        assert rev_id in self.converted_revs
        inv_xml = self.inv_weave.get_text(rev_id)
        inv = serializer_v5.read_inventory_from_string(inv_xml)
        return inv

    def _convert_one_rev(self, rev_id):
        """Convert revision and all referenced objects to new format."""
        rev = self.revisions[rev_id]
        inv = self._load_old_inventory(rev_id)
        present_parents = [p for p in rev.parent_ids
                           if p not in self.absent_revisions]
        self._convert_revision_contents(rev, inv, present_parents)
        self._store_new_weave(rev, inv, present_parents)
        self.converted_revs.add(rev_id)

    def _store_new_weave(self, rev, inv, present_parents):
        # the XML is now updated with text versions
        for file_id in inv:
            ie = inv[file_id]
            if ie.kind == 'root_directory':
                continue
            assert hasattr(ie, 'revision'), \
                'no revision on {%s} in {%s}' % \
                (file_id, rev.revision_id)
        new_inv_xml = serializer_v5.write_inventory_to_string(inv)
        new_inv_sha1 = sha_string(new_inv_xml)
        self.inv_weave.add(rev.revision_id,
                           present_parents,
                           new_inv_xml.splitlines(True),
                           new_inv_sha1)
        rev.inventory_sha1 = new_inv_sha1

    def _convert_revision_contents(self, rev, inv, present_parents):
        """Convert all the files within a revision.

        Also upgrade the inventory to refer to the text revision ids."""
        rev_id = rev.revision_id
        mutter('converting texts of revision {%s}',
               rev_id)
        parent_invs = map(self._load_updated_inventory, present_parents)
        for file_id in inv:
            ie = inv[file_id]
            self._convert_file_version(rev, ie, parent_invs)

    def _convert_file_version(self, rev, ie, parent_invs):
        """Convert one version of one file.

        The file needs to be added into the weave if it is a merge
        of >=2 parents or if it's changed from its parent.
        """
        if ie.kind == 'root_directory':
            return
        file_id = ie.file_id
        rev_id = rev.revision_id
        w = self.text_weaves.get(file_id)
        if w is None:
            w = Weave(file_id)
            self.text_weaves[file_id] = w
        previous_entries = ie.find_previous_heads(parent_invs, w)
        for old_revision in previous_entries:
            # if this fails, it's a ghost ?
            assert old_revision in self.converted_revs
        self.snapshot_ie(previous_entries, ie, w, rev_id)
        assert getattr(ie, 'revision', None) is not None

    def snapshot_ie(self, previous_revisions, ie, w, rev_id):
        # TODO: convert this logic, which is ~= snapshot to
        # a call to:. This needs the path figured out. rather than a work_tree
        # a v4 revision_tree can be given, or something that looks enough like
        # one to give the file content to the entry if it needs it.
        # and we need something that looks like a weave store for snapshot to
        #ie.snapshot(rev, PATH, previous_revisions, REVISION_TREE, InMemoryWeaveStore(self.text_weaves))
        if len(previous_revisions) == 1:
            previous_ie = previous_revisions.values()[0]
            if ie._unchanged(previous_ie):
                ie.revision = previous_ie.revision
                return
        parent_indexes = map(w.lookup, previous_revisions)
        if ie.text_id is not None:
            file_lines = self.branch.text_store.get(ie.text_id).readlines()
            assert sha_strings(file_lines) == ie.text_sha1
            assert sum(map(len, file_lines)) == ie.text_size
            w.add(rev_id, parent_indexes, file_lines, ie.text_sha1)
            self.text_count += 1
        else:
            w.add(rev_id, parent_indexes, [], None)
        ie.revision = rev_id
        ##mutter('import text {%s} of {%s}',
        ##       ie.text_id, file_id)

    def _make_order(self):
        """Return a suitable order for importing revisions.

        The order must be such that a revision is imported after all
        its (present) parents.
        """
        todo = set(self.revisions.keys())
        done = self.absent_revisions.copy()
        order = []
        while todo:
            # scan through looking for a revision whose parents
            # are all done
            for rev_id in sorted(list(todo)):
                rev = self.revisions[rev_id]
                parent_ids = set(rev.parent_ids)
                if parent_ids.issubset(done):
                    # can take this one now
                    order.append(rev_id)
                    todo.remove(rev_id)
                    done.add(rev_id)
        return order

def write_a_weave(weave, filename):
    inv_wf = file(filename, 'wb')
    try:
        write_weave(weave, inv_wf)
    finally:
        inv_wf.close()


def upgrade(base_dir):