# and add in all file versions
@deprecated_function(zero_eight)
def greedy_fetch(to_branch, from_branch, revision=None, pb=None):
    """Legacy API, please see branch.fetch(from_branch, last_revision, pb)."""
    f = Fetcher(to_branch, from_branch, revision, pb)
    return f.count_copied, f.failed_revisions
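# The deprecated entry point above just wraps the Fetcher class; a minimal
# usage sketch, assuming `b_target` and `b_source` are already-opened Branch
# objects (hypothetical names, not part of this module):
#
#     copied, failed = greedy_fetch(b_target, b_source, revision=None)
#
# New code should call b_target.fetch(b_source, last_revision, pb) directly,
# as the docstring above suggests.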
class RepoFetcher(object):
    """Pull revisions and texts from one repository to another.

    last_revision
        if set, try to limit to the data this revision references.

    count_copied -- number of revisions copied

    This should not be used directly; it's essentially an object to
    encapsulate the logic in InterRepository.fetch().
    """


class Fetcher(object):
    """Pull revisions and texts from one branch to another.

    This doesn't update the destination's history; that can be done
    separately if desired.

    If set, pull only up to this revision_id.

    last_revision -- if last_revision is given it will be that,
        otherwise the last revision of from_branch

    count_copied -- number of revisions copied

    count_weaves -- number of file weaves copied
    """
    def __init__(self, to_branch, from_branch, last_revision=None, pb=None):
        if to_branch == from_branch:
            raise Exception("can't fetch from a branch to itself")
        self.to_branch = to_branch
        self.to_weaves = to_branch.weave_store
        self.to_control = to_branch.control_weaves
        self.from_branch = from_branch
        self.from_weaves = from_branch.weave_store
        self.from_control = from_branch.control_weaves

    def __init__(self, to_repository, from_repository, last_revision=None, pb=None):
        self.failed_revisions = []
        self.count_copied = 0
        if to_repository.control_files._transport.base == from_repository.control_files._transport.base:
            # check that last_revision is in 'from' and then return a no-operation.
            if last_revision not in (None, NULL_REVISION):
                from_repository.get_revision(last_revision)
        self.to_repository = to_repository
        self.from_repository = from_repository
        # must not mutate self._last_revision as it's potentially a shared instance
        self._last_revision = last_revision
        self.copied_file_ids = set()
        self.pb = bzrlib.ui.ui_factory.nested_progress_bar()
        self.nested_pb = self.pb
        self.pb = bzrlib.ui.ui_factory.progress_bar()
        self.nested_pb = None
        self.from_repository.lock_read()
        self.to_repository.lock_write()
        if self.nested_pb is not None:
            self.nested_pb.finished()
        self.to_repository.unlock()
        self.from_repository.unlock()
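    # A minimal construction sketch for RepoFetcher, assuming `repo_a` and
    # `repo_b` are hypothetical, already-opened Repository objects; only the
    # constructor signature shown above is relied on here:
    #
    #     fetcher = RepoFetcher(to_repository=repo_b, from_repository=repo_a,
    #                           last_revision=None, pb=None)
    #     print fetcher.count_copied, fetcher.failed_revisions
    #
    # As the class docstring notes, callers normally go through
    # InterRepository.fetch() rather than instantiating this directly.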
        """Primary worker function.

        This initialises all the needed variables, and then fetches the
        requested revisions, finally clearing the progress bar.
        """
        self.to_weaves = self.to_repository.weave_store
        self.to_control = self.to_repository.control_weaves
        self.from_weaves = self.from_repository.weave_store
        self.from_control = self.from_repository.control_weaves
        self.file_ids_names = {}
        pp = ProgressPhase('Fetch phase', 4, self.pb)
        revs = self._revids_to_fetch()
        self._fetch_weave_texts(revs)
        self._fetch_inventory_weave(revs)
        self._fetch_revision_texts(revs)
        self.count_copied += len(revs)
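        # The phase ordering above (revision ids, file texts, inventory weave,
        # then revision texts) is presumably chosen so that a copied revision
        # never refers to file or inventory data that has not been copied yet.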
    def _revids_to_fetch(self):
        mutter('fetch up to rev {%s}', self._last_revision)
        if self._last_revision is NULL_REVISION:
            # explicit limit of no revisions needed
        if (self._last_revision is not None and
            self.to_repository.has_revision(self._last_revision)):
        return self.to_repository.missing_revision_ids(self.from_repository,
        except errors.NoSuchRevision:
            raise InstallFailed([self._last_revision])

        self.last_revision = self._find_last_revision(last_revision)
        except NoSuchRevision, e:
            mutter('failed getting last revision: %s', e)
            raise InstallFailed([last_revision])
        mutter('fetch up to rev {%s}', self.last_revision)
        revs_to_fetch = self._compare_ancestries()
        raise InstallFailed([self.last_revision])
        self._copy_revisions(revs_to_fetch)
        self.new_ancestry = revs_to_fetch

    def _find_last_revision(self, last_revision):
        """Find the limiting source revision.

        Every ancestor of that revision will be merged across.

        Returns the revision_id, or returns None if there's no history
        in the source branch."""
        self.pb.update('get source history')
        from_history = self.from_branch.revision_history()
        self.pb.update('get destination history')
        self.from_branch.get_revision(last_revision)
        return from_history[-1]
        return None # no history in the source branch
    def _fetch_weave_texts(self, revs):
        texts_pb = bzrlib.ui.ui_factory.nested_progress_bar()
        # fileids_altered_by_revision_ids requires reading the inventory
        # weave, we will need to read the inventory weave again when
        # all this is done, so enable caching for that specific weave
        inv_w = self.from_repository.get_inventory_weave()
        file_ids = self.from_repository.fileids_altered_by_revision_ids(revs)
        num_file_ids = len(file_ids)
        for file_id, required_versions in file_ids.items():
            texts_pb.update("fetch texts", count, num_file_ids)
            to_weave = self.to_weaves.get_weave_or_empty(file_id,
                self.to_repository.get_transaction())
            from_weave = self.from_weaves.get_weave(file_id,
                self.from_repository.get_transaction())
            # we fetch all the texts, because texts do
            # not reference anything, and it's cheap enough
            to_weave.join(from_weave, version_ids=required_versions)
            # we don't need *all* of this data anymore, but we don't know
            # what we do need. This cache clearing will result in a new read
            # of the knit data when we do the checkout, but probably we
            # want to emit the needed data on the fly rather than at the end.
            # the from weave should know not to cache data being joined,
            # but it's OK to ask it to clear.
            from_weave.clear_cache()
            to_weave.clear_cache()
    def _fetch_inventory_weave(self, revs):
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
        pb.update("fetch inventory", 0, 2)
        to_weave = self.to_control.get_weave('inventory',
            self.to_repository.get_transaction())
        child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
        # just merge, this is optimisable and it means we don't
        # copy unreferenced data such as not-needed inventories.
        pb.update("fetch inventory", 1, 3)
        from_weave = self.from_repository.get_inventory_weave()
        pb.update("fetch inventory", 2, 3)
        # we fetch only the referenced inventories because we do not
        # know for unselected inventories whether all their required
        # texts are present in the other repository - it could be
        to_weave.join(from_weave, pb=child_pb, msg='merge inventory',
        from_weave.clear_cache()
class GenericRepoFetcher(RepoFetcher):
    """This is a generic repo to repo fetcher.

    This makes minimal assumptions about repo layout and contents.
    It triggers a reconciliation after fetching to ensure integrity.
    """

    def _fetch_revision_texts(self, revs):
        """Fetch revision object texts"""
        rev_pb = bzrlib.ui.ui_factory.nested_progress_bar()
        to_txn = self.to_transaction = self.to_repository.get_transaction()
        to_store = self.to_repository._revision_store
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
        pb.update('copying revisions', count, total)
        sig_text = self.from_repository.get_signature_text(rev)
        to_store.add_revision_signature_text(rev, sig_text, to_txn)
        except errors.NoSuchRevision:
            to_store.add_revision(self.from_repository.get_revision(rev),
        # fixup inventory if needed:
        # this is expensive because we have no inverse index to current ghosts.
        # but on local disk it's a few seconds and sftp push is already insane.
        # FIXME: repository should inform if this is needed.
        self.to_repository.reconcile()
class KnitRepoFetcher(RepoFetcher):
    """This is a knit format repository specific fetcher.

    This differs from the GenericRepoFetcher by not doing a
    reconciliation after copying, and using knit joining to
    """

    def _fetch_revision_texts(self, revs):
        # may need to be an InterRevisionStore call here.
        from_transaction = self.from_repository.get_transaction()
        to_transaction = self.to_repository.get_transaction()
        to_sf = self.to_repository._revision_store.get_signature_file(
        from_sf = self.from_repository._revision_store.get_signature_file(
        to_sf.join(from_sf, version_ids=revs, ignore_missing=True)
        to_rf = self.to_repository._revision_store.get_revision_file(
        from_rf = self.from_repository._revision_store.get_revision_file(
        to_rf.join(from_rf, version_ids=revs)
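# As the docstrings above note, GenericRepoFetcher copies revision and
# signature texts one at a time through the revision store and then calls
# reconcile(), whereas this knit-specific fetcher joins the signature and
# revision files wholesale and skips the post-copy reconciliation.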
class Inter1and2Helper(object):
    """Helper for operations that convert data from model 1 and 2

    This is for use by fetchers and converters.
    """

    def __init__(self, source, target):
        """
        :param source: The repository data comes from
        :param target: The repository data goes to
        """

    def iter_rev_trees(self, revs):
        """Iterate through RevisionTrees efficiently.

        Additionally, the inventory's revision_id is set if unset.

        Trees are retrieved in batches of 100, and then yielded in the order

        :param revs: A list of revision ids
        """
        for tree in self.source.revision_trees(revs[:100]):
            if tree.inventory.revision_id is None:
                tree.inventory.revision_id = tree.get_revision_id()
    def generate_root_texts(self, revs):
        """Generate VersionedFiles for all root ids.

        :param revs: the revisions to include
        """
        inventory_weave = self.source.get_inventory_weave()
        to_store = self.target.weave_store
        for tree in self.iter_rev_trees(revs):
            revision_id = tree.inventory.root.revision
            root_id = tree.inventory.root.file_id
            parents = inventory_weave.get_parents(revision_id)
            if root_id not in versionedfile:
                versionedfile[root_id] = to_store.get_weave_or_empty(root_id,
                    self.target.get_transaction())
            parent_texts[root_id] = versionedfile[root_id].add_lines(
                revision_id, parents, [], parent_texts)
    def regenerate_inventory(self, revs):
        """Generate a new inventory versionedfile in target, converting data.

        The inventory is retrieved from the source, (deserializing it), and
        stored in the target (reserializing it in a different format).
        :param revs: The revisions to include
        """
        inventory_weave = self.source.get_inventory_weave()
        for tree in self.iter_rev_trees(revs):
            parents = inventory_weave.get_parents(tree.get_revision_id())
            self.target.add_inventory(tree.get_revision_id(), tree.inventory,
class Model1toKnit2Fetcher(GenericRepoFetcher):
    """Fetch from a Model1 repository into a Knit2 repository
    """

    def __init__(self, to_repository, from_repository, last_revision=None,
        self.helper = Inter1and2Helper(from_repository, to_repository)
        GenericRepoFetcher.__init__(self, to_repository, from_repository,

    def _fetch_weave_texts(self, revs):
        GenericRepoFetcher._fetch_weave_texts(self, revs)
        # Now generate a weave for the tree root
        self.helper.generate_root_texts(revs)

    def _fetch_inventory_weave(self, revs):
        self.helper.regenerate_inventory(revs)
class Knit1to2Fetcher(KnitRepoFetcher):
    """Fetch from a Knit1 repository into a Knit2 repository"""

    def __init__(self, to_repository, from_repository, last_revision=None,
        self.helper = Inter1and2Helper(from_repository, to_repository)
        KnitRepoFetcher.__init__(self, to_repository, from_repository,

    def _fetch_weave_texts(self, revs):
        KnitRepoFetcher._fetch_weave_texts(self, revs)
        # Now generate a weave for the tree root
        self.helper.generate_root_texts(revs)

    def _fetch_inventory_weave(self, revs):
        self.helper.regenerate_inventory(revs)
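# Both converting fetchers share one pattern: run the parent class's normal
# weave-text fetch, then let Inter1and2Helper synthesize root-id texts and
# re-serialize inventories for the richer target format.  A rough usage
# sketch (the repository objects here are hypothetical):
#
#     Model1toKnit2Fetcher(to_repository=knit2_repo,
#                          from_repository=weave_repo)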
class Fetcher(object):
    """Backwards compatibility glue for branch.fetch()."""

    @deprecated_method(zero_eight)
    def __init__(self, to_branch, from_branch, last_revision=None, pb=None):
        """Please see branch.fetch()."""
        to_branch.fetch(from_branch, last_revision, pb)
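# The shim above makes the old and new spellings equivalent; a sketch, with
# `b_target` and `b_source` as hypothetical Branch objects:
#
#     Fetcher(b_target, b_source)            # deprecated spelling
#     b_target.fetch(b_source, None, None)   # preferred replacement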
    def _compare_ancestries(self):
        """Get a list of revisions that must be copied.

        That is, every revision that's in the ancestry of the source
        branch and not in the destination branch."""
        self.pb.update('get source ancestry')
        self.from_ancestry = self.from_branch.get_ancestry(self.last_revision)
        dest_last_rev = self.to_branch.last_revision()
        self.pb.update('get destination ancestry')
        dest_ancestry = self.to_branch.get_ancestry(dest_last_rev)
        ss = set(dest_ancestry)
        for rev_id in self.from_ancestry:
            to_fetch.append(rev_id)
            mutter('need to get revision {%s}', rev_id)
        mutter('need to get %d revisions in total', len(to_fetch))
        self.count_total = len(to_fetch)
    def _copy_revisions(self, revs_to_fetch):
        for rev_id in revs_to_fetch:
            if self.to_branch.has_revision(rev_id):
            self.pb.update('fetch revision', i, self.count_total)
            self._copy_one_revision(rev_id)
            self.count_copied += 1
    def _copy_one_revision(self, rev_id):
        """Copy revision and everything referenced by it."""
        mutter('copying revision {%s}', rev_id)
        rev_xml = self.from_branch.get_revision_xml(rev_id)
        inv_xml = self.from_branch.get_inventory_xml(rev_id)
        rev = serializer_v5.read_revision_from_string(rev_xml)
        inv = serializer_v5.read_inventory_from_string(inv_xml)
        assert rev.revision_id == rev_id
        assert rev.inventory_sha1 == sha_string(inv_xml)
        mutter(' committer %s, %d parents',
        self._copy_new_texts(rev_id, inv)
        parents = rev.parent_ids
        for parent in parents:
            if not self.to_branch.has_revision(parent):
                parents.pop(parents.index(parent))
        self._copy_inventory(rev_id, inv_xml, parents)
        self.to_branch.revision_store.add(StringIO(rev_xml), rev_id)
        mutter('copied revision %s', rev_id)
    def _copy_inventory(self, rev_id, inv_xml, parent_ids):
        self.to_control.add_text('inventory', rev_id,
                                 split_lines(inv_xml), parent_ids)
    def _copy_new_texts(self, rev_id, inv):
        """Copy any new texts occurring in this revision."""
        # TODO: Rather than writing out weaves every time, hold them
        # in memory until everything's done?  But this way is nicer
        # if it's interrupted.
        for path, ie in inv.iter_entries():
            if ie.revision != rev_id:
            mutter('%s {%s} is changed in this revision',
            self._copy_one_weave(rev_id, ie.file_id)
    def _copy_one_weave(self, rev_id, file_id):
        """Copy one file weave."""
        mutter('copy file {%s} modified in {%s}', file_id, rev_id)
        if file_id in self.copied_file_ids:
            mutter('file {%s} already copied', file_id)
        from_weave = self.from_weaves.get_weave(file_id)
        to_weave = self.to_weaves.get_weave_or_empty(file_id)
        to_weave.join(from_weave)
        self.to_weaves.put_weave(file_id, to_weave)
        self.count_weaves += 1
        self.copied_file_ids.add(file_id)
        mutter('copied file {%s}', file_id)