    count_copied -- number of revisions copied

    This should not be used directly; it's essentially an object to encapsulate
    the logic in InterRepository.fetch().
    """
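
    # Illustrative usage sketch (not part of the original class): rather than
    # constructing a RepoFetcher directly, callers are expected to go through
    # the repository API, e.g.
    #
    #   to_repository.fetch(from_repository, revision_id=last_revision)
    #
    # which lets InterRepository.fetch() choose the appropriate fetcher.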

    def __init__(self, to_repository, from_repository, last_revision=None, pb=None,
        find_ghosts=True):
        """Create a repo fetcher.

        :param find_ghosts: If True search the entire history for ghosts.
        """
        # result variables.
        self.failed_revisions = []
        self.count_copied = 0
        if to_repository.has_same_location(from_repository):
            # repository.fetch should be taking care of this case.
            raise errors.BzrError('RepoFetcher run '
                'between two objects at the same location: '
                '%r and %r' % (to_repository, from_repository))
        self.to_repository = to_repository
        self.from_repository = from_repository
        # must not mutate self._last_revision as it's potentially a shared instance
        self._last_revision = last_revision
        self.find_ghosts = find_ghosts
        self.pb = bzrlib.ui.ui_factory.nested_progress_bar()
        self.nested_pb = self.pb

    def __fetch(self):
        """Primary worker function.

        This initialises all the needed variables, and then fetches the
        requested revisions, finally clearing the progress bar.
        """
        self.to_weaves = self.to_repository.weave_store
        self.to_control = self.to_repository.control_weaves
        self.from_weaves = self.from_repository.weave_store
        self.from_control = self.from_repository.control_weaves
        self.count_total = 0
        self.file_ids_names = {}
        pp = ProgressPhase('Transferring', 4, self.pb)
        search = self._revids_to_fetch()
        if search is None:
            return
        if getattr(self, '_fetch_everything_for_search', None) is not None:
            self._fetch_everything_for_search(search, pp)
        else:
            # backward compatibility
            self._fetch_everything_for_revisions(search.get_keys(), pp)

    def _fetch_everything_for_search(self, search, pp):
        """Fetch all data for the given set of revisions."""
        # The first phase is "file". We pass the progress bar for it directly
        # into item_keys_introduced_by, which has more information about how
        # that phase is progressing than we do. Progress updates for the other
        # phases are taken care of in this function.
        # XXX: there should be a clear owner of the progress reporting. Perhaps
        # item_keys_introduced_by should have a richer API than it does at the
        # moment, so that it can feed the progress information back to this
        # function?
        phase = 'file'
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
        revs = search.get_keys()
        data_to_fetch = self.from_repository.item_keys_introduced_by(revs, pb)
        for knit_kind, file_id, revisions in data_to_fetch:
            if knit_kind != phase:
                phase = knit_kind
                # Make a new progress bar for this phase
                pb.finished()
                pp.next_phase()
                pb = bzrlib.ui.ui_factory.nested_progress_bar()
            if knit_kind == "file":
                self._fetch_weave_text(file_id, revisions)
            elif knit_kind == "inventory":
                # Once we've processed all the files, then we generate the root
                # texts (if necessary), then we process the inventory. It's a
                # bit distasteful to have knit_kind == "inventory" mean this,
                # perhaps it should happen on the first non-"file" knit, in case
                # it's not always inventory?
                self._generate_root_texts(revs)
                self._fetch_inventory_weave(revs, pb)
            elif knit_kind == "signatures":
                # Nothing to do here; this will be taken care of when
                # _fetch_revision_texts happens.
                pass
            elif knit_kind == "revisions":
                self._fetch_revision_texts(revs, pb)
            else:
                raise AssertionError("Unknown knit kind %r" % knit_kind)
        pb.finished()
        self.count_copied += len(revs)
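
    # Note (illustrative): because __fetch dispatches through
    # _fetch_everything_for_search when it is present, a subclass can replace
    # the whole per-knit transfer loop above with its own transfer, e.g.
    # (sketch, mirroring RemoteToOtherFetcher further down):
    #
    #   def _fetch_everything_for_search(self, search, pp):
    #       stream = self.from_repository.get_data_stream_for_search(search)
    #       self.to_repository.insert_data_stream(stream)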

    def _revids_to_fetch(self):
        """Determines the exact revisions needed from self.from_repository to
        install self._last_revision in self.to_repository.

        If no revisions need to be fetched, then this just returns None.
        """
        mutter('fetch up to rev {%s}', self._last_revision)
        if self._last_revision is NULL_REVISION:
            # explicit limit of no revisions needed
            return None
        if (self._last_revision is not None and
            self.to_repository.has_revision(self._last_revision)):
            return None
        try:
            return self.to_repository.search_missing_revision_ids(
                self.from_repository, self._last_revision,
                find_ghosts=self.find_ghosts)
        except errors.NoSuchRevision:
            raise InstallFailed([self._last_revision])

    def _fetch_weave_text(self, file_id, required_versions):
        to_weave = self.to_weaves.get_weave_or_empty(file_id,
            self.to_repository.get_transaction())
        from_weave = self.from_weaves.get_weave(file_id,
            self.from_repository.get_transaction())
        # we fetch all the texts, because texts do
        # not reference anything, and it's cheap enough
        to_weave.join(from_weave, version_ids=required_versions)
        # we don't need *all* of this data anymore, but we don't know
        # what we do need. This cache clearing will result in a new read
        # of the knit data when we do the checkout, but probably we
        # want to emit the needed data on the fly rather than at the
        # end anyhow.
        # the from weave should know not to cache data being joined,
        # but it's ok to ask it to clear.
        from_weave.clear_cache()
        to_weave.clear_cache()

    def _fetch_inventory_weave(self, revs, pb):
        pb.update("fetch inventory", 0, 2)
        to_weave = self.to_repository.get_inventory_weave()
        child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
        try:
            # just merge, this is optimisable and it means we don't
            # copy unreferenced data such as not-needed inventories.
            pb.update("fetch inventory", 1, 3)
            from_weave = self.from_repository.get_inventory_weave()
            pb.update("fetch inventory", 2, 3)
            # we fetch only the referenced inventories because we do not
            # know for unselected inventories whether all their required
            # texts are present in the other repository - it could be
            # corrupt.
            to_weave.join(from_weave, pb=child_pb, msg='merge inventory',
                          version_ids=revs)
            from_weave.clear_cache()
        finally:
            child_pb.finished()

    def _generate_root_texts(self, revs):
        """This will be called by __fetch between fetching weave texts and
        fetching the inventory weave.

        Subclasses should override this if they need to generate root texts
        after fetching weave texts.
        """
        pass
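
    # For example (sketch), a format-converting subclass can override this
    # hook with something like:
    #
    #   def _generate_root_texts(self, revs):
    #       self.helper.generate_root_texts(revs)
    #
    # as Model1toKnit2Fetcher and Knit1to2Fetcher do further down.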


class GenericRepoFetcher(RepoFetcher):
    """This is a generic repo to repo fetcher.

    It triggers a reconciliation after fetching to ensure integrity.
    """

    def _fetch_revision_texts(self, revs, pb):
        """Fetch revision object texts"""
        to_txn = self.to_transaction = self.to_repository.get_transaction()
        count = 0
        total = len(revs)
        to_store = self.to_repository._revision_store
        for rev in revs:
            pb.update('copying revisions', count, total)
            try:
                sig_text = self.from_repository.get_signature_text(rev)
                to_store.add_revision_signature_text(rev, sig_text, to_txn)
            except errors.NoSuchRevision:
                # not signed.
                pass
            to_store.add_revision(self.from_repository.get_revision(rev),
                                  to_txn)
            count += 1
        # fixup inventory if needed:
        # this is expensive because we have no inverse index to current ghosts.
        # but on local disk it's a few seconds and sftp push is already insane.
        # FIXME: repository should inform if this is needed.
        self.to_repository.reconcile()


class KnitRepoFetcher(RepoFetcher):
    """This is a knit format repository specific fetcher."""


class Model1toKnit2Fetcher(GenericRepoFetcher):
    """Fetch from a Model1 repository into a Knit2 repository
    """

    def __init__(self, to_repository, from_repository, last_revision=None,
                 pb=None, find_ghosts=True):
        self.helper = Inter1and2Helper(from_repository, to_repository)
        GenericRepoFetcher.__init__(self, to_repository, from_repository,
            last_revision, pb, find_ghosts)

    def _generate_root_texts(self, revs):
        self.helper.generate_root_texts(revs)

    def _fetch_inventory_weave(self, revs, pb):
        self.helper.regenerate_inventory(revs)


class Knit1to2Fetcher(KnitRepoFetcher):
    """Fetch from a Knit1 repository into a Knit2 repository"""

    def __init__(self, to_repository, from_repository, last_revision=None,
                 pb=None, find_ghosts=True):
        self.helper = Inter1and2Helper(from_repository, to_repository)
        KnitRepoFetcher.__init__(self, to_repository, from_repository,
            last_revision, pb, find_ghosts)

    def _generate_root_texts(self, revs):
        self.helper.generate_root_texts(revs)

    def _fetch_inventory_weave(self, revs, pb):
        self.helper.regenerate_inventory(revs)


class RemoteToOtherFetcher(GenericRepoFetcher):

    def _fetch_everything_for_search(self, search, pp):
        data_stream = self.from_repository.get_data_stream_for_search(search)
        self.to_repository.insert_data_stream(data_stream)


class Fetcher(object):
    """Backwards compatibility glue for branch.fetch()."""

    @deprecated_method(zero_eight)
    def __init__(self, to_branch, from_branch, last_revision=None, pb=None):
        """Please see branch.fetch()."""
        to_branch.fetch(from_branch, last_revision, pb)
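
# Illustrative usage of the deprecated glue above (assuming `to_branch` and
# `from_branch` are bzrlib Branch objects):
#
#   Fetcher(to_branch, from_branch, last_revision=rev_id)   # deprecated path
#   to_branch.fetch(from_branch, last_revision=rev_id)      # preferred call
#
# Both end up doing the same work, since Fetcher simply forwards to
# Branch.fetch().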