         """Tell the builder that the inventory is finished."""
         if self.new_inventory.root is None:
             raise AssertionError('Root entry should be supplied to'
-                ' record_entry_contents, as of bzr 0.10.',
-                DeprecationWarning, stacklevel=2)
+                ' record_entry_contents, as of bzr 0.10.')
             self.new_inventory.add(InventoryDirectory(ROOT_ID, '', None))
         self.new_inventory.revision_id = self._new_revision_id
         self.inv_sha1 = self.repository.add_inventory(
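
For context, the assertion above enforces that a root entry was recorded before the inventory is finished. A toy illustration of that invariant, using stand-in classes rather than the real CommitBuilder:

# Toy illustration only: an inventory must have a root entry before it can be
# finished.  ToyInventory and ToyBuilder are illustrative, not bzrlib classes.

class ToyInventory(object):
    def __init__(self):
        self.root = None
        self.revision_id = None


class ToyBuilder(object):
    def __init__(self):
        self.new_inventory = ToyInventory()

    def record_root(self):
        self.new_inventory.root = 'TREE_ROOT'

    def finish_inventory(self, revision_id):
        if self.new_inventory.root is None:
            raise AssertionError('Root entry should be supplied to'
                ' record_entry_contents, as of bzr 0.10.')
        self.new_inventory.revision_id = revision_id


builder = ToyBuilder()
try:
    builder.finish_inventory('rev-1')
except AssertionError:
    pass
else:
    raise AssertionError('expected a missing root to be rejected')
builder.record_root()
builder.finish_inventory('rev-1')
assert builder.new_inventory.revision_id == 'rev-1'
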
+    def add_fallback_repository(self, repository):
+        """Add a repository to use for looking up data not held locally.
+
+        :param repository: A repository.
+        """
+        if not self._format.supports_external_lookups:
+            raise errors.UnstackableRepositoryFormat(self._format, self.base)
+        if not self._add_fallback_repository_check(repository):
+            raise errors.IncompatibleRepositories(self, repository)
+        self._fallback_repositories.append(repository)
+        self.texts.add_fallback_versioned_files(repository.texts)
+        self.inventories.add_fallback_versioned_files(repository.inventories)
+        self.revisions.add_fallback_versioned_files(repository.revisions)
+        self.signatures.add_fallback_versioned_files(repository.signatures)
+
+    def _add_fallback_repository_check(self, repository):
+        """Check that this repository can fallback to repository safely.
+
+        :param repository: A repository to fallback to.
+        :return: True if the repositories can stack ok.
+        """
+        return InterRepository._same_model(self, repository)
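
For orientation, the effect of registering a fallback is that queries unanswered locally are retried against each fallback in order, which is what chaining the texts/inventories/revisions/signatures stores above achieves. A simplified sketch of that lookup order (StackedStore and its dict-backed stores are hypothetical, not bzrlib classes):

# Simplified sketch of stacked lookup, not the bzrlib implementation.
# A query is answered from the local store first, then from each fallback in
# the order it was added, mirroring add_fallback_versioned_files above.

class StackedStore(object):
    """Hypothetical store that consults fallbacks for missing keys."""

    def __init__(self, local):
        self.local = local            # dict of key -> text for this sketch
        self.fallbacks = []           # ordered list of further dicts

    def add_fallback(self, store):
        self.fallbacks.append(store)

    def get(self, key):
        if key in self.local:
            return self.local[key]
        for fallback in self.fallbacks:
            if key in fallback:
                return fallback[key]
        raise KeyError(key)


local = {('rev-2',): 'new text'}
shared = {('rev-1',): 'text held only in the fallback repository'}
store = StackedStore(local)
store.add_fallback(shared)
assert store.get(('rev-1',)) == 'text held only in the fallback repository'
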
     def add_inventory(self, revision_id, inv, parents):
         """Add the inventory inv to the repository as revision_id.

     def all_revision_ids(self):
         """Returns a list of all the revision ids in the repository.

-        This is deprecated because code should generally work on the graph
-        reachable from a particular revision, and ignore any other revisions
-        that might be present. There is no direct replacement method.
+        This is conceptually deprecated because code should generally work on
+        the graph reachable from a particular revision, and ignore any other
+        revisions that might be present. There is no direct replacement
+        method.
         """
         if 'evil' in debug.debug_flags:
             mutter_callsite(2, "all_revision_ids is linear with history.")
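
The reworded docstring keeps the same advice: prefer walking the graph reachable from a known revision over enumerating every revision. A small sketch of that style of traversal, using a plain dict in place of Repository.get_parent_map():

# Sketch only: walking the ancestry reachable from one revision instead of
# calling all_revision_ids().  parent_map here is a plain dict standing in
# for Repository.get_parent_map(); it is not the bzrlib graph API.

def reachable_revisions(parent_map, start):
    """Return every revision id reachable from `start` via parent links."""
    seen = set()
    pending = [start]
    while pending:
        revision_id = pending.pop()
        if revision_id in seen:
            continue
        seen.add(revision_id)
        pending.extend(parent_map.get(revision_id, ()))
    return seen


parent_map = {'rev-3': ('rev-2',), 'rev-2': ('rev-1',), 'rev-1': ()}
assert reachable_revisions(parent_map, 'rev-3') == {'rev-1', 'rev-2', 'rev-3'}
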
         # on whether escaping is required.
         self._warn_if_deprecated()
         self._write_group = None
+        # Additional places to query for data.
+        self._fallback_repositories = []

     def __repr__(self):
         return '%s(%r)' % (self.__class__.__name__,

         XXX: this docstring is duplicated in many places, e.g. lockable_files.py
         """
         result = self.control_files.lock_write(token=token)
+        for repo in self._fallback_repositories:
+            # Writes don't affect fallback repos
+            repo.lock_read()
         self._refresh_data()
         return result

     def lock_read(self):
         self.control_files.lock_read()
+        for repo in self._fallback_repositories:
+            repo.lock_read()
         self._refresh_data()
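
The loops added above read-lock every fallback whenever the repository itself is locked; a write lock on the stacked repository never needs more than a read lock on a fallback, because nothing is ever written there. A rough illustration with stand-in classes (CountingLock and StackedRepo are not bzrlib names):

# Minimal sketch of the locking rule above: a write lock on the repository
# still only read-locks its fallbacks, because writes never touch them.

class CountingLock(object):
    def __init__(self):
        self.read_count = 0
        self.write_count = 0

    def lock_read(self):
        self.read_count += 1

    def lock_write(self):
        self.write_count += 1


class StackedRepo(object):
    def __init__(self, fallbacks):
        self.lock = CountingLock()
        self.fallbacks = fallbacks

    def lock_write(self):
        self.lock.lock_write()
        for repo in self.fallbacks:
            # Writes don't affect fallback repos, so a read lock is enough.
            repo.lock.lock_read()


fallback = StackedRepo([])
repo = StackedRepo([fallback])
repo.lock_write()
assert repo.lock.write_count == 1
assert fallback.lock.read_count == 1 and fallback.lock.write_count == 0
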

     def get_physical_lock_status(self):

             raise errors.BzrError(
                 'Must end write groups before releasing write locks.')
         self.control_files.unlock()
+        for repo in self._fallback_repositories:
+            repo.unlock()

     def clone(self, a_bzrdir, revision_id=None):

         parent_map = self.get_parent_map(revision_ids)
         return [parent_map.get(r, None) for r in revision_ids]

-    def get_parent_map(self, keys):
+    def get_parent_map(self, revision_ids):
         """See graph._StackedParentsProvider.get_parent_map"""
-        parent_map = {}
-        for revision_id in keys:
-            if revision_id is None:
-                raise ValueError('get_parent_map(None) is not valid')
-            if revision_id == _mod_revision.NULL_REVISION:
-                parent_map[revision_id] = ()
-            else:
-                try:
-                    parent_id_list = self.get_revision(revision_id).parent_ids
-                except errors.NoSuchRevision:
-                    pass
-                else:
-                    if len(parent_id_list) == 0:
-                        parent_ids = (_mod_revision.NULL_REVISION,)
-                    else:
-                        parent_ids = tuple(parent_id_list)
-                    parent_map[revision_id] = parent_ids
-        return parent_map
+        # revisions index works in keys; this just works in revisions
+        # therefore wrap and unwrap
+        query_keys = []
+        result = {}
+        for revision_id in revision_ids:
+            if revision_id == _mod_revision.NULL_REVISION:
+                result[revision_id] = ()
+            elif revision_id is None:
+                raise ValueError('get_parent_map(None) is not valid')
+            else:
+                query_keys.append((revision_id,))
+        for ((revision_id,), parent_keys) in \
+                self.revisions.get_parent_map(query_keys).iteritems():
+            if parent_keys:
+                result[revision_id] = tuple(parent_revid
+                    for (parent_revid,) in parent_keys)
+            else:
+                result[revision_id] = (_mod_revision.NULL_REVISION,)
+        return result
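
The new implementation wraps each revision id into a one-element key tuple for the revisions store and unwraps the answers, mapping an empty parent list to the null revision. A standalone sketch of that wrap/unwrap step, with a plain callable standing in for self.revisions.get_parent_map() and the literal 'null:' standing in for _mod_revision.NULL_REVISION:

# Sketch of the wrap/unwrap performed by the new get_parent_map(); not the
# bzrlib code itself.

def get_parent_map(revisions_index, revision_ids):
    result = {}
    query_keys = []
    for revision_id in revision_ids:
        if revision_id == 'null:':
            result[revision_id] = ()
        elif revision_id is None:
            raise ValueError('get_parent_map(None) is not valid')
        else:
            query_keys.append((revision_id,))          # wrap into a key tuple
    for key, parent_keys in revisions_index(query_keys).items():
        (revision_id,) = key                            # unwrap the key tuple
        if parent_keys:
            result[revision_id] = tuple(parent for (parent,) in parent_keys)
        else:
            # A revision with no parents reports the null revision as parent.
            result[revision_id] = ('null:',)
    return result


def fake_index(keys):
    known = {('rev-2',): [('rev-1',)], ('rev-1',): []}
    return dict((key, known[key]) for key in keys if key in known)


assert get_parent_map(fake_index, ['rev-2', 'rev-1', 'null:']) == {
    'rev-2': ('rev-1',), 'rev-1': ('null:',), 'null:': ()}
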

     def _make_parents_provider(self):

     'bzrlib.repofmt.pack_repo',
     'RepositoryFormatKnitPack4',
     )
+format_registry.register_lazy(
+    'Bazaar RepositoryFormatKnitPack5 (bzr 1.6)\n',
+    'bzrlib.repofmt.pack_repo',
+    'RepositoryFormatKnitPack5',
+    )
+format_registry.register_lazy(
+    'Bazaar RepositoryFormatKnitPack5RichRoot (bzr 1.6)\n',
+    'bzrlib.repofmt.pack_repo',
+    'RepositoryFormatKnitPack5RichRoot',
+    )
 # Development formats.
 # development 0 - stub to introduce development versioning scheme.

     'bzrlib.repofmt.pack_repo',
     'RepositoryFormatPackDevelopment0Subtree',
     )
+format_registry.register_lazy(
+    "Bazaar development format 1 (needs bzr.dev from before 1.6)\n",
+    'bzrlib.repofmt.pack_repo',
+    'RepositoryFormatPackDevelopment1',
+    )
+format_registry.register_lazy(
+    ("Bazaar development format 1 with subtree support "
+        "(needs bzr.dev from before 1.6)\n"),
+    'bzrlib.repofmt.pack_repo',
+    'RepositoryFormatPackDevelopment1Subtree',
+    )
 # 1.3->1.4 go below here
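
register_lazy only records a format string together with a module path and class name, so bzrlib.repofmt.pack_repo is not imported until the format is actually asked for. A rough sketch of that kind of registry (LazyRegistry is a stand-in, not bzrlib's registry module):

# Rough sketch of lazy registration: the key maps to a module and class name,
# and the import is deferred until get() is called.

import importlib


class LazyRegistry(object):
    def __init__(self):
        self._entries = {}

    def register_lazy(self, key, module_name, member_name):
        # Nothing is imported yet; just remember where the object lives.
        self._entries[key] = (module_name, member_name)

    def get(self, key):
        module_name, member_name = self._entries[key]
        module = importlib.import_module(module_name)
        return getattr(module, member_name)


registry = LazyRegistry()
registry.register_lazy('OrderedDict example', 'collections', 'OrderedDict')
cls = registry.get('OrderedDict example')
assert cls.__name__ == 'OrderedDict'
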
     @needs_write_lock
     def fetch(self, revision_id=None, pb=None, find_ghosts=False):
         """See InterRepository.fetch()."""
+        if len(self.source._fallback_repositories) > 0:
+            from bzrlib.fetch import KnitRepoFetcher
+            fetcher = KnitRepoFetcher(self.target, self.source, revision_id,
+                                      pb, find_ghosts)
+            return fetcher.count_copied, fetcher.failed_revisions
         from bzrlib.repofmt.pack_repo import Packer
         mutter("Using fetch logic to copy between %s(%s) and %s(%s)",
                self.source, self.source._format, self.target, self.target._format)

             # to fetch from all packs to one without
             # inventory parsing etc, IFF nothing to be copied is in the target.
-            revision_ids = self.source.all_revision_ids()
+            source_revision_ids = frozenset(self.source.all_revision_ids())
+            revision_ids = source_revision_ids - \
+                frozenset(self.target.get_parent_map(source_revision_ids))
             revision_keys = [(revid,) for revid in revision_ids]
             index = self.target._pack_collection.revision_index.combined_index
             present_revision_ids = set(item[1][0] for item in

         if not find_ghosts and revision_id is not None:
             return self._walk_to_common_revisions([revision_id])
         elif revision_id is not None:
-            source_ids = self.source.get_ancestry(revision_id)
-            if source_ids[0] is not None:
-                raise AssertionError()
+            # Find ghosts: search for revisions pointing from one repository to
+            # the other, and vice versa, anywhere in the history of revision_id.
+            graph = self.target.get_graph(other_repository=self.source)
+            searcher = graph._make_breadth_first_searcher([revision_id])
+            found_ids = set()
+            while True:
+                try:
+                    next_revs, ghosts = searcher.next_with_ghosts()
+                except StopIteration:
+                    break
+                if revision_id in ghosts:
+                    raise errors.NoSuchRevision(self.source, revision_id)
+                found_ids.update(next_revs)
+                found_ids.update(ghosts)
+            found_ids = frozenset(found_ids)
+            # Double query here: should be able to avoid this by changing the
+            # graph api further.
+            result_set = found_ids - frozenset(
+                self.target.get_parent_map(found_ids))
         else:
             source_ids = self.source.all_revision_ids()
-        # source_ids is the worst possible case we may need to pull.
-        # now we want to filter source_ids against what we actually
-        # have in target, but don't try to check for existence where we know
-        # we do not have a revision as that would be pointless.
-        target_ids = set(self.target.all_revision_ids())
-        result_set = set(source_ids).difference(target_ids)
+            # source_ids is the worst possible case we may need to pull.
+            # now we want to filter source_ids against what we actually
+            # have in target, but don't try to check for existence where we know
+            # we do not have a revision as that would be pointless.
+            target_ids = set(self.target.all_revision_ids())
+            result_set = set(source_ids).difference(target_ids)
         return self.source.revision_ids_to_search_result(result_set)
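
The new elif branch walks the whole ancestry of revision_id with a searcher that also reports ghosts (parents that are referenced but not present), and only afterwards filters the result against target.get_parent_map, which is the "double query" the comment apologises for. A self-contained sketch of such a ghost-aware walk, using a plain dict instead of the bzrlib graph and searcher API:

# Sketch of a ghost-aware ancestry walk: parents referenced by some revision
# but missing from the graph are reported as ghosts rather than followed.
# This imitates, but is not, bzrlib's breadth-first searcher and
# next_with_ghosts().

def walk_with_ghosts(graph, start):
    found, ghosts = set(), set()
    pending = {start}
    while pending:
        next_pending = set()
        for revision_id in pending:
            if revision_id in graph:
                found.add(revision_id)
                next_pending.update(graph[revision_id])
            else:
                ghosts.add(revision_id)
        pending = next_pending - found - ghosts
    return found, ghosts


graph = {'rev-3': ('rev-2', 'ghost-1'), 'rev-2': ()}
found, ghosts = walk_with_ghosts(graph, 'rev-3')
assert found == {'rev-3', 'rev-2'}
assert ghosts == {'ghost-1'}
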
+class InterRemoteToOther(InterRepository):
+
+    def __init__(self, source, target):
+        InterRepository.__init__(self, source, target)
+        self._real_inter = None
+
+    @staticmethod
+    def is_compatible(source, target):
+        if not isinstance(source, remote.RemoteRepository):
+            return False
+        # Is source's model compatible with target's model?
+        source._ensure_real()
+        real_source = source._real_repository
+        if isinstance(real_source, remote.RemoteRepository):
+            raise NotImplementedError(
+                "We don't support remote repos backed by remote repos yet.")
+        return InterRepository._same_model(real_source, target)
+
+    def _ensure_real_inter(self):
+        if self._real_inter is None:
+            self.source._ensure_real()
+            real_source = self.source._real_repository
+            self._real_inter = InterRepository.get(real_source, self.target)
+
+    def fetch(self, revision_id=None, pb=None, find_ghosts=False):
+        self._ensure_real_inter()
+        return self._real_inter.fetch(revision_id=revision_id, pb=pb,
+            find_ghosts=find_ghosts)
+
+    def copy_content(self, revision_id=None):
+        self._ensure_real_inter()
+        self._real_inter.copy_content(revision_id=revision_id)
+
+    def _get_repo_format_to_test(self):
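
InterRemoteToOther materialises the repository behind the remote proxy once, builds the right InterRepository for the real pair, and then forwards fetch and copy_content to it. A generic sketch of that lazily-resolved delegate pattern (the class and helper names below are illustrative, not bzrlib's):

# Generic sketch of the pattern above: resolve the real object lazily, build
# the appropriate helper for it once, then forward calls.

class LazyDelegate(object):
    def __init__(self, resolve_real, make_helper):
        self._resolve_real = resolve_real   # e.g. ensure-real plus a lookup
        self._make_helper = make_helper     # e.g. a factory like InterRepository.get
        self._helper = None

    def _ensure_helper(self):
        if self._helper is None:
            real = self._resolve_real()
            self._helper = self._make_helper(real)
        return self._helper

    def fetch(self, revision_id=None):
        return self._ensure_helper().fetch(revision_id=revision_id)


class FakeHelper(object):
    def __init__(self, real):
        self.real = real

    def fetch(self, revision_id=None):
        return (self.real, revision_id)


delegate = LazyDelegate(lambda: 'real-repo', FakeHelper)
assert delegate.fetch(revision_id='rev-1') == ('real-repo', 'rev-1')
assert delegate.fetch(revision_id='rev-2')[0] == 'real-repo'
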
 InterRepository.register_optimiser(InterDifferingSerializer)
 InterRepository.register_optimiser(InterSameDataRepository)
 InterRepository.register_optimiser(InterWeaveRepo)

 InterRepository.register_optimiser(InterKnit1and2)
 InterRepository.register_optimiser(InterPackRepo)
 InterRepository.register_optimiser(InterOtherToRemote)
+InterRepository.register_optimiser(InterRemoteToOther)
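
Each class registered here is an InterRepository specialisation; conceptually, a pair of repositories is served by the first registered optimiser whose is_compatible accepts them. A small sketch of that selection idea (this mirrors the concept, not bzrlib's exact lookup code):

# Sketch of optimiser selection: try each registered class in turn and use the
# first whose is_compatible() accepts the pair.

class GenericInter(object):
    @staticmethod
    def is_compatible(source, target):
        return True        # fallback that always applies


class SameFormatInter(object):
    @staticmethod
    def is_compatible(source, target):
        return source.get('format') == target.get('format')


_optimisers = []

def register_optimiser(optimiser):
    _optimisers.append(optimiser)

def get_inter(source, target):
    for optimiser in _optimisers:
        if optimiser.is_compatible(source, target):
            return optimiser
    return GenericInter

register_optimiser(SameFormatInter)

assert get_inter({'format': 'pack'}, {'format': 'pack'}) is SameFormatInter
assert get_inter({'format': 'pack'}, {'format': 'weave'}) is GenericInter
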


 class CopyConverter(object):