@@ -522 +510 @@
         if revid and committers:
             result['committers'] = 0
         if revid and revid != _mod_revision.NULL_REVISION:
+            graph = self.get_graph()
             if committers:
                 all_committers = set()
-            revisions = self.get_ancestry(revid)
-            # pop the leading None
-            revisions.pop(0)
-            first_revision = None
+            revisions = [r for (r, p) in graph.iter_ancestry([revid])
+                        if r != _mod_revision.NULL_REVISION]
+            last_revision = None
             if not committers:
                 # ignore the revisions in the middle - just grab first and last
                 revisions = revisions[0], revisions[-1]
             for revision in self.get_revisions(revisions):
-                if not first_revision:
-                    first_revision = revision
+                if not last_revision:
+                    last_revision = revision
                 if committers:
                     all_committers.add(revision.committer)
-            last_revision = revision
+            first_revision = revision
             if committers:
                 result['committers'] = len(all_committers)
             result['firstrev'] = (first_revision.timestamp,
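
For context, gather_stats() is normally reached through a branch's repository. A minimal sketch of a caller, assuming a local branch at '.' whose tip is not the null revision (the names below are illustrative, not part of the change):

    from bzrlib import branch

    b = branch.Branch.open('.')
    b.lock_read()
    try:
        stats = b.repository.gather_stats(b.last_revision(), committers=True)
        # 'committers', 'firstrev' and 'latestrev' are only filled in when a
        # non-null revid is supplied, as in the code above.
        print stats.get('committers')
        print stats.get('firstrev')   # (timestamp, timezone) of the oldest revision
    finally:
        b.unlock()
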
@@ -586 +574 @@
     def search_missing_revision_ids(self, other,
             revision_id=symbol_versioning.DEPRECATED_PARAMETER,
-            find_ghosts=True, revision_ids=None, if_present_ids=None):
+            find_ghosts=True, revision_ids=None, if_present_ids=None,
+            limit=None):
         """Return the revision ids that other has that this does not.

         These are returned in topological order.
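
The revision_ids keyword replaces the deprecated revision_id argument. A hedged sketch of a caller using the new style, with placeholder repository objects and revision ids:

    # 'target' and 'other' stand for two open repositories; 'tip' is a
    # revision id known to exist in 'other'.
    target.lock_read()
    other.lock_read()
    try:
        result = target.search_missing_revision_ids(
            other, revision_ids=[tip], find_ghosts=False)
        missing = result.get_keys()
    finally:
        other.unlock()
        target.unlock()
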
@@ -928 +917 @@
         raise NotImplementedError(self.add_signature_text)

-    def find_text_key_references(self):
-        """Find the text key references within the repository.
-
-        :return: A dictionary mapping text keys ((fileid, revision_id) tuples)
-            to whether they were referred to by the tree of the
-            revision_id that they contain.
-        """
-        raise NotImplementedError(self.find_text_key_references)
-
     def _find_parent_ids_of_revisions(self, revision_ids):
         """Find all parent ids that are mentioned in the revision graph.
@@ -1204 +1193 @@
         plaintext = testament.as_short_text()
         self.store_revision_signature(gpg_strategy, plaintext, revision_id)

+    @needs_read_lock
+    def verify_revision(self, revision_id, gpg_strategy):
+        """Verify the signature on a revision.
+
+        :param revision_id: the revision to verify
+        :param gpg_strategy: the GPGStrategy object to use
+
+        :return: gpg.SIGNATURE_VALID or a failed SIGNATURE_ value
+        """
+        if not self.has_signature_for_revision_id(revision_id):
+            return gpg.SIGNATURE_NOT_SIGNED, None
+        signature = self.get_signature_text(revision_id)
+
+        testament = _mod_testament.Testament.from_revision(self, revision_id)
+        plaintext = testament.as_short_text()
+
+        return gpg_strategy.verify(signature, plaintext)
+
     def has_signature_for_revision_id(self, revision_id):
         """Query for a revision signature for revision_id in the repository."""
         raise NotImplementedError(self.has_signature_for_revision_id)
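
A sketch of driving verify_revision() end to end. Building the GPGStrategy from the global configuration is an assumption here, as is the exact set of failure statuses beyond the two shown in the hunk:

    from bzrlib import config, gpg

    # 'repo' is an open, read-locked repository; 'rev_id' an existing revision.
    strategy = gpg.GPGStrategy(config.GlobalConfig())  # assumption: built from global config
    status, key = repo.verify_revision(rev_id, strategy)
    if status == gpg.SIGNATURE_VALID:
        print 'valid signature from', key
    elif status == gpg.SIGNATURE_NOT_SIGNED:
        print 'revision is not signed'
    else:
        print 'signature did not verify'
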
@@ -1226 +1232 @@
         :param check_repo: If False do not check the repository contents, just
             calculate the data callback_refs requires and call them back.
         """
-        return self._check(revision_ids, callback_refs=callback_refs,
+        return self._check(revision_ids=revision_ids, callback_refs=callback_refs,
             check_repo=check_repo)

-    def _check(self, revision_ids, callback_refs, check_repo):
-        result = check.Check(self, check_repo=check_repo)
-        result.check(callback_refs)
-        return result
+    def _check(self, revision_ids=None, callback_refs=None, check_repo=True):
+        raise NotImplementedError(self.check)

     def _warn_if_deprecated(self, branch=None):
         if not self._format.is_deprecated():
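
With _check() now abstract, the public check() wrapper remains the supported entry point. A minimal sketch, assuming an open repository whose check result exposes report_results():

    # 'repo' is an open repository; run a full check of its contents.
    result = repo.check()
    result.report_results(verbose=True)
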
@@ -1716 +1724 @@
                             content is copied.
         :return: None.
         """
-        ui.ui_factory.warn_experimental_format_fetch(self)
-        from bzrlib.fetch import RepoFetcher
-        # See <https://launchpad.net/bugs/456077> asking for a warning here
-        if self.source._format.network_name() != self.target._format.network_name():
-            ui.ui_factory.show_user_warning('cross_format_fetch',
-                from_format=self.source._format,
-                to_format=self.target._format)
-        f = RepoFetcher(to_repository=self.target,
-                        from_repository=self.source,
-                        last_revision=revision_id,
-                        fetch_spec=fetch_spec,
-                        find_ghosts=find_ghosts)
+        raise NotImplementedError(self.fetch)
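
With InterRepository.fetch() reduced to NotImplementedError, concrete inter-repository implementations supply the behaviour and callers are expected to go through Repository.fetch(), which selects one for the pair of repositories. A hedged sketch with placeholder names:

    # 'source', 'target' and 'rev_id' are placeholders for two open
    # repositories and a revision present in 'source'.
    target.lock_write()
    try:
        target.fetch(source, revision_id=rev_id)
    finally:
        target.unlock()
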
-    def _walk_to_common_revisions(self, revision_ids, if_present_ids=None):
-        """Walk out from revision_ids in source to revisions target has.
-
-        :param revision_ids: The start point for the search.
-        :return: A set of revision ids.
-        """
-        target_graph = self.target.get_graph()
-        revision_ids = frozenset(revision_ids)
-        if if_present_ids:
-            all_wanted_revs = revision_ids.union(if_present_ids)
-        else:
-            all_wanted_revs = revision_ids
-        missing_revs = set()
-        source_graph = self.source.get_graph()
-        # ensure we don't pay silly lookup costs.
-        searcher = source_graph._make_breadth_first_searcher(all_wanted_revs)
-        null_set = frozenset([_mod_revision.NULL_REVISION])
-        searcher_exhausted = False
-        while True:
-            next_revs = set()
-            ghosts = set()
-            # Iterate the searcher until we have enough next_revs
-            while len(next_revs) < self._walk_to_common_revisions_batch_size:
-                try:
-                    next_revs_part, ghosts_part = searcher.next_with_ghosts()
-                    next_revs.update(next_revs_part)
-                    ghosts.update(ghosts_part)
-                except StopIteration:
-                    searcher_exhausted = True
-                    break
-            # If there are ghosts in the source graph, and the caller asked for
-            # them, make sure that they are present in the target.
-            # We don't care about other ghosts as we can't fetch them and
-            # haven't been asked to.
-            ghosts_to_check = set(revision_ids.intersection(ghosts))
-            revs_to_get = set(next_revs).union(ghosts_to_check)
-            if revs_to_get:
-                have_revs = set(target_graph.get_parent_map(revs_to_get))
-                # we always have NULL_REVISION present.
-                have_revs = have_revs.union(null_set)
-                # Check if the target is missing any ghosts we need.
-                ghosts_to_check.difference_update(have_revs)
-                if ghosts_to_check:
-                    # One of the caller's revision_ids is a ghost in both the
-                    # source and the target.
-                    raise errors.NoSuchRevision(
-                        self.source, ghosts_to_check.pop())
-                missing_revs.update(next_revs - have_revs)
-                # Because we may have walked past the original stop point, make
-                # sure everything is stopped
-                stop_revs = searcher.find_seen_ancestors(have_revs)
-                searcher.stop_searching_any(stop_revs)
-            if searcher_exhausted:
-                break
-        return searcher.get_result()
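
The removed walker batches its graph queries (up to _walk_to_common_revisions_batch_size ids per round trip) instead of probing the target once per revision. The same idea in miniature, using plain dicts in place of repository graph objects; this is an illustration of the batching pattern, not the bzrlib implementation:

    def missing_ancestors(source_parents, target_parents, start, batch_size=100):
        # source_parents/target_parents: {revision_id: (parent_ids,)} dicts.
        missing, pending, seen = set(), list(start), set()
        while pending:
            batch, pending = pending[:batch_size], pending[batch_size:]
            batch = [r for r in batch if r not in seen]
            seen.update(batch)
            # One membership query per batch, mirroring get_parent_map() above.
            have = set(r for r in batch if r in target_parents)
            for rev in batch:
                if rev in have:
                    continue  # the target already has it; stop walking this line
                missing.add(rev)
                pending.extend(source_parents.get(rev, ()))
        return missing
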
@@ -1788 +1729 @@
     @needs_read_lock
     def search_missing_revision_ids(self,
             revision_id=symbol_versioning.DEPRECATED_PARAMETER,
-            find_ghosts=True, revision_ids=None, if_present_ids=None):
+            find_ghosts=True, revision_ids=None, if_present_ids=None,
+            limit=None):
         """Return the revision ids that source has that target does not.

         :param revision_id: only return revision ids included by this
             to fetch for tags, which may reference absent revisions.
         :param find_ghosts: If True find missing revisions in deep history
             rather than just finding the surface difference.
+        :param limit: Maximum number of revisions to return, topologically
+            ordered
         :return: A bzrlib.graph.SearchResult.
         """
-        if symbol_versioning.deprecated_passed(revision_id):
-            symbol_versioning.warn(
-                'search_missing_revision_ids(revision_id=...) was '
-                'deprecated in 2.4. Use revision_ids=[...] instead.',
-                DeprecationWarning, stacklevel=2)
-            if revision_ids is not None:
-                raise AssertionError(
-                    'revision_ids is mutually exclusive with revision_id')
-            if revision_id is not None:
-                revision_ids = [revision_id]
-        # stop searching at found target revisions.
-        if not find_ghosts and (revision_ids is not None or if_present_ids is
-                not None):
-            return self._walk_to_common_revisions(revision_ids,
-                if_present_ids=if_present_ids)
-        # generic, possibly worst case, slow code path.
-        target_ids = set(self.target.all_revision_ids())
-        source_ids = self._present_source_revisions_for(
-            revision_ids, if_present_ids)
-        result_set = set(source_ids).difference(target_ids)
-        return self.source.revision_ids_to_search_result(result_set)
+        raise NotImplementedError(self.search_missing_revision_ids)
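
The removed body keeps the old revision_id keyword working through bzrlib's symbol_versioning helpers. The same guard in isolation, with an illustrative function name:

    from bzrlib import symbol_versioning

    def example(revision_id=symbol_versioning.DEPRECATED_PARAMETER,
                revision_ids=None):
        # Mirrors the deprecation shim in the removed body above.
        if symbol_versioning.deprecated_passed(revision_id):
            symbol_versioning.warn(
                'example(revision_id=...) is deprecated; use revision_ids=[...].',
                DeprecationWarning, stacklevel=2)
            if revision_ids is not None:
                raise AssertionError(
                    'revision_ids is mutually exclusive with revision_id')
            if revision_id is not None:
                revision_ids = [revision_id]
        return revision_ids
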
-    def _present_source_revisions_for(self, revision_ids, if_present_ids=None):
-        """Returns set of all revisions in ancestry of revision_ids present in
-        the source repo.
-
-        :param revision_ids: if None, all revisions in source are returned.
-        :param if_present_ids: like revision_ids, but if any/all of these are
-            absent no error is raised.
-        """
-        if revision_ids is not None or if_present_ids is not None:
-            # First, ensure all specified revisions exist.  Callers expect
-            # NoSuchRevision when they pass absent revision_ids here.
-            if revision_ids is None:
-                revision_ids = set()
-            if if_present_ids is None:
-                if_present_ids = set()
-            revision_ids = set(revision_ids)
-            if_present_ids = set(if_present_ids)
-            all_wanted_ids = revision_ids.union(if_present_ids)
-            graph = self.source.get_graph()
-            present_revs = set(graph.get_parent_map(all_wanted_ids))
-            missing = revision_ids.difference(present_revs)
-            if missing:
-                raise errors.NoSuchRevision(self.source, missing.pop())
-            found_ids = all_wanted_ids.intersection(present_revs)
-            source_ids = [rev_id for (rev_id, parents) in
-                          graph.iter_ancestry(found_ids)
-                          if rev_id != _mod_revision.NULL_REVISION
-                          and parents is not None]
-        else:
-            source_ids = self.source.all_revision_ids()
-        return set(source_ids)

     @staticmethod
     def _same_model(source, target):
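
The if_present_ids parameter exists for callers such as tag lookups, which may name revisions that are absent everywhere. A hedged sketch of that calling pattern through InterRepository.get(); after this change the concrete InterRepository chosen for the pair is what actually implements the search, and the repository objects and ids below are placeholders:

    from bzrlib.repository import InterRepository

    # 'remote_repo' and 'local_repo' are two open, read-locked repositories;
    # 'tag_revids' may name revisions that exist in neither of them.
    inter = InterRepository.get(remote_repo, local_repo)
    search = inter.search_missing_revision_ids(
        revision_ids=[tip_revid], if_present_ids=tag_revids, find_ghosts=False)
    print len(search.get_keys()), 'revisions to fetch'
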
@@ -1922 +1801 @@
         # trigger an assertion if not such
         repo._format.get_format_string()
         self.repo_dir = repo.bzrdir
-        pb.update('Moving repository to repository.backup')
+        pb.update(gettext('Moving repository to repository.backup'))
         self.repo_dir.transport.move('repository', 'repository.backup')
         backup_transport = self.repo_dir.transport.clone('repository.backup')
         repo._format.check_conversion_target(self.target_format)
         self.source_repo = repo._format.open(self.repo_dir,
             _override_transport=backup_transport)
-        pb.update('Creating new repository')
+        pb.update(gettext('Creating new repository'))
         converted = self.target_format.initialize(self.repo_dir,
                                                   self.source_repo.is_shared())
         converted.lock_write()
         try:
-            pb.update('Copying content')
+            pb.update(gettext('Copying content'))
             self.source_repo.copy_content_into(converted)
         finally:
             converted.unlock()
-        pb.update('Deleting old repository content')
+        pb.update(gettext('Deleting old repository content'))
        self.repo_dir.transport.delete_tree('repository.backup')
-        ui.ui_factory.note('repository converted')
+        ui.ui_factory.note(gettext('repository converted'))
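
These pb.update() and note() calls assume the module-level gettext import used for translatable UI strings elsewhere in bzrlib (bzrlib.i18n is an assumption here). A minimal sketch of the same pattern:

    from bzrlib.i18n import gettext

    def report_copy_progress(pb, step, total):
        # Only the human-readable message is translated; identifiers such as
        # 'repository.backup' stay literal, as in the hunk above.
        pb.update(gettext('Copying content'), step, total)
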