13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
from bzrlib.lazy_import import lazy_import
18
18
lazy_import(globals(), """
57
57
from bzrlib import registry
58
58
from bzrlib.symbol_versioning import (
64
61
from bzrlib.trace import (
65
62
log_exception_quietly, note, mutter, mutter_callsite, warning)
1348
1345
return InterRepository.get(other, self).search_missing_revision_ids(
1349
1346
revision_id, find_ghosts)
1351
@deprecated_method(one_two)
1353
def missing_revision_ids(self, other, revision_id=None, find_ghosts=True):
1354
"""Return the revision ids that other has that this does not.
1356
These are returned in topological order.
1358
revision_id: only return revision ids included by revision_id.
1360
keys = self.search_missing_revision_ids(
1361
other, revision_id, find_ghosts).get_keys()
1364
parents = other.get_graph().get_parent_map(keys)
1367
return tsort.topo_sort(parents)
1370
1349
def open(base):
1371
1350
"""Open the repository rooted at base.
1916
1895
for record in self.texts.get_record_stream(text_keys, 'unordered', True):
1917
1896
if record.storage_kind == 'absent':
1918
1897
raise errors.RevisionNotPresent(record.key, self)
1919
yield text_keys[record.key], record.get_bytes_as('fulltext')
1898
yield text_keys[record.key], record.get_bytes_as('chunked')
1921
1900
def _generate_text_key_index(self, text_key_references=None,
1922
1901
ancestors=None):
2083
2062
inventories in memory, but will only parse a single inventory at a
2065
:param revision_ids: The expected revision ids of the inventories.
2086
2066
:return: An iterator of inventories.
2088
2068
if ((None in revision_ids)
2294
2274
implicitly lock for the user.
2298
@deprecated_method(one_six)
2299
def print_file(self, file, revision_id):
2300
"""Print `file` to stdout.
2302
FIXME RBC 20060125 as John Meinel points out this is a bad api
2303
- it writes to stdout, it assumes that that is valid etc. Fix
2304
by creating a new more flexible convenience function.
2306
tree = self.revision_tree(revision_id)
2307
# use inventory as it was in that revision
2308
file_id = tree.inventory.path2id(file)
2310
# TODO: jam 20060427 Write a test for this code path
2311
# it had a bug in it, and was raising the wrong
2313
raise errors.BzrError("%r is not present in revision %s" % (file, revision_id))
2314
tree.print_file(file_id)
2316
2277
def get_transaction(self):
    """Return the repository's current transaction.

    Thin delegation: the transaction object is owned and managed by
    ``self.control_files`` (presumably a LockableFiles-style object --
    not visible in this chunk, TODO confirm); this method merely
    forwards the lookup.
    """
    return self.control_files.get_transaction()
@deprecated_method(one_one)
def get_parents(self, revision_ids):
    """See StackedParentsProvider.get_parents.

    Deprecated (since 1.1, per the decorator) in favour of
    ``get_parent_map``; implemented as a shim over it.

    :param revision_ids: iterable of revision ids to look up.
    :return: a list aligned with ``revision_ids``; each entry is the
        parents value from ``get_parent_map``, or None for ids the map
        does not contain.
    """
    parent_map = self.get_parent_map(revision_ids)
    return [parent_map.get(r, None) for r in revision_ids]
2325
2280
def get_parent_map(self, revision_ids):
2326
2281
"""See graph._StackedParentsProvider.get_parent_map"""
2327
2282
# revisions index works in keys; this just works in revisions
3089
3044
return searcher.get_result()
3091
@deprecated_method(one_two)
3093
def missing_revision_ids(self, revision_id=None, find_ghosts=True):
3094
"""Return the revision ids that source has that target does not.
3096
These are returned in topological order.
3098
:param revision_id: only return revision ids included by this
3100
:param find_ghosts: If True find missing revisions in deep history
3101
rather than just finding the surface difference.
3103
return list(self.search_missing_revision_ids(
3104
revision_id, find_ghosts).get_keys())
3106
3046
@needs_read_lock
3107
3047
def search_missing_revision_ids(self, revision_id=None, find_ghosts=True):
3108
3048
"""Return the revision ids that source has that target does not.
3852
3792
def _locked_insert_stream(self, stream, src_format):
3853
3793
to_serializer = self.target_repo._format._serializer
3854
3794
src_serializer = src_format._serializer
3795
if to_serializer == src_serializer:
3796
# If serializers match and the target is a pack repository, set the
3797
# write cache size on the new pack. This avoids poor performance
3798
# on transports where append is unbuffered (such as
3799
# RemoteTransport). This is safe to do because nothing should read
3800
# back from the target repository while a stream with matching
3801
# serialization is being inserted.
3802
# The exception is that a delta record from the source that should
3803
# be a fulltext may need to be expanded by the target (see
3804
# test_fetch_revisions_with_deltas_into_pack); but we take care to
3805
# explicitly flush any buffered writes first in that rare case.
3807
new_pack = self.target_repo._pack_collection._new_pack
3808
except AttributeError:
3809
# Not a pack repository
3812
new_pack.set_write_cache_size(1024*1024)
3855
3813
for substream_type, substream in stream:
3856
3814
if substream_type == 'texts':
3857
3815
self.target_repo.texts.insert_record_stream(substream)