-# Copyright (C) 2006, 2007, 2010 Canonical Ltd
+# Copyright (C) 2006-2010 Canonical Ltd
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
...
     SuccessfulSmartServerResponse,
 from bzrlib.repository import _strip_NULL_ghosts, network_format_registry
+from bzrlib.recordcounter import RecordCounter
 from bzrlib import revision as _mod_revision
 from bzrlib.versionedfile import (
     NetworkRecordStream,
...
-            token = repository.lock_write(token=token)
+            token = repository.lock_write(token=token).repository_token
         except errors.LockContention, e:
             return FailedSmartServerResponse(('LockContention',))
         except errors.UnlockableTransport:
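
The only change in this hunk is that lock_write() now returns a result object
rather than the bare lock token, so the handler unwraps it through the result's
repository_token attribute before answering. A minimal sketch of the two
calling styles (repository stands for any object with the matching lock_write()
behaviour; nothing below is bzrlib's own helper code):

    def take_write_lock_old_style(repository, token=None):
        # older API: lock_write() returned the lock token itself
        return repository.lock_write(token=token)

    def take_write_lock_new_style(repository, token=None):
        # newer API: lock_write() returns a result carrying the token
        return repository.lock_write(token=token).repository_token
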
...
     yield pack_writer.begin()
     yield pack_writer.bytes_record(src_format.network_name(), '')
     for substream_type, substream in stream:
-        if substream_type == 'inventory-deltas':
-            # This doesn't feel like the ideal place to issue this warning;
-            # however we don't want to do it in the Repository that's
-            # generating the stream, because that might be on the server.
-            # Instead we try to observe it as the stream goes by.
-            ui.ui_factory.warn_cross_format_fetch(src_format,
         for record in substream:
             if record.storage_kind in ('chunked', 'fulltext'):
                 serialised = record_to_fulltext_bytes(record)
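
For orientation, the generator above frames everything as one pack container:
a container header, a first record carrying the source format's network name,
then one record per entry in each substream. A hedged sketch of that framing
using bzrlib.pack.ContainerSerialiser (the placeholder payload strings are
made up; only the call sequence mirrors the code above):

    from bzrlib import pack

    writer = pack.ContainerSerialiser()
    frames = [writer.begin()]
    # first record: the source repository format's network name, as in the
    # second yield above
    frames.append(writer.bytes_record('<src-format-network-name>', ''))
    # ...one bytes_record per record in each substream would follow here...
    frames.append(writer.end())
    byte_stream = iter(frames)

A byte stream of this shape is what _byte_stream_to_stream() further down is
expected to decode.
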
...
     :ivar first_bytes: The first bytes to give the next NetworkRecordStream.
-    def __init__(self, byte_stream):
+    def __init__(self, byte_stream, record_counter):
         """Create a _ByteStreamDecoder."""
         self.stream_decoder = pack.ContainerPushParser()
         self.current_type = None
         self.first_bytes = None
         self.byte_stream = byte_stream
+        self._record_counter = record_counter
     def iter_stream_decoder(self):
         """Iterate the contents of the pack from stream_decoder."""
...
     def record_stream(self):
         """Yield substream_type, substream from the byte stream."""
+        def wrap_and_count(pb, rc, substream):
+            """Yield records from stream while showing progress."""
+            if self.current_type != 'revisions' and self.key_count != 0:
+                # As we know the number of revisions now (in self.key_count)
+                # we can setup and use record_counter (rc).
+                if not rc.is_initialized():
+                    rc.setup(self.key_count, self.key_count)
+            for record in substream.read():
+                if rc.is_initialized() and counter == rc.STEP:
+                    rc.increment(counter)
+                    pb.update('Estimate', rc.current, rc.max)
+                if self.current_type == 'revisions':
+                    # Total records is proportional to number of revs
+                    # to fetch. With remote, we used self.key_count to
+                    # track the number of revs. Once we have the revs
+                    # counts in self.key_count, the progress bar changes
+                    # from 'Estimating..' to 'Estimate' above.
+                    if counter == rc.STEP:
+                        pb.update('Estimating..', self.key_count)
         self.seed_state()
+        pb = ui.ui_factory.nested_progress_bar()
+        rc = self._record_counter
         # Make and consume sub generators, one per substream type:
         while self.first_bytes is not None:
             substream = NetworkRecordStream(self.iter_substream_bytes())
             # after substream is fully consumed, self.current_type is set to
             # the next type, and self.first_bytes is set to the matching bytes.
-            yield self.current_type, substream.read()
+            yield self.current_type, wrap_and_count(pb, rc, substream)
+        pb.update('Done', rc.max, rc.max)
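
The comments above describe a two-phase progress display: while revision
records are still streaming past, the total is only being estimated
('Estimating..'); once the revision count is known in key_count, the counter
is seeded and later substreams report against a real maximum ('Estimate'). A
small standalone sketch of the STEP-throttling idea, independent of
RecordCounter (names and the step value here are illustrative):

    def throttled_progress(records, pb, step=100):
        """Yield records, poking the progress bar only every `step` records."""
        counter = 0
        seen = 0
        for record in records:
            seen += 1
            counter += 1
            if counter == step:
                pb.update('Estimating..', seen)
                counter = 0
            yield record

The real wrap_and_count() applies the same throttling against rc.STEP, but
switches the message and passes rc.current/rc.max once the counter has been
seeded with the revision count.
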
...
     def seed_state(self):
         """Prepare the _ByteStreamDecoder to decode from the pack stream."""
         list(self.iter_substream_bytes())
...
-def _byte_stream_to_stream(byte_stream):
+def _byte_stream_to_stream(byte_stream, record_counter=None):
     """Convert a byte stream into a format and a stream.
     :param byte_stream: A bytes iterator, as output by _stream_to_byte_stream.
     :return: (RepositoryFormat, stream_generator)
-    decoder = _ByteStreamDecoder(byte_stream)
+    decoder = _ByteStreamDecoder(byte_stream, record_counter)
     for bytes in byte_stream:
         decoder.stream_decoder.accept_bytes(bytes)
         for record in decoder.stream_decoder.read_pending_records(max=1):
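
A hedged usage sketch for the function above: feeding it the byte stream of a
smart-server response together with a RecordCounter so that record_stream()
can drive a progress bar. response_byte_iter is a placeholder for whatever
byte iterator the transport actually hands back, and the record handling is
deliberately left as a stub:

    from bzrlib.recordcounter import RecordCounter

    def decode_response(response_byte_iter):
        rc = RecordCounter()
        src_format, stream = _byte_stream_to_stream(response_byte_iter,
            record_counter=rc)
        # stream yields (substream_type, substream) pairs; each substream
        # yields the individual records.
        for substream_type, substream in stream:
            for record in substream:
                pass  # hand records to the target repository's sink here
        return src_format
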
...
     def _tarball_of_dir(self, dirname, compression, ofile):
         filename = os.path.basename(ofile.name)
         tarball = tarfile.open(fileobj=ofile, name=filename,
             mode='w|' + compression)
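
For reference, the 'w|' + compression mode used above asks tarfile to write
the (optionally compressed) archive to the supplied file object as a
forward-only stream, with no seeking required. A tiny standalone sketch of
that mode (the paths here are made up):

    import tarfile

    out = open('/tmp/example.tar.gz', 'wb')
    tarball = tarfile.open(fileobj=out, mode='w|gz')
    tarball.add('some-directory')
    tarball.close()
    out.close()
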