~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/smart/repository.py

  • Committer: Canonical.com Patch Queue Manager
  • Date: 2010-09-01 08:02:42 UTC
  • mfrom: (5390.3.3 faster-revert-593560)
  • Revision ID: pqm@pqm.ubuntu.com-20100901080242-esg62ody4frwmy66
(spiv) Avoid repeatedly calling self.target.all_file_ids() in
 InterTree.iter_changes. (Andrew Bennetts)

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006, 2007 Canonical Ltd
 
1
# Copyright (C) 2006-2010 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
20
20
import os
21
21
import Queue
22
22
import sys
23
 
import tarfile
24
23
import tempfile
25
24
import threading
26
25
 
30
29
    graph,
31
30
    osutils,
32
31
    pack,
 
32
    ui,
33
33
    versionedfile,
34
34
    )
35
35
from bzrlib.bzrdir import BzrDir
39
39
    SuccessfulSmartServerResponse,
40
40
    )
41
41
from bzrlib.repository import _strip_NULL_ghosts, network_format_registry
 
42
from bzrlib.recordcounter import RecordCounter
42
43
from bzrlib import revision as _mod_revision
43
44
from bzrlib.versionedfile import (
44
45
    NetworkRecordStream,
392
393
        if token == '':
393
394
            token = None
394
395
        try:
395
 
            token = repository.lock_write(token=token)
 
396
            token = repository.lock_write(token=token).repository_token
396
397
        except errors.LockContention, e:
397
398
            return FailedSmartServerResponse(('LockContention',))
398
399
        except errors.UnlockableTransport:
544
545
    :ivar first_bytes: The first bytes to give the next NetworkRecordStream.
545
546
    """
546
547
 
547
 
    def __init__(self, byte_stream, record_counter):
        """Create a _ByteStreamDecoder.

        :param byte_stream: The bytes iterator to decode records from.
        :param record_counter: A RecordCounter used for progress reporting,
            or None to disable progress estimates (see record_stream).
        """
        # Incremental parser for the pack container framing.
        self.stream_decoder = pack.ContainerPushParser()
        # Name of the substream currently being decoded (e.g. 'revisions'),
        # or None before decoding starts.
        self.current_type = None
        # First bytes to give the next NetworkRecordStream; None once the
        # byte stream is exhausted.
        self.first_bytes = None
        self.byte_stream = byte_stream
        self._record_counter = record_counter
        # Count of revision records seen so far; used by record_stream to
        # seed the progress estimate once the revisions substream is done.
        self.key_count = 0
553
556
 
554
557
    def iter_stream_decoder(self):
555
558
        """Iterate the contents of the pack from stream_decoder."""
580
583
 
581
584
    def record_stream(self):
        """Yield (substream_type, substream) pairs from the byte stream.

        Each substream is wrapped so that consuming it updates a nested
        progress bar: while the number of revisions is still unknown the bar
        shows 'Estimating..'; once self.key_count is known it switches to
        'Estimate'.
        """
        def wrap_and_count(pb, rc, substream):
            """Yield records from stream while showing progress."""
            counter = 0
            if rc:
                if self.current_type != 'revisions' and self.key_count != 0:
                    # As we know the number of revisions now (in self.key_count)
                    # we can setup and use record_counter (rc).
                    if not rc.is_initialized():
                        rc.setup(self.key_count, self.key_count)
            for record in substream.read():
                if rc:
                    # Flush progress every rc.STEP records once initialized.
                    if rc.is_initialized() and counter == rc.STEP:
                        rc.increment(counter)
                        pb.update('Estimate', rc.current, rc.max)
                        counter = 0
                    if self.current_type == 'revisions':
                        # Total records is proportional to number of revs
                        # to fetch. With remote, we used self.key_count to
                        # track the number of revs. Once we have the revs
                        # counts in self.key_count, the progress bar changes
                        # from 'Estimating..' to 'Estimate' above.
                        self.key_count += 1
                        if counter == rc.STEP:
                            pb.update('Estimating..', self.key_count)
                            counter = 0
                counter += 1
                yield record

        self.seed_state()
        pb = ui.ui_factory.nested_progress_bar()
        rc = self._record_counter
        # Make and consume sub generators, one per substream type:
        while self.first_bytes is not None:
            substream = NetworkRecordStream(self.iter_substream_bytes())
            # after substream is fully consumed, self.current_type is set to
            # the next type, and self.first_bytes is set to the matching bytes.
            yield self.current_type, wrap_and_count(pb, rc, substream)
        if rc:
            pb.update('Done', rc.max, rc.max)
        # NOTE(review): this cleanup only runs if the caller exhausts the
        # generator; an abandoned iteration leaves the progress bar unfinished.
        pb.finished()
590
626
 
591
627
    def seed_state(self):
592
628
        """Prepare the _ByteStreamDecoder to decode from the pack stream."""
597
633
        list(self.iter_substream_bytes())
598
634
 
599
635
 
600
 
def _byte_stream_to_stream(byte_stream):
 
636
def _byte_stream_to_stream(byte_stream, record_counter=None):
601
637
    """Convert a byte stream into a format and a stream.
602
638
 
603
639
    :param byte_stream: A bytes iterator, as output by _stream_to_byte_stream.
604
640
    :return: (RepositoryFormat, stream_generator)
605
641
    """
606
 
    decoder = _ByteStreamDecoder(byte_stream)
 
642
    decoder = _ByteStreamDecoder(byte_stream, record_counter)
607
643
    for bytes in byte_stream:
608
644
        decoder.stream_decoder.accept_bytes(bytes)
609
645
        for record in decoder.stream_decoder.read_pending_records(max=1):
675
711
            temp.close()
676
712
 
677
713
    def _tarball_of_dir(self, dirname, compression, ofile):
 
714
        import tarfile
678
715
        filename = os.path.basename(ofile.name)
679
716
        tarball = tarfile.open(fileobj=ofile, name=filename,
680
717
            mode='w|' + compression)