~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/knit.py

  • Committer: Andrew Bennetts
  • Date: 2011-02-25 08:45:27 UTC
  • mto: This revision was merged to the branch mainline in revision 5695.
  • Revision ID: andrew.bennetts@canonical.com-20110225084527-0ucp7p00d00hoqon
Add another test.

--- bzrlib/knit.py (old)
+++ bzrlib/knit.py (new)
@@ -1,4 +1,4 @@
-# Copyright (C) 2006-2010 Canonical Ltd
+# Copyright (C) 2006-2011 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -54,6 +54,7 @@
 
 from cStringIO import StringIO
 from itertools import izip
+import gzip
 import operator
 import os
 import sys
@@ -68,6 +69,7 @@
     index as _mod_index,
     lru_cache,
     pack,
+    patiencediff,
     progress,
     static_tuple,
     trace,
@@ -79,7 +81,6 @@
 from bzrlib import (
     errors,
     osutils,
-    patiencediff,
     )
 from bzrlib.errors import (
     FileExists,
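
The import changes read as a pair with the rest of the diff: import gzip brings in the standard-library module that later replaces tuned_gzip.GzipFile, and patiencediff moves out of the eager from-bzrlib import block into the block a few lines above it, presumably knit.py's lazy_import section (that placement is an assumption; the surrounding lines are not shown in this diff). A minimal sketch of how a bzrlib lazy_import block defers the actual import, under that assumption:

# Sketch only: the exact contents of knit.py's lazy_import block are an
# assumption, not part of this diff.
from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from bzrlib import patiencediff
""")

# 'patiencediff' is now a placeholder object in this namespace; the real
# module is only imported the first time an attribute such as
# patiencediff.PatienceSequenceMatcher is looked up.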
@@ -882,7 +883,7 @@
             self._factory = KnitAnnotateFactory()
         else:
             self._factory = KnitPlainFactory()
-        self._fallback_vfs = []
+        self._immediate_fallback_vfs = []
         self._reload_func = reload_func
 
     def __repr__(self):
@@ -896,7 +897,7 @@
 
         :param a_versioned_files: A VersionedFiles object.
         """
-        self._fallback_vfs.append(a_versioned_files)
+        self._immediate_fallback_vfs.append(a_versioned_files)
 
     def add_lines(self, key, parents, lines, parent_texts=None,
         left_matching_blocks=None, nostore_sha=None, random_id=False,
@@ -1069,7 +1070,7 @@
                     raise errors.KnitCorrupt(self,
                         "Missing basis parent %s for %s" % (
                         compression_parent, key))
-        for fallback_vfs in self._fallback_vfs:
+        for fallback_vfs in self._immediate_fallback_vfs:
             fallback_vfs.check()
 
     def _check_add(self, key, lines, random_id, check_content):
@@ -1195,7 +1196,7 @@
     def get_known_graph_ancestry(self, keys):
         """Get a KnownGraph instance with the ancestry of keys."""
         parent_map, missing_keys = self._index.find_ancestry(keys)
-        for fallback in self._fallback_vfs:
+        for fallback in self._transitive_fallbacks():
             if not missing_keys:
                 break
             (f_parent_map, f_missing_keys) = fallback._index.find_ancestry(
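
The new loop in get_known_graph_ancestry calls self._transitive_fallbacks() instead of iterating the renamed attribute directly, which suggests a distinction between the direct fallbacks a knit was given and the full stacked chain behind them. The helper's body is not part of this diff; the following is only a guessed sketch of the kind of traversal such a method could perform:

def _transitive_fallbacks(self):
    """Hypothetical sketch, not the real implementation.

    Walk self._immediate_fallback_vfs and, recursively, the fallbacks of
    those fallbacks, so ancestry searches can reach every stacked source.
    """
    seen = set()
    pending = list(self._immediate_fallback_vfs)
    result = []
    while pending:
        vfs = pending.pop(0)
        if id(vfs) in seen:
            continue
        seen.add(id(vfs))
        result.append(vfs)
        # Assumes each fallback exposes the same attribute; that is a guess.
        pending.extend(getattr(vfs, '_immediate_fallback_vfs', []))
    return result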
@@ -1225,7 +1226,7 @@
             and so on.
         """
         result = {}
-        sources = [self._index] + self._fallback_vfs
+        sources = [self._index] + self._immediate_fallback_vfs
         source_results = []
         missing = set(keys)
         for source in sources:
@@ -1525,7 +1526,7 @@
                         yield KnitContentFactory(key, global_map[key],
                             record_details, None, raw_data, self._factory.annotated, None)
                 else:
-                    vf = self._fallback_vfs[parent_maps.index(source) - 1]
+                    vf = self._immediate_fallback_vfs[parent_maps.index(source) - 1]
                     for record in vf.get_record_stream(keys, ordering,
                         include_delta_closure):
                         yield record
@@ -1541,7 +1542,7 @@
             # record entry 2 is the 'digest'.
             result[key] = details[2]
         missing.difference_update(set(result))
-        for source in self._fallback_vfs:
+        for source in self._immediate_fallback_vfs:
             if not missing:
                 break
             new_result = source.get_sha1s(missing)
@@ -1618,7 +1619,7 @@
                 raise RevisionNotPresent([record.key], self)
             elif ((record.storage_kind in knit_types)
                   and (compression_parent is None
-                       or not self._fallback_vfs
+                       or not self._immediate_fallback_vfs
                        or self._index.has_key(compression_parent)
                        or not self.has_key(compression_parent))):
                 # we can insert the knit record literally if either it has no
@@ -1796,11 +1797,11 @@
         # vfs, and hope to find them there.  Note that if the keys are found
         # but had no changes or no content, the fallback may not return
         # anything.
-        if keys and not self._fallback_vfs:
+        if keys and not self._immediate_fallback_vfs:
             # XXX: strictly the second parameter is meant to be the file id
             # but it's not easily accessible here.
             raise RevisionNotPresent(keys, repr(self))
-        for source in self._fallback_vfs:
+        for source in self._immediate_fallback_vfs:
             if not keys:
                 break
             source_keys = set()
@@ -1879,7 +1880,7 @@
         :return: the header and the decompressor stream.
                  as (stream, header_record)
         """
-        df = tuned_gzip.GzipFile(mode='rb', fileobj=StringIO(raw_data))
+        df = gzip.GzipFile(mode='rb', fileobj=StringIO(raw_data))
         try:
             # Current serialise
             rec = self._check_header(key, df.readline())
@@ -1894,7 +1895,7 @@
         # 4168 calls in 2880 217 internal
         # 4168 calls to _parse_record_header in 2121
         # 4168 calls to readlines in 330
-        df = tuned_gzip.GzipFile(mode='rb', fileobj=StringIO(data))
+        df = gzip.GzipFile(mode='rb', fileobj=StringIO(data))
         try:
             record_contents = df.readlines()
         except Exception, e:
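
Both record-parsing paths now build their decompressor with the standard library's gzip module (imported near the top of the diff) rather than bzrlib's tuned_gzip wrapper. As a reference point, this is how the new call shape behaves on an in-memory blob; the sample record text below is made up and is not the actual knit record format:

from cStringIO import StringIO
import gzip

# Build a small gzip blob in memory as a stand-in for a stored knit record.
buf = StringIO()
gz = gzip.GzipFile(mode='wb', fileobj=buf)
gz.write('fake-header line\nfake content line 1\nfake content line 2\n')
gz.close()
raw_data = buf.getvalue()

# Read it back the way the patched code does: wrap the raw bytes in a
# StringIO and let the stdlib GzipFile decompress on demand.
df = gzip.GzipFile(mode='rb', fileobj=StringIO(raw_data))
try:
    header = df.readline()            # first line of the record
    record_contents = df.readlines()  # the remaining lines
finally:
    df.close()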
@@ -2015,7 +2016,7 @@
         """See VersionedFiles.keys."""
         if 'evil' in debug.debug_flags:
             trace.mutter_callsite(2, "keys scales with size of history")
-        sources = [self._index] + self._fallback_vfs
+        sources = [self._index] + self._immediate_fallback_vfs
         result = set()
         for source in sources:
             result.update(source.keys())
@@ -2061,7 +2062,7 @@
 
         missing_keys = set(nonlocal_keys)
         # Read from remote versioned file instances and provide to our caller.
-        for source in self.vf._fallback_vfs:
+        for source in self.vf._immediate_fallback_vfs:
             if not missing_keys:
                 break
             # Loop over fallback repositories asking them for texts - ignore
@@ -3417,10 +3418,6 @@
             raise exc_class, exc_value, exc_traceback
 
 
-# Deprecated, use PatienceSequenceMatcher instead
-KnitSequenceMatcher = patiencediff.PatienceSequenceMatcher
-
-
 def annotate_knit(knit, revision_id):
     """Annotate a knit with no cached annotations.
 
@@ -3524,8 +3521,8 @@
         return records, ann_keys
 
     def _get_needed_texts(self, key, pb=None):
-        # if True or len(self._vf._fallback_vfs) > 0:
-        if len(self._vf._fallback_vfs) > 0:
+        # if True or len(self._vf._immediate_fallback_vfs) > 0:
+        if len(self._vf._immediate_fallback_vfs) > 0:
             # If we have fallbacks, go to the generic path
             for v in annotate.Annotator._get_needed_texts(self, key, pb=pb):
                 yield v
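
One hunk above also drops the long-deprecated module-level alias KnitSequenceMatcher; patiencediff.PatienceSequenceMatcher is the replacement named in the deleted comment. Its constructor follows difflib.SequenceMatcher, so typical usage, shown purely as an illustration, is:

from bzrlib import patiencediff

a = ['one\n', 'two\n', 'three\n']
b = ['one\n', 'two and a half\n', 'three\n']

# Same constructor shape as difflib.SequenceMatcher(isjunk, a, b).
matcher = patiencediff.PatienceSequenceMatcher(None, a, b)
for tag, i1, i2, j1, j2 in matcher.get_opcodes():
    # Prints 'equal'/'replace'/... with the matched slices from each side.
    print tag, a[i1:i2], b[j1:j2]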