~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/knit.py

  • Committer: Vincent Ladeuil
  • Date: 2010-10-26 08:08:23 UTC
  • mfrom: (5514.1.1 665100-content-type)
  • mto: This revision was merged to the branch mainline in revision 5516.
  • Revision ID: v.ladeuil+lp@free.fr-20101026080823-3wggo03b7cpn9908
Correctly set the Content-Type header when POSTing http requests
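
For readers unfamiliar with the issue named in the commit message: when a POST body is sent through Python 2's urllib2 without an explicit Content-Type, the library silently labels it application/x-www-form-urlencoded, which is wrong for non-form payloads. The sketch below only illustrates attaching an explicit Content-Type to a POST; the URL, payload, and header value are placeholders, and this is not the code changed by this revision.

# Illustrative sketch only -- not bzrlib's HTTP transport implementation.
# The URL, payload and header value are hypothetical.
import urllib2

body = 'raw request bytes'
request = urllib2.Request('http://example.com/.bzr/smart', data=body)
# Without this, urllib2 defaults any POST body to
# 'application/x-www-form-urlencoded'.
request.add_header('Content-Type', 'application/octet-stream')
response = urllib2.urlopen(request)
print response.getcode()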

@@ -1,4 +1,4 @@
-# Copyright (C) 2006-2011 Canonical Ltd
+# Copyright (C) 2006-2010 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -56,42 +56,45 @@
 from itertools import izip
 import operator
 import os
+import sys
 
 from bzrlib.lazy_import import lazy_import
 lazy_import(globals(), """
-import gzip
-
 from bzrlib import (
+    annotate,
     debug,
     diff,
     graph as _mod_graph,
     index as _mod_index,
+    lru_cache,
     pack,
     patiencediff,
+    progress,
     static_tuple,
     trace,
     tsort,
     tuned_gzip,
     ui,
     )
-
-from bzrlib.repofmt import pack_repo
 """)
 from bzrlib import (
-    annotate,
     errors,
     osutils,
     )
 from bzrlib.errors import (
+    FileExists,
     NoSuchFile,
+    KnitError,
     InvalidRevisionId,
     KnitCorrupt,
     KnitHeaderError,
     RevisionNotPresent,
+    RevisionAlreadyPresent,
     SHA1KnitCorrupt,
     )
 from bzrlib.osutils import (
     contains_whitespace,
+    contains_linebreaks,
     sha_string,
     sha_strings,
     split_lines,
@@ -101,7 +104,9 @@
     adapter_registry,
     ConstantMapper,
     ContentFactory,
+    ChunkedContentFactory,
     sort_groupcompress,
+    VersionedFile,
     VersionedFiles,
     )
 
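The lazy_import(globals(), """...""") call in the hunk above is bzrlib's lazy-import helper: the names listed in the string block are bound to lightweight placeholders, and the real import only happens on first attribute access. A minimal usage sketch, assuming bzrlib is importable; the modules imported here are chosen only as examples:

# Minimal lazy_import sketch; 'errors' and 'osutils' are example modules.
from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from bzrlib import (
    errors,
    osutils,
    )
""")

# 'osutils' is a placeholder until this first attribute access, which
# triggers the real import and rebinds the global name.
print osutils.getcwd()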
@@ -800,7 +805,7 @@
         writer.begin()
         index = _KnitGraphIndex(graph_index, lambda:True, parents=parents,
             deltas=delta, add_callback=graph_index.add_nodes)
-        access = pack_repo._DirectPackAccess({})
+        access = _DirectPackAccess({})
         access.set_writer(writer, graph_index, (transport, 'newpack'))
         result = KnitVersionedFiles(index, access,
             max_delta_chain=max_delta_chain)
@@ -877,7 +882,7 @@
             self._factory = KnitAnnotateFactory()
         else:
             self._factory = KnitPlainFactory()
-        self._immediate_fallback_vfs = []
+        self._fallback_vfs = []
         self._reload_func = reload_func
 
     def __repr__(self):
@@ -891,7 +896,7 @@
 
         :param a_versioned_files: A VersionedFiles object.
         """
-        self._immediate_fallback_vfs.append(a_versioned_files)
+        self._fallback_vfs.append(a_versioned_files)
 
     def add_lines(self, key, parents, lines, parent_texts=None,
         left_matching_blocks=None, nostore_sha=None, random_id=False,
@@ -1064,7 +1069,7 @@
                     raise errors.KnitCorrupt(self,
                         "Missing basis parent %s for %s" % (
                         compression_parent, key))
-        for fallback_vfs in self._immediate_fallback_vfs:
+        for fallback_vfs in self._fallback_vfs:
             fallback_vfs.check()
 
     def _check_add(self, key, lines, random_id, check_content):
@@ -1190,7 +1195,7 @@
     def get_known_graph_ancestry(self, keys):
         """Get a KnownGraph instance with the ancestry of keys."""
         parent_map, missing_keys = self._index.find_ancestry(keys)
-        for fallback in self._transitive_fallbacks():
+        for fallback in self._fallback_vfs:
             if not missing_keys:
                 break
             (f_parent_map, f_missing_keys) = fallback._index.find_ancestry(
@@ -1220,7 +1225,7 @@
             and so on.
         """
         result = {}
-        sources = [self._index] + self._immediate_fallback_vfs
+        sources = [self._index] + self._fallback_vfs
         source_results = []
         missing = set(keys)
         for source in sources:
@@ -1520,7 +1525,7 @@
                         yield KnitContentFactory(key, global_map[key],
                             record_details, None, raw_data, self._factory.annotated, None)
                 else:
-                    vf = self._immediate_fallback_vfs[parent_maps.index(source) - 1]
+                    vf = self._fallback_vfs[parent_maps.index(source) - 1]
                     for record in vf.get_record_stream(keys, ordering,
                         include_delta_closure):
                         yield record
@@ -1536,7 +1541,7 @@
             # record entry 2 is the 'digest'.
             result[key] = details[2]
         missing.difference_update(set(result))
-        for source in self._immediate_fallback_vfs:
+        for source in self._fallback_vfs:
             if not missing:
                 break
             new_result = source.get_sha1s(missing)
@@ -1613,7 +1618,7 @@
                 raise RevisionNotPresent([record.key], self)
             elif ((record.storage_kind in knit_types)
                   and (compression_parent is None
-                       or not self._immediate_fallback_vfs
+                       or not self._fallback_vfs
                        or self._index.has_key(compression_parent)
                        or not self.has_key(compression_parent))):
                 # we can insert the knit record literally if either it has no
@@ -1791,11 +1796,11 @@
         # vfs, and hope to find them there.  Note that if the keys are found
         # but had no changes or no content, the fallback may not return
         # anything.
-        if keys and not self._immediate_fallback_vfs:
+        if keys and not self._fallback_vfs:
             # XXX: strictly the second parameter is meant to be the file id
             # but it's not easily accessible here.
             raise RevisionNotPresent(keys, repr(self))
-        for source in self._immediate_fallback_vfs:
+        for source in self._fallback_vfs:
             if not keys:
                 break
             source_keys = set()
@@ -1874,7 +1879,7 @@
         :return: the header and the decompressor stream.
                  as (stream, header_record)
         """
-        df = gzip.GzipFile(mode='rb', fileobj=StringIO(raw_data))
+        df = tuned_gzip.GzipFile(mode='rb', fileobj=StringIO(raw_data))
         try:
             # Current serialise
             rec = self._check_header(key, df.readline())
@@ -1889,7 +1894,7 @@
         # 4168 calls in 2880 217 internal
         # 4168 calls to _parse_record_header in 2121
         # 4168 calls to readlines in 330
-        df = gzip.GzipFile(mode='rb', fileobj=StringIO(data))
+        df = tuned_gzip.GzipFile(mode='rb', fileobj=StringIO(data))
         try:
             record_contents = df.readlines()
         except Exception, e:
@@ -2010,7 +2015,7 @@
         """See VersionedFiles.keys."""
         if 'evil' in debug.debug_flags:
             trace.mutter_callsite(2, "keys scales with size of history")
-        sources = [self._index] + self._immediate_fallback_vfs
+        sources = [self._index] + self._fallback_vfs
         result = set()
         for source in sources:
             result.update(source.keys())
@@ -2056,7 +2061,7 @@
 
         missing_keys = set(nonlocal_keys)
         # Read from remote versioned file instances and provide to our caller.
-        for source in self.vf._immediate_fallback_vfs:
+        for source in self.vf._fallback_vfs:
             if not missing_keys:
                 break
             # Loop over fallback repositories asking them for texts - ignore
@@ -3515,8 +3520,8 @@
         return records, ann_keys
 
     def _get_needed_texts(self, key, pb=None):
-        # if True or len(self._vf._immediate_fallback_vfs) > 0:
-        if len(self._vf._immediate_fallback_vfs) > 0:
+        # if True or len(self._vf._fallback_vfs) > 0:
+        if len(self._vf._fallback_vfs) > 0:
             # If we have fallbacks, go to the generic path
             for v in annotate.Annotator._get_needed_texts(self, key, pb=pb):
                 yield v