~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/knit.py

  • Committer: John Arbash Meinel
  • Date: 2008-07-09 19:57:36 UTC
  • mto: This revision was merged to the branch mainline in revision 3564.
  • Revision ID: john@arbash-meinel.com-20080709195736-s9cg26gnym3lf2d0
cleanup a few imports to be lazily loaded.
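
The mechanism behind the cleanup is bzrlib.lazy_import.lazy_import, already used in the block below: it is passed the calling module's namespace and a string of import statements, binds the named modules as lazy placeholders, and performs the real import only on first use. A minimal sketch of the usage pattern, assuming a bzrlib tree on the path (the sorted_keys helper is hypothetical; the lazy_import call mirrors the one in this diff):

from bzrlib.lazy_import import lazy_import

# Bind 'tsort' and 'tuned_gzip' in this namespace as lazy placeholders;
# bzrlib.tsort itself is not imported until an attribute is first touched.
lazy_import(globals(), """
from bzrlib import (
    tsort,
    tuned_gzip,
    )
""")

def sorted_keys(parent_map):
    # The first use of tsort.topo_sort here triggers the real import.
    return tsort.topo_sort(parent_map)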

@@ -64,33 +64,27 @@
 from itertools import izip, chain
 import operator
 import os
-import urllib
-import sys
-import warnings
-from zlib import Z_DEFAULT_COMPRESSION
 
-import bzrlib
 from bzrlib.lazy_import import lazy_import
 lazy_import(globals(), """
 from bzrlib import (
     annotate,
+    debug,
+    diff,
     graph as _mod_graph,
     index as _mod_index,
     lru_cache,
     pack,
+    progress,
     trace,
+    tsort,
+    tuned_gzip,
     )
 """)
 from bzrlib import (
-    cache_utf8,
-    debug,
-    diff,
     errors,
     osutils,
     patiencediff,
-    progress,
-    merge,
-    ui,
     )
 from bzrlib.errors import (
     FileExists,
@@ -102,7 +96,6 @@
     RevisionNotPresent,
     RevisionAlreadyPresent,
     )
-from bzrlib.graph import Graph
 from bzrlib.osutils import (
     contains_whitespace,
     contains_linebreaks,
@@ -110,9 +103,6 @@
     sha_strings,
     split_lines,
     )
-from bzrlib.tsort import topo_sort
-from bzrlib.tuned_gzip import GzipFile, bytes_to_gzip
-import bzrlib.ui
 from bzrlib.versionedfile import (
     AbsentContentFactory,
     adapter_registry,
@@ -122,7 +112,6 @@
     VersionedFile,
     VersionedFiles,
     )
-import bzrlib.weave
 
 
 # TODO: Split out code specific to this format into an associated object.
@@ -1181,7 +1170,7 @@
         global_map, parent_maps = self._get_parent_map_with_sources(keys)
         if ordering == 'topological':
             # Global topological sort
-            present_keys = topo_sort(global_map)
+            present_keys = tsort.topo_sort(global_map)
             # Now group by source:
             source_keys = []
             current_source = None
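
For orientation, topo_sort takes a parent map such as global_map and returns the keys ordered so that every parent appears before its children. The sketch below only illustrates that contract under that assumption; it is not bzrlib's implementation, which lives in bzrlib/tsort.py:

def illustrative_topo_sort(parent_map):
    # parent_map maps each key to a tuple of parent keys; parents missing
    # from the map are simply skipped.
    result = []
    seen = set()
    def visit(key):
        if key in seen or key not in parent_map:
            return
        seen.add(key)
        for parent in parent_map[key]:
            visit(parent)
        result.append(key)
    for key in sorted(parent_map):
        visit(key)
    return result

# e.g. illustrative_topo_sort({'B': ('A',), 'A': ()}) == ['A', 'B']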
@@ -1495,7 +1484,7 @@
         :return: the header and the decompressor stream.
                  as (stream, header_record)
         """
-        df = GzipFile(mode='rb', fileobj=StringIO(raw_data))
+        df = tuned_gzip.GzipFile(mode='rb', fileobj=StringIO(raw_data))
         try:
             # Current serialise
             rec = self._check_header(key, df.readline())
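
Only the way the class is reached changes in this hunk: tuned_gzip.GzipFile is bzrlib's tuned variant of the standard library's gzip.GzipFile, now referenced through the lazily imported module instead of a top-level name. The pattern is the usual one of wrapping compressed bytes in an in-memory file object and reading decompressed lines; a stdlib-only sketch (Python 2, matching the code above; read_record is a hypothetical name):

import gzip
from cStringIO import StringIO

def read_record(raw_data):
    # raw_data: a gzip-compressed record held as a plain byte string.
    df = gzip.GzipFile(mode='rb', fileobj=StringIO(raw_data))
    try:
        header = df.readline()      # the record's header line
        contents = df.readlines()   # the remaining record lines
    finally:
        df.close()
    return header, contents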
@@ -1510,7 +1499,7 @@
         # 4168 calls in 2880 217 internal
         # 4168 calls to _parse_record_header in 2121
         # 4168 calls to readlines in 330
-        df = GzipFile(mode='rb', fileobj=StringIO(data))
+        df = tuned_gzip.GzipFile(mode='rb', fileobj=StringIO(data))
         try:
             record_contents = df.readlines()
         except Exception, e:
@@ -1611,7 +1600,7 @@
                 'data must be plain bytes was %s' % type(bytes))
         if lines and lines[-1][-1] != '\n':
             raise ValueError('corrupt lines value %r' % lines)
-        compressed_bytes = bytes_to_gzip(bytes)
+        compressed_bytes = tuned_gzip.bytes_to_gzip(bytes)
         return len(compressed_bytes), compressed_bytes
 
     def _split_header(self, line):
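
bytes_to_gzip goes the other way, gzip-compressing an in-memory byte string in one call; the call site only switches to reaching it through the lazily imported tuned_gzip module. A rough stdlib equivalent, for orientation only (gzip_bytes is a hypothetical name; the real helper is bzrlib's tuned_gzip.bytes_to_gzip):

import gzip
from cStringIO import StringIO

def gzip_bytes(data):
    # Compress a plain byte string entirely in memory and return the
    # resulting gzip stream as a string.
    sio = StringIO()
    gz = gzip.GzipFile(mode='wb', fileobj=sio)
    try:
        gz.write(data)
    finally:
        gz.close()
    return sio.getvalue()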
@@ -2698,7 +2687,7 @@
         # TODO: this code generates a parent maps of present ancestors; it
         # could be split out into a separate method, and probably should use
         # iter_ancestry instead. -- mbp and robertc 20080704
-        graph = Graph(self._knit)
+        graph = _mod_graph.Graph(self._knit)
         head_cache = _mod_graph.FrozenHeadsCache(graph)
         search = graph._make_breadth_first_searcher([key])
         keys = set()
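
The thread running through these hunks is that lazy_import binds whole modules rather than individual names: an eager statement such as "from bzrlib.graph import Graph" must load bzrlib.graph immediately to resolve the Graph name, so the top-level imports are replaced with module bindings (graph as _mod_graph, tsort, tuned_gzip) and each call site switches to module-qualified access, e.g. _mod_graph.Graph(self._knit) above.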