# Copyright (C) 2005, 2006 by Canonical Ltd
# Written by Martin Pool.
# Modified by Johan Rydberg <jrydberg@gnu.org>
# Modified by Robert Collins <robert.collins@canonical.com>
# Modified by Aaron Bentley <aaron.bentley@utoronto.ca>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

"""Knit versionedfile implementation.

A knit is a versioned file implementation that supports efficient append only
updates.

Knit file layout:
lifeless: the data file is made up of "delta records".  each delta record has a delta header
that contains; (1) a version id, (2) the size of the delta (in lines), and (3) the digest of
the -expanded data- (ie, the delta applied to the parent).  the delta also ends with an
end-marker; simply "end VERSION"

delta can be line or full contents.
... the 8's there are the index number of the annotation.
version robertc@robertcollins.net-20051003014215-ee2990904cc4c7ad 7 c7d23b2a5bd6ca00e8e266cec0ec228158ee9f9e
59,59,3
8
8         if ie.executable:
8             e.set('executable', 'yes')
130,130,2
8         if elt.get('executable') == 'yes':
8             ie.executable = True
end robertc@robertcollins.net-20051003014215-ee2990904cc4c7ad


what's in an index:
09:33 < jrydberg> lifeless: each index is made up of a tuple of; version id, options, position, size, parents
09:33 < jrydberg> lifeless: the parents are currently dictionary compressed
09:33 < jrydberg> lifeless: (meaning it currently does not support ghosts)
09:33 < lifeless> right
09:33 < jrydberg> lifeless: the position and size is the range in the data file


so the index sequence is the dictionary compressed sequence number used
in the deltas to provide line annotation

"""

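# Editorial illustration (hedged; the values below are invented and this is
# not the exact .kndx on-disk encoding): the docstring above says each index
# entry is a tuple of (version id, options, position, size, parents), so a
# hypothetical delta-compressed version could be pictured as:
#
#   ('rev-2', ['line-delta'], 340, 127, ['rev-1'])
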
# TODOS:
# 10:16 < lifeless> make partial index writes safe
# 10:16 < lifeless> implement 'knit.check()' like weave.check()
# 10:17 < lifeless> record known ghosts so we can detect when they are filled in rather than the current 'reweave
#                   always' approach.
# move sha1 out of the content so that join is faster at verifying parents
# record content length ?


from copy import copy
from cStringIO import StringIO
import difflib
from itertools import izip, chain
import operator
import os
import sys
import warnings

import bzrlib
import bzrlib.errors as errors
from bzrlib.errors import FileExists, NoSuchFile, KnitError, \
        InvalidRevisionId, KnitCorrupt, KnitHeaderError, \
        RevisionNotPresent, RevisionAlreadyPresent
from bzrlib.tuned_gzip import GzipFile
from bzrlib.trace import mutter
from bzrlib.osutils import contains_whitespace, contains_linebreaks, \
     sha_strings
from bzrlib.versionedfile import VersionedFile, InterVersionedFile
from bzrlib.symbol_versioning import DEPRECATED_PARAMETER, deprecated_passed
from bzrlib.tsort import topo_sort
import bzrlib.weave


# TODO: Split out code specific to this format into an associated object.

# TODO: Can we put in some kind of value to check that the index and data
# files belong together?

# TODO: accommodate binaries, perhaps by storing a byte count

# TODO: function to check whole file

# TODO: atomically append data, then measure backwards from the cursor
# position after writing to work out where it was located.  we may need to
# bypass python file buffering.

DATA_SUFFIX = '.knit'
INDEX_SUFFIX = '.kndx'


class KnitContent(object):
    """Content of a knit version to which deltas can be applied."""

    def __init__(self, lines):
        self._lines = lines

    def annotate_iter(self):
        """Yield tuples of (origin, text) for each content line."""
        for origin, text in self._lines:
            yield origin, text

    def annotate(self):
        """Return a list of (origin, text) tuples."""
        return list(self.annotate_iter())

    def line_delta_iter(self, new_lines):
        """Generate line-based delta from this content to new_lines."""
        new_texts = [text for origin, text in new_lines._lines]
        old_texts = [text for origin, text in self._lines]
        s = KnitSequenceMatcher(None, old_texts, new_texts)
        for op in s.get_opcodes():
            if op[0] == 'equal':
                continue
            # ofrom oto length data
            yield (op[1], op[2], op[4]-op[3], new_lines._lines[op[3]:op[4]])

    def line_delta(self, new_lines):
        return list(self.line_delta_iter(new_lines))

    def text(self):
        return [text for origin, text in self._lines]

    def copy(self):
        return KnitContent(self._lines[:])

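# Editorial sketch of KnitContent.line_delta (invented inputs): each emitted
# hunk is (start, end, count-of-new-lines, replacement (origin, text) pairs).
#
#   old = KnitContent([('rev-1', 'a\n'), ('rev-1', 'b\n')])
#   new = KnitContent([('rev-1', 'a\n'), ('rev-2', 'c\n')])
#   old.line_delta(new)  # -> [(1, 2, 1, [('rev-2', 'c\n')])]
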

class _KnitFactory(object):
    """Base factory for creating content objects."""

    def make(self, lines, version):
        num_lines = len(lines)
        return KnitContent(zip([version] * num_lines, lines))


class KnitAnnotateFactory(_KnitFactory):
    """Factory for creating annotated Content objects."""

    annotated = True

    def parse_fulltext(self, content, version):
        """Convert fulltext to internal representation.

        fulltext content is of the format
        revid(utf8) plaintext\n
        internal representation is of the format:
        (revid, plaintext)
        """
        lines = []
        for line in content:
            origin, text = line.split(' ', 1)
            lines.append((origin.decode('utf-8'), text))
        return KnitContent(lines)

    def parse_line_delta_iter(self, lines):
        # version is not used by this factory's parse_line_delta, so None is
        # passed here.
        for result_item in self.parse_line_delta(lines, version=None):
            yield result_item

    def parse_line_delta(self, lines, version):
        """Convert a line based delta into internal representation.

        line delta is in the form of:
        intstart intend intcount
        1..count lines:
        revid(utf8) newline\n
        internal representation is
        (start, end, count, [1..count tuples (revid, newline)])
        """
        result = []
        lines = iter(lines)
        next = lines.next
        # walk through the lines parsing.
        for header in lines:
            start, end, count = [int(n) for n in header.split(',')]
            contents = []
            remaining = count
            while remaining:
                origin, text = next().split(' ', 1)
                remaining -= 1
                contents.append((origin.decode('utf-8'), text))
            result.append((start, end, count, contents))
        return result

    def lower_fulltext(self, content):
        """convert a fulltext content record into a serializable form.

        see parse_fulltext which this inverts.
        """
        return ['%s %s' % (o.encode('utf-8'), t) for o, t in content._lines]

    def lower_line_delta(self, delta):
        """convert a delta into a serializable form.

        See parse_line_delta which this inverts.
        """
        out = []
        for start, end, c, lines in delta:
            out.append('%d,%d,%d\n' % (start, end, c))
            for origin, text in lines:
                out.append('%s %s' % (origin.encode('utf-8'), text))
        return out


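# Editorial illustration (invented revision ids): the annotated factory's
# lower_line_delta() serializes each hunk as a 'start,end,count' header line
# followed by count lines of 'origin text', which parse_line_delta() inverts:
#
#   59,59,3
#   rev-id-1 if ie.executable:
#   rev-id-1     e.set('executable', 'yes')
#   rev-id-2 # third replacement line
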
class KnitPlainFactory(_KnitFactory):
    """Factory for creating plain Content objects."""

    annotated = False

    def parse_fulltext(self, content, version):
        """This parses an unannotated fulltext.

        Note that this is not a noop - the internal representation
        has (versionid, line) - it's just a constant versionid.
        """
        return self.make(content, version)

    def parse_line_delta_iter(self, lines, version):
        while lines:
            header = lines.pop(0)
            start, end, c = [int(n) for n in header.split(',')]
            yield start, end, c, zip([version] * c, lines[:c])
            del lines[:c]

    def parse_line_delta(self, lines, version):
        return list(self.parse_line_delta_iter(lines, version))

    def lower_fulltext(self, content):
        return content.text()

    def lower_line_delta(self, delta):
        out = []
        for start, end, c, lines in delta:
            out.append('%d,%d,%d\n' % (start, end, c))
            out.extend([text for origin, text in lines])
        return out


def make_empty_knit(transport, relpath):
    """Construct an empty knit at the specified location."""
    k = KnitVersionedFile(transport, relpath, 'w', KnitPlainFactory)
    k._data._open_file()


class KnitVersionedFile(VersionedFile):
    """Weave-like structure with faster random access.

    A knit stores a number of texts and a summary of the relationships
    between them.  Texts are identified by a string version-id.  Texts
    are normally stored and retrieved as a series of lines, but can
    also be passed as single strings.

    Lines are stored with the trailing newline (if any) included, to
    avoid special cases for files with no final newline.  Lines are
    composed of 8-bit characters, not unicode.  The combination of
    these approaches should mean any 'binary' file can be safely
    stored and retrieved.
    """

    def __init__(self, relpath, transport, file_mode=None, access_mode=None,
                 factory=None, basis_knit=DEPRECATED_PARAMETER, delta=True,
                 create=False):
        """Construct a knit at location specified by relpath.

        :param create: If not True, only open an existing knit.
        """
        if deprecated_passed(basis_knit):
            warnings.warn("KnitVersionedFile.__init__(): The basis_knit"
                          " parameter is deprecated as of bzr 0.9.",
                          DeprecationWarning, stacklevel=2)
        if access_mode is None:
            access_mode = 'w'
        super(KnitVersionedFile, self).__init__(access_mode)
        assert access_mode in ('r', 'w'), "invalid mode specified %r" % access_mode
        self.transport = transport
        self.filename = relpath
        self.factory = factory or KnitAnnotateFactory()
        self.writable = (access_mode == 'w')
        self.delta = delta

        self._index = _KnitIndex(transport, relpath + INDEX_SUFFIX,
            access_mode, create=create, file_mode=file_mode)
        self._data = _KnitData(transport, relpath + DATA_SUFFIX,
            access_mode, create=create and not len(self), file_mode=file_mode)

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__,
                           self.transport.abspath(self.filename))

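    # Editorial usage sketch (hedged; the transport object and version ids are
    # assumptions, and add_lines() comes from the VersionedFile base class):
    #
    #   knit = KnitVersionedFile('inventory', a_transport, create=True)
    #   knit.add_lines('rev-1', [], ['one line\n'])
    #   knit.add_lines('rev-2', ['rev-1'], ['one line\n', 'another\n'])
    #   knit.get_lines('rev-2')  # -> ['one line\n', 'another\n']
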
    def _add_delta(self, version_id, parents, delta_parent, sha1, noeol, delta):
        """See VersionedFile._add_delta()."""
        self._check_add(version_id, []) # should we check the lines ?
        self._check_versions_present(parents)
        present_parents = []
        ghosts = []
        parent_texts = {}
        for parent in parents:
            if not self.has_version(parent):
                ghosts.append(parent)
            else:
                present_parents.append(parent)

        if delta_parent is None:
            # reconstitute as full text.
            assert len(delta) == 1 or len(delta) == 0
            if len(delta):
                assert delta[0][0] == 0
                assert delta[0][1] == 0, delta[0][1]
            return super(KnitVersionedFile, self)._add_delta(version_id,
                                                             parents,
                                                             delta_parent,
                                                             sha1,
                                                             noeol,
                                                             delta)

        digest = sha1

        options = []
        if noeol:
            options.append('no-eol')

        if delta_parent is not None:
            # determine the current delta chain length.
            # To speed the extract of texts the delta chain is limited
            # to a fixed number of deltas.  This should minimize both
            # I/O and the time spent applying deltas.
            count = 0
            delta_parents = [delta_parent]
            while count < 25:
                parent = delta_parents[0]
                method = self._index.get_method(parent)
                if method == 'fulltext':
                    break
                delta_parents = self._index.get_parents(parent)
                count = count + 1
            if method == 'line-delta':
                # did not find a fulltext in the delta limit.
                # just do a normal insertion.
                return super(KnitVersionedFile, self)._add_delta(version_id,
                                                                 parents,
                                                                 delta_parent,
                                                                 sha1,
                                                                 noeol,
                                                                 delta)

        options.append('line-delta')
        store_lines = self.factory.lower_line_delta(delta)

        where, size = self._data.add_record(version_id, digest, store_lines)
        self._index.add_version(version_id, options, where, size, parents)

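    # Editorial note (a sketch, not new behaviour): deltas chain through their
    # build-parents until a fulltext is reached, and the chain is capped at 25
    # as above.  For example, with 'rev-3' -> 'rev-2' -> 'rev-1' (fulltext),
    # extracting 'rev-3' reads three records and applies two line-deltas.
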
    def _add_raw_records(self, records, data):
        """Add all the records 'records' with data pre-joined in 'data'.

        :param records: A list of tuples (version_id, options, parents, size).
        :param data: The data for the records. When it is written, the records
                     are adjusted to have pos pointing into data by the sum of
                     the preceding records' sizes.
        """
        # write all the data
        pos = self._data.add_raw_record(data)
        index_entries = []
        for (version_id, options, parents, size) in records:
            index_entries.append((version_id, options, pos, size, parents))
            pos += size
        self._index.add_versions(index_entries)

    def clear_cache(self):
        """Clear the data cache only."""
        self._data.clear_cache()

    def copy_to(self, name, transport):
        """See VersionedFile.copy_to()."""
        # copy the current index to a temp index to avoid racing with local
        # writes
        transport.put(name + INDEX_SUFFIX + '.tmp', self.transport.get(self._index._filename),)
        # copy the data file
        f = self._data._open_file()
        try:
            transport.put(name + DATA_SUFFIX, f)
        finally:
            f.close()
        # move the copied index into place
        transport.move(name + INDEX_SUFFIX + '.tmp', name + INDEX_SUFFIX)

    def create_empty(self, name, transport, mode=None):
        return KnitVersionedFile(name, transport, factory=self.factory, delta=self.delta, create=True)

    def _fix_parents(self, version, new_parents):
        """Fix the parents list for version.

        This is done by appending a new version to the index
        with identical data except for the parents list.
        The parents list must be a superset of the current
        list.
        """
        current_values = self._index._cache[version]
        assert set(current_values[4]).difference(set(new_parents)) == set()
        self._index.add_version(version,
                                current_values[1],
                                current_values[2],
                                current_values[3],
                                new_parents)

    def get_delta(self, version_id):
        """Get a delta for constructing version from some other version."""
        if not self.has_version(version_id):
            raise RevisionNotPresent(version_id, self.filename)

        parents = self.get_parents(version_id)
        if len(parents):
            parent = parents[0]
        else:
            parent = None
        data_pos, data_size = self._index.get_position(version_id)
        data, sha1 = self._data.read_records(((version_id, data_pos, data_size),))[version_id]
        version_idx = self._index.lookup(version_id)
        noeol = 'no-eol' in self._index.get_options(version_id)
        if 'fulltext' == self._index.get_method(version_id):
            new_content = self.factory.parse_fulltext(data, version_idx)
            if parent is not None:
                reference_content = self._get_content(parent)
                old_texts = reference_content.text()
            else:
                old_texts = []
            new_texts = new_content.text()
            delta_seq = KnitSequenceMatcher(None, old_texts, new_texts)
            return parent, sha1, noeol, self._make_line_delta(delta_seq, new_content)
        else:
            delta = self.factory.parse_line_delta(data, version_idx)
            return parent, sha1, noeol, delta

    def get_graph_with_ghosts(self):
        """See VersionedFile.get_graph_with_ghosts()."""
        graph_items = self._index.get_graph()
        return dict(graph_items)

    def get_sha1(self, version_id):
        """See VersionedFile.get_sha1()."""
        record_map = self._get_record_map([version_id])
        method, content, digest, next = record_map[version_id]
        return digest

    @staticmethod
    def get_suffixes():
        """See VersionedFile.get_suffixes()."""
        return [DATA_SUFFIX, INDEX_SUFFIX]

    def has_ghost(self, version_id):
        """True if there is a ghost reference in the file to version_id."""
        # maybe we have it
        if self.has_version(version_id):
            return False
        # optimisable if needed by memoising the _ghosts set.
        items = self._index.get_graph()
        for node, parents in items:
            for parent in parents:
                if parent not in self._index._cache:
                    if parent == version_id:
                        return True
        return False

    def versions(self):
        """See VersionedFile.versions."""
        return self._index.get_versions()

    def has_version(self, version_id):
        """See VersionedFile.has_version."""
        return self._index.has_version(version_id)

    __contains__ = has_version

    def _merge_annotations(self, content, parents, parent_texts={},
                           delta=None, annotated=None):
        """Merge annotations for content.  This is done by comparing
        the annotations based on changes to the text.
        """
        if annotated:
            delta_seq = None
            for parent_id in parents:
                merge_content = self._get_content(parent_id, parent_texts)
                seq = KnitSequenceMatcher(None, merge_content.text(), content.text())
                if delta_seq is None:
                    # setup a delta seq to reuse.
                    delta_seq = seq
                for i, j, n in seq.get_matching_blocks():
                    if n == 0:
                        continue
                    # this appears to copy (origin, text) pairs across to the new
                    # content for any line that matches the last-checked parent.
                    # FIXME: save the sequence control data for delta compression
                    # against the most relevant parent rather than rediffing.
                    content._lines[j:j+n] = merge_content._lines[i:i+n]
        if delta:
            if not annotated:
                reference_content = self._get_content(parents[0], parent_texts)
                new_texts = content.text()
                old_texts = reference_content.text()
                delta_seq = KnitSequenceMatcher(None, old_texts, new_texts)
            return self._make_line_delta(delta_seq, content)

    def _make_line_delta(self, delta_seq, new_content):
        """Generate a line delta from delta_seq and new_content."""
        diff_hunks = []
        for op in delta_seq.get_opcodes():
            if op[0] == 'equal':
                continue
            diff_hunks.append((op[1], op[2], op[4]-op[3], new_content._lines[op[3]:op[4]]))
        return diff_hunks

    def _get_components_positions(self, version_ids):
        """Produce a map of position data for the components of versions.

        This data is intended to be used for retrieving the knit records.

        A dict of version_id to (method, data_pos, data_size, next) is
        returned.
        method is the way referenced data should be applied.
        data_pos is the position of the data in the knit.
        data_size is the size of the data in the knit.
        next is the build-parent of the version, or None for fulltexts.
        """
        component_data = {}
        for version_id in version_ids:
            cursor = version_id

            while cursor is not None and cursor not in component_data:
                method = self._index.get_method(cursor)
                if method == 'fulltext':
                    next = None
                else:
                    next = self.get_parents(cursor)[0]
                data_pos, data_size = self._index.get_position(cursor)
                component_data[cursor] = (method, data_pos, data_size, next)
                cursor = next
        return component_data

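    # Editorial illustration (positions and sizes are invented): for the chain
    # 'rev-3' -> 'rev-2' -> 'rev-1' (fulltext), _get_components_positions()
    # would return something like:
    #
    #   {'rev-3': ('line-delta', 512, 60, 'rev-2'),
    #    'rev-2': ('line-delta', 300, 212, 'rev-1'),
    #    'rev-1': ('fulltext', 0, 300, None)}
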
    def _get_content(self, version_id, parent_texts={}):
        """Returns a content object that makes up the specified
        version."""
        if not self.has_version(version_id):
            raise RevisionNotPresent(version_id, self.filename)

        cached_version = parent_texts.get(version_id, None)
        if cached_version is not None:
            return cached_version

        text_map, contents_map = self._get_content_maps([version_id])
        return contents_map[version_id]

    def _check_versions_present(self, version_ids):
        """Check that all specified versions are present."""
        version_ids = set(version_ids)
        for r in list(version_ids):
            if self._index.has_version(r):
                version_ids.remove(r)
        if version_ids:
            raise RevisionNotPresent(list(version_ids)[0], self.filename)

    def _add_lines_with_ghosts(self, version_id, parents, lines, parent_texts):
        """See VersionedFile.add_lines_with_ghosts()."""
        self._check_add(version_id, lines)
        return self._add(version_id, lines[:], parents, self.delta, parent_texts)

    def _add_lines(self, version_id, parents, lines, parent_texts):
        """See VersionedFile.add_lines."""
        self._check_add(version_id, lines)
        self._check_versions_present(parents)
        return self._add(version_id, lines[:], parents, self.delta, parent_texts)

    def _check_add(self, version_id, lines):
        """check that version_id and lines are safe to add."""
        assert self.writable, "knit is not opened for write"
        ### FIXME escape. RBC 20060228
        if contains_whitespace(version_id):
            raise InvalidRevisionId(version_id, self.filename)
        if self.has_version(version_id):
            raise RevisionAlreadyPresent(version_id, self.filename)
        self._check_lines_not_unicode(lines)
        self._check_lines_are_lines(lines)

    def _add(self, version_id, lines, parents, delta, parent_texts):
        """Add a set of lines on top of version specified by parents.

        If delta is true, compress the text as a line-delta against
        the first parent.

        Any versions not present will be converted into ghosts.
        """
        #  461    0   6546.0390     43.9100   bzrlib.knit:489(_add)
        # +400    0    889.4890    418.9790   +bzrlib.knit:192(lower_fulltext)
        # +461    0   1364.8070    108.8030   +bzrlib.knit:996(add_record)
        # +461    0    193.3940     41.5720   +bzrlib.knit:898(add_version)
        # +461    0    134.0590     18.3810   +bzrlib.osutils:361(sha_strings)
        # +461    0     36.3420     15.4540   +bzrlib.knit:146(make)
        # +1383   0      8.0370      8.0370   +<len>
        # +61     0     13.5770      7.9190   +bzrlib.knit:199(lower_line_delta)
        # +61     0    963.3470      7.8740   +bzrlib.knit:427(_get_content)
        # +61     0    973.9950      5.2950   +bzrlib.knit:136(line_delta)
        # +61     0   1918.1800      5.2640   +bzrlib.knit:359(_merge_annotations)

        present_parents = []
        ghosts = []
        if parent_texts is None:
            parent_texts = {}
        for parent in parents:
            if not self.has_version(parent):
                ghosts.append(parent)
            else:
                present_parents.append(parent)

        if delta and not len(present_parents):
            delta = False

        digest = sha_strings(lines)
        options = []
        if lines:
            if lines[-1][-1] != '\n':
                options.append('no-eol')
                lines[-1] = lines[-1] + '\n'

        if len(present_parents) and delta:
            # To speed the extract of texts the delta chain is limited
            # to a fixed number of deltas.  This should minimize both
            # I/O and the time spent applying deltas.
            count = 0
            delta_parents = present_parents
            while count < 25:
                parent = delta_parents[0]
                method = self._index.get_method(parent)
                if method == 'fulltext':
                    break
                delta_parents = self._index.get_parents(parent)
                count = count + 1
            if method == 'line-delta':
                delta = False

        lines = self.factory.make(lines, version_id)
        if delta or (self.factory.annotated and len(present_parents) > 0):
            # Merge annotations from parent texts if needed.
            delta_hunks = self._merge_annotations(lines, present_parents, parent_texts,
                                                  delta, self.factory.annotated)

        if delta:
            options.append('line-delta')
            store_lines = self.factory.lower_line_delta(delta_hunks)
        else:
            options.append('fulltext')
            store_lines = self.factory.lower_fulltext(lines)

        where, size = self._data.add_record(version_id, digest, store_lines)
        self._index.add_version(version_id, options, where, size, parents)
        return lines

    def check(self, progress_bar=None):
        """See VersionedFile.check()."""

    def _clone_text(self, new_version_id, old_version_id, parents):
        """See VersionedFile.clone_text()."""
        # FIXME RBC 20060228 make fast by only inserting an index with null
        # delta.
        self.add_lines(new_version_id, parents, self.get_lines(old_version_id))

    def get_lines(self, version_id):
        """See VersionedFile.get_lines()."""
        return self.get_line_list([version_id])[0]

    def _get_record_map(self, version_ids):
        """Produce a dictionary of knit records.

        The keys are version_ids, the values are tuples of (method, content,
        digest, next).
        method is the way the content should be applied.
        content is a KnitContent object.
        digest is the SHA1 digest of this version id after all steps are done.
        next is the build-parent of the version, i.e. the leftmost ancestor.
        If the method is fulltext, next will be None.
        """
        position_map = self._get_components_positions(version_ids)
        # c = component_id, m = method, p = position, s = size, n = next
        records = [(c, p, s) for c, (m, p, s, n) in position_map.iteritems()]
        record_map = {}
        for component_id, content, digest in \
                self._data.read_records_iter(records):
            method, position, size, next = position_map[component_id]
            record_map[component_id] = method, content, digest, next

        return record_map

    def get_text(self, version_id):
        """See VersionedFile.get_text"""
        return self.get_texts([version_id])[0]

    def get_texts(self, version_ids):
        return [''.join(l) for l in self.get_line_list(version_ids)]

    def get_line_list(self, version_ids):
        """Return the texts of listed versions as a list of strings."""
        text_map, content_map = self._get_content_maps(version_ids)
        return [text_map[v] for v in version_ids]

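    # Editorial usage sketch (version ids are assumptions): get_texts() joins
    # the per-version line lists produced by get_line_list():
    #
    #   knit.get_line_list(['rev-1'])  # -> [['one line\n', 'another\n']]
    #   knit.get_texts(['rev-1'])      # -> ['one line\nanother\n']
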
    def _get_content_maps(self, version_ids):
        """Produce maps of text and KnitContents

        :return: (text_map, content_map) where text_map contains the texts for
        the requested versions and content_map contains the KnitContents.
        Both dicts take version_ids as their keys.
        """
        for version_id in version_ids:
            if not self.has_version(version_id):
                raise RevisionNotPresent(version_id, self.filename)
        record_map = self._get_record_map(version_ids)

        text_map = {}
        content_map = {}
        final_content = {}
        for version_id in version_ids:
            components = []
            cursor = version_id
            while cursor is not None:
                method, data, digest, next = record_map[cursor]
                components.append((cursor, method, data, digest))
                if cursor in content_map:
                    break
                cursor = next

            content = None
            for component_id, method, data, digest in reversed(components):
                if component_id in content_map:
                    content = content_map[component_id]
                else:
                    version_idx = self._index.lookup(component_id)
                    if method == 'fulltext':
                        assert content is None
                        content = self.factory.parse_fulltext(data, version_idx)
                    elif method == 'line-delta':
                        delta = self.factory.parse_line_delta(data[:],
                                                              version_idx)
                        content = content.copy()
                        content._lines = self._apply_delta(content._lines,
                                                           delta)
                    content_map[component_id] = content

            if 'no-eol' in self._index.get_options(version_id):
                content = content.copy()
                line = content._lines[-1][1].rstrip('\n')
                content._lines[-1] = (content._lines[-1][0], line)
            final_content[version_id] = content

            # digest here is the digest from the last applied component.
            text = content.text()
            if sha_strings(text) != digest:
                raise KnitCorrupt(self.filename,
                                  'sha-1 does not match %s' % version_id)

            text_map[version_id] = text
        return text_map, final_content

    def iter_lines_added_or_present_in_versions(self, version_ids=None):
        """See VersionedFile.iter_lines_added_or_present_in_versions()."""
        if version_ids is None:
            version_ids = self.versions()
        # we don't care about inclusions, the caller cares.
        # but we need to setup a list of records to visit.
        # we need version_id, position, length
        version_id_records = []
        requested_versions = list(version_ids)
        # filter for available versions
        for version_id in requested_versions:
            if not self.has_version(version_id):
                raise RevisionNotPresent(version_id, self.filename)
        # get an in-component-order queue:
        version_ids = []
        for version_id in self.versions():
            if version_id in requested_versions:
                version_ids.append(version_id)
                data_pos, length = self._index.get_position(version_id)
                version_id_records.append((version_id, data_pos, length))

        pb = bzrlib.ui.ui_factory.nested_progress_bar()
        count = 0
        total = len(version_id_records)
        try:
            pb.update('Walking content.', count, total)
            for version_id, data, sha_value in \
                    self._data.read_records_iter(version_id_records):
                pb.update('Walking content.', count, total)
                method = self._index.get_method(version_id)
                version_idx = self._index.lookup(version_id)
                assert method in ('fulltext', 'line-delta')
                if method == 'fulltext':
                    content = self.factory.parse_fulltext(data, version_idx)
                    for line in content.text():
                        yield line
                else:
                    delta = self.factory.parse_line_delta(data, version_idx)
                    for start, end, count, lines in delta:
                        for origin, line in lines:
                            yield line
                count += 1
            pb.update('Walking content.', total, total)
            pb.finished()
        except:
            pb.update('Walking content.', total, total)
            pb.finished()
            raise

    def num_versions(self):
        """See VersionedFile.num_versions()."""
        return self._index.num_versions()

    __len__ = num_versions

    def annotate_iter(self, version_id):
        """See VersionedFile.annotate_iter."""
        content = self._get_content(version_id)
        for origin, text in content.annotate_iter():
            yield origin, text

    def get_parents(self, version_id):
        """See VersionedFile.get_parents."""
        # perf notes:
        # optimism counts!
        # 52554 calls in 1264 872 internal down from 3674
        try:
            return self._index.get_parents(version_id)
        except KeyError:
            raise RevisionNotPresent(version_id, self.filename)

    def get_parents_with_ghosts(self, version_id):
        """See VersionedFile.get_parents_with_ghosts."""
        try:
            return self._index.get_parents_with_ghosts(version_id)
        except KeyError:
            raise RevisionNotPresent(version_id, self.filename)

    def get_ancestry(self, versions):
        """See VersionedFile.get_ancestry."""
        if isinstance(versions, basestring):
            versions = [versions]
        if not versions:
            return []
        self._check_versions_present(versions)
        return self._index.get_ancestry(versions)

    def get_ancestry_with_ghosts(self, versions):
        """See VersionedFile.get_ancestry_with_ghosts."""
        if isinstance(versions, basestring):
            versions = [versions]
        if not versions:
            return []
        self._check_versions_present(versions)
        return self._index.get_ancestry_with_ghosts(versions)

1594.2.6
by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved. |
870 |
#@deprecated_method(zero_eight)
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
871 |
def walk(self, version_ids): |
872 |
"""See VersionedFile.walk."""
|
|
873 |
# We take the short path here, and extract all relevant texts
|
|
874 |
# and put them in a weave and let that do all the work. Far
|
|
875 |
# from optimal, but much simpler.
|
|
1563.2.6
by Robert Collins
Start check tests for knits (pending), and remove dead code. |
876 |
# FIXME RB 20060228 this really is inefficient!
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
877 |
from bzrlib.weave import Weave |
878 |
||
879 |
w = Weave(self.filename) |
|
880 |
ancestry = self.get_ancestry(version_ids) |
|
881 |
sorted_graph = topo_sort(self._index.get_graph()) |
|
882 |
version_list = [vid for vid in sorted_graph if vid in ancestry] |
|
883 |
||
884 |
for version_id in version_list: |
|
885 |
lines = self.get_lines(version_id) |
|
886 |
w.add_lines(version_id, self.get_parents(version_id), lines) |
|
887 |
||
888 |
for lineno, insert_id, dset, line in w.walk(version_ids): |
|
889 |
yield lineno, insert_id, dset, line |
|
890 |
||
1664.2.3
by Aaron Bentley
Add failing test case |
891 |
def plan_merge(self, ver_a, ver_b): |
1664.2.11
by Aaron Bentley
Clarifications from merge review |
892 |
"""See VersionedFile.plan_merge."""
|
1664.2.6
by Aaron Bentley
Got plan-merge passing tests |
893 |
ancestors_b = set(self.get_ancestry(ver_b)) |
894 |
def status_a(revision, text): |
|
895 |
if revision in ancestors_b: |
|
896 |
return 'killed-b', text |
|
897 |
else: |
|
898 |
return 'new-a', text |
|
899 |
||
900 |
ancestors_a = set(self.get_ancestry(ver_a)) |
|
901 |
def status_b(revision, text): |
|
902 |
if revision in ancestors_a: |
|
903 |
return 'killed-a', text |
|
904 |
else: |
|
905 |
return 'new-b', text |
|
906 |
||
1664.2.4
by Aaron Bentley
Identify unchanged lines correctly |
907 |
annotated_a = self.annotate(ver_a) |
908 |
annotated_b = self.annotate(ver_b) |
|
1664.2.11
by Aaron Bentley
Clarifications from merge review |
909 |
plain_a = [t for (a, t) in annotated_a] |
910 |
plain_b = [t for (a, t) in annotated_b] |
|
1711.2.11
by John Arbash Meinel
Rename patiencediff.SequenceMatcher => PatienceSequenceMatcher and knit.SequenceMatcher => KnitSequenceMatcher |
911 |
blocks = KnitSequenceMatcher(None, plain_a, plain_b).get_matching_blocks() |
1664.2.4
by Aaron Bentley
Identify unchanged lines correctly |
912 |
a_cur = 0 |
913 |
b_cur = 0 |
|
914 |
for ai, bi, l in blocks: |
|
1664.2.13
by Aaron Bentley
Knit plan_merge uses slices instead of xenumerate |
915 |
# process all mismatched sections
|
916 |
# (last mismatched section is handled because blocks always
|
|
917 |
# includes a 0-length last block)
|
|
918 |
for revision, text in annotated_a[a_cur:ai]: |
|
1664.2.6
by Aaron Bentley
Got plan-merge passing tests |
919 |
yield status_a(revision, text) |
1664.2.13
by Aaron Bentley
Knit plan_merge uses slices instead of xenumerate |
920 |
for revision, text in annotated_b[b_cur:bi]: |
1664.2.6
by Aaron Bentley
Got plan-merge passing tests |
921 |
yield status_b(revision, text) |
1664.2.13
by Aaron Bentley
Knit plan_merge uses slices instead of xenumerate |
922 |
|
1664.2.11
by Aaron Bentley
Clarifications from merge review |
923 |
# and now the matched section
|
1664.2.13
by Aaron Bentley
Knit plan_merge uses slices instead of xenumerate |
924 |
a_cur = ai + l |
925 |
b_cur = bi + l |
|
926 |
for text_a, text_b in zip(plain_a[ai:a_cur], plain_b[bi:b_cur]): |
|
1664.2.4
by Aaron Bentley
Identify unchanged lines correctly |
927 |
assert text_a == text_b |
928 |
yield "unchanged", text_a |
|
929 |
||
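# Illustrative sketch only (hypothetical version ids and lines, not taken
# from the original source): for two diverged versions the generator above
# yields (state, line) pairs such as
#
#   ('unchanged', 'a\n'), ('killed-b', 'b\n'), ('new-a', 'c\n'), ('new-b', 'd\n')
#
# which a merge routine can replay: 'unchanged' lines are common to both
# sides, 'new-*' lines exist only on one side, and 'killed-*' lines were
# present on one side but removed by the other side's ancestry.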
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
930 |
|
931 |
class _KnitComponentFile(object): |
|
932 |
"""One of the files used to implement a knit database"""
|
|
933 |
||
1666.1.6
by Robert Collins
Make knit the default format. |
934 |
def __init__(self, transport, filename, mode, file_mode=None): |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
935 |
self._transport = transport |
936 |
self._filename = filename |
|
937 |
self._mode = mode |
|
1666.1.6
by Robert Collins
Make knit the default format. |
938 |
self._file_mode = file_mode |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
939 |
|
940 |
def write_header(self): |
|
1666.1.6
by Robert Collins
Make knit the default format. |
941 |
if self._transport.append(self._filename, StringIO(self.HEADER), |
942 |
mode=self._file_mode): |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
943 |
raise KnitCorrupt(self._filename, 'misaligned after writing header') |
944 |
||
945 |
def check_header(self, fp): |
|
1641.1.2
by Robert Collins
Change knit index files to be robust in the presence of partial writes. |
946 |
line = fp.readline() |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
947 |
if line != self.HEADER: |
948 |
raise KnitHeaderError(badline=line) |
|
949 |
||
950 |
def commit(self): |
|
951 |
"""Commit is a nop."""
|
|
952 |
||
953 |
def __repr__(self): |
|
954 |
return '%s(%s)' % (self.__class__.__name__, self._filename) |
|
955 |
||
956 |
||
957 |
class _KnitIndex(_KnitComponentFile): |
|
958 |
"""Manages knit index file.
|
|
959 |
||
960 |
The index is already kept in memory and read on startup, to enable
|
|
961 |
fast lookups of revision information. The cursor of the index
|
|
962 |
file is always pointing to the end, making it easy to append
|
|
963 |
entries.
|
|
964 |
||
965 |
_cache is a cache for fast mapping from version id to a Index
|
|
966 |
object.
|
|
967 |
||
968 |
_history is a cache for fast mapping from indexes to version ids.
|
|
969 |
||
970 |
The index data format is dictionary compressed when it comes to
|
|
971 |
parent references; an index entry may only have parents with a
|
|
972 |
lower index number. As a result, the index is topologically sorted.
|
|
1563.2.11
by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis. |
973 |
|
974 |
Duplicate entries may be written to the index for a single version id
|
|
975 |
if this is done then the latter one completely replaces the former:
|
|
976 |
this allows updates to correct version and parent information.
|
|
977 |
Note that the two entries may share the delta, and that successive
|
|
978 |
annotations and references MUST point to the first entry.
|
|
1641.1.2
by Robert Collins
Change knit index files to be robust in the presence of partial writes. |
979 |
|
980 |
The index file on disc contains a header, followed by one line per knit
|
|
981 |
record. The same revision can be present in an index file more than once.
|
|
1759.2.1
by Jelmer Vernooij
Fix some types (found using aspell). |
982 |
The first occurrence gets assigned a sequence number starting from 0.
|
1641.1.2
by Robert Collins
Change knit index files to be robust in the presence of partial writes. |
983 |
|
984 |
The format of a single line is
|
|
985 |
REVISION_ID FLAGS BYTE_OFFSET LENGTH( PARENT_ID|PARENT_SEQUENCE_ID)* :\n
|
|
986 |
REVISION_ID is a utf8-encoded revision id
|
|
987 |
FLAGS is a comma separated list of flags about the record. Values include
|
|
988 |
no-eol, line-delta, fulltext.
|
|
989 |
BYTE_OFFSET is the ascii representation of the byte offset in the data file
|
|
990 |
that the compressed data starts at.
|
|
991 |
LENGTH is the ascii representation of the length of the record in the data file.
|
|
992 |
PARENT_ID is a utf-8 revision id prefixed by a '.' that is a parent of
|
|
993 |
REVISION_ID.
|
|
994 |
PARENT_SEQUENCE_ID is the ascii representation of the sequence number of a
|
|
995 |
revision id already in the knit that is a parent of REVISION_ID.
|
|
996 |
The ' :' marker is the end of record marker.
|
|
997 |
|
|
998 |
partial writes:
|
|
999 |
when a write to the index file is interrupted, it will result in a line that
|
|
1000 |
does not end in ' :'. If the ' :' is not present at the end of a line, or at
|
|
1001 |
the end of the file, then the record that is missing it will be ignored by
|
|
1002 |
the parser.
|
|
1003 |
||
1759.2.1
by Jelmer Vernooij
Fix some types (found using aspell). |
1004 |
When writing new records to the index file, the data is preceded by '\n'
|
1641.1.2
by Robert Collins
Change knit index files to be robust in the presence of partial writes. |
1005 |
to ensure that records always start on new lines even if the last write was
|
1006 |
interrupted. As a result it's normal for the last line in the index to be
|
|
1007 |
missing a trailing newline. One can be added with no harmful effects.
|
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1008 |
"""
|
1009 |
||
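# Illustrative sketch of the line format described above (hypothetical
# revision ids, offsets and sizes - not taken from a real index):
#
#   # bzr knit index 8
#   rev-1 fulltext 0 120  :
#   rev-2 line-delta 120 74 0 .ghost-rev :
#
# Here rev-1 has no parents (the empty parent list leaves two spaces before
# the ' :' terminator), while rev-2 has two: sequence number 0 (rev-1,
# dictionary compressed) and the ghost 'ghost-rev' given as an uncompressed
# '.'-prefixed revision id.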
1666.1.6
by Robert Collins
Make knit the default format. |
1010 |
HEADER = "# bzr knit index 8\n" |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1011 |
|
1596.2.18
by Robert Collins
More microopimisations on index reading, now down to 16000 records/seconds. |
1012 |
# speed of knit parsing went from 280 ms to 280 ms with slots addition.
|
1013 |
# __slots__ = ['_cache', '_history', '_transport', '_filename']
|
|
1014 |
||
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1015 |
def _cache_version(self, version_id, options, pos, size, parents): |
1596.2.18
by Robert Collins
More microopimisations on index reading, now down to 16000 records/seconds. |
1016 |
"""Cache a version record in the history array and index cache.
|
1017 |
|
|
1018 |
This is inlined into __init__ for performance. KEEP IN SYNC.
|
|
1019 |
(It saves 60ms, 25% of the __init__ overhead on local 4000 record
|
|
1020 |
indexes).
|
|
1021 |
"""
|
|
1596.2.14
by Robert Collins
Make knit parsing non quadratic? |
1022 |
# only want the _history index to reference the 1st index entry
|
1023 |
# for version_id
|
|
1596.2.18
by Robert Collins
More microopimisations on index reading, now down to 16000 records/seconds. |
1024 |
if version_id not in self._cache: |
1628.1.1
by Robert Collins
Cache the index number of versions in the knit index's self._cache so that |
1025 |
index = len(self._history) |
1596.2.14
by Robert Collins
Make knit parsing non quadratic? |
1026 |
self._history.append(version_id) |
1628.1.1
by Robert Collins
Cache the index number of versions in the knit index's self._cache so that |
1027 |
else: |
1028 |
index = self._cache[version_id][5] |
|
1029 |
self._cache[version_id] = (version_id, |
|
1030 |
options, |
|
1031 |
pos, |
|
1032 |
size, |
|
1033 |
parents, |
|
1034 |
index) |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1035 |
|
1666.1.6
by Robert Collins
Make knit the default format. |
1036 |
def __init__(self, transport, filename, mode, create=False, file_mode=None): |
1037 |
_KnitComponentFile.__init__(self, transport, filename, mode, file_mode) |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1038 |
self._cache = {} |
1563.2.11
by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis. |
1039 |
# position in _history is the 'official' index for a revision
|
1040 |
# but the values may have come from a newer entry.
|
|
1759.2.1
by Jelmer Vernooij
Fix some types (found using aspell). |
1041 |
# so - wc -l of a knit index is != the number of unique names
|
1773.4.1
by Martin Pool
Add pyflakes makefile target; fix many warnings |
1042 |
# in the knit.
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1043 |
self._history = [] |
1594.2.17
by Robert Collins
Better readv coalescing, now with test, and progress during knit index reading. |
1044 |
pb = bzrlib.ui.ui_factory.nested_progress_bar() |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1045 |
try: |
1594.2.17
by Robert Collins
Better readv coalescing, now with test, and progress during knit index reading. |
1046 |
count = 0 |
1047 |
total = 1 |
|
1048 |
try: |
|
1049 |
pb.update('read knit index', count, total) |
|
1050 |
fp = self._transport.get(self._filename) |
|
1711.7.25
by John Arbash Meinel
try/finally to close files, _KnitData was keeping a handle to a file it never used again, and using transport.rename() when it wanted transport.move() |
1051 |
try: |
1052 |
self.check_header(fp) |
|
1053 |
# readlines reads the whole file at once:
|
|
1054 |
# bad for transports like http, good for local disk
|
|
1055 |
# we save 60 ms doing this one change (
|
|
1056 |
# from calling readline each time to calling
|
|
1057 |
# readlines once).
|
|
1058 |
# probably what we want for nice behaviour on
|
|
1059 |
# http is an incremental readlines that yields, or
|
|
1060 |
# a check for local vs non local indexes,
|
|
1061 |
for l in fp.readlines(): |
|
1062 |
rec = l.split() |
|
1063 |
if len(rec) < 5 or rec[-1] != ':': |
|
1064 |
# corrupt line.
|
|
1065 |
# FIXME: in the future we should determine if it's a
|
|
1066 |
# short write - and ignore it
|
|
1067 |
# or a different failure, and raise. RBC 20060407
|
|
1068 |
continue
|
|
1069 |
count += 1 |
|
1070 |
total += 1 |
|
1071 |
#pb.update('read knit index', count, total)
|
|
1072 |
# See self._parse_parents
|
|
1073 |
parents = [] |
|
1074 |
for value in rec[4:-1]: |
|
1075 |
if '.' == value[0]: |
|
1076 |
# uncompressed reference
|
|
1077 |
parents.append(value[1:]) |
|
1078 |
else: |
|
1079 |
# this is 15/4000ms faster than isinstance,
|
|
1080 |
# (in lsprof)
|
|
1081 |
# this function is called thousands of times a
|
|
1082 |
# second so small variations add up.
|
|
1083 |
assert value.__class__ is str |
|
1084 |
parents.append(self._history[int(value)]) |
|
1085 |
# end self._parse_parents
|
|
1086 |
# self._cache_version(rec[0],
|
|
1087 |
# rec[1].split(','),
|
|
1088 |
# int(rec[2]),
|
|
1089 |
# int(rec[3]),
|
|
1090 |
# parents)
|
|
1091 |
# --- self._cache_version
|
|
1092 |
# only want the _history index to reference the 1st
|
|
1093 |
# index entry for version_id
|
|
1094 |
version_id = rec[0] |
|
1095 |
if version_id not in self._cache: |
|
1096 |
index = len(self._history) |
|
1097 |
self._history.append(version_id) |
|
1596.2.18
by Robert Collins
More microopimisations on index reading, now down to 16000 records/seconds. |
1098 |
else: |
1711.7.25
by John Arbash Meinel
try/finally to close files, _KnitData was keeping a handle to a file it never used again, and using transport.rename() when it wanted transport.move() |
1099 |
index = self._cache[version_id][5] |
1100 |
self._cache[version_id] = (version_id, |
|
1101 |
rec[1].split(','), |
|
1102 |
int(rec[2]), |
|
1103 |
int(rec[3]), |
|
1104 |
parents, |
|
1105 |
index) |
|
1106 |
# --- self._cache_version
|
|
1107 |
finally: |
|
1108 |
fp.close() |
|
1594.2.17
by Robert Collins
Better readv coalescing, now with test, and progress during knit index reading. |
1109 |
except NoSuchFile, e: |
1110 |
if mode != 'w' or not create: |
|
1111 |
raise
|
|
1112 |
self.write_header() |
|
1113 |
finally: |
|
1114 |
pb.update('read knit index', total, total) |
|
1115 |
pb.finished() |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1116 |
|
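# Sketch of the in-memory layout built above (a restatement of the code,
# not an addition to it): each value in self._cache is the 6-tuple
#
#   (version_id, options, pos, size, parents, sequence_index)
#
# so self._cache[version_id][5] is the dictionary-compression sequence
# number and [4] is the parent list, while self._history maps a sequence
# number back to its version id.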
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1117 |
def _parse_parents(self, compressed_parents): |
1118 |
"""convert a list of string parent values into version ids.
|
|
1119 |
||
1120 |
ints are looked up in the index.
|
|
1121 |
.FOO values are ghosts and converted into FOO.
|
|
1596.2.18
by Robert Collins
More microopimisations on index reading, now down to 16000 records/seconds. |
1122 |
|
1123 |
NOTE: the function is retained here for clarity, and for possible
|
|
1124 |
use in partial index reads. However, bulk processing now has
|
|
1125 |
it inlined in __init__ for inner-loop optimisation.
|
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1126 |
"""
|
1127 |
result = [] |
|
1128 |
for value in compressed_parents: |
|
1596.2.15
by Robert Collins
Microprofiling of knit parsing. |
1129 |
if value[0] == '.': |
1596.2.18
by Robert Collins
More microopimisations on index reading, now down to 16000 records/seconds. |
1130 |
# uncompressed reference
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1131 |
result.append(value[1:]) |
1132 |
else: |
|
1596.2.15
by Robert Collins
Microprofiling of knit parsing. |
1133 |
# this is 15/4000ms faster than isinstance,
|
1134 |
# this function is called thousands of times a
|
|
1135 |
# second so small variations add up.
|
|
1136 |
assert value.__class__ is str |
|
1596.2.11
by Robert Collins
Remove utf8 debugging code |
1137 |
result.append(self._history[int(value)]) |
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1138 |
return result |
1139 |
||
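# Illustrative sketch (hypothetical ids, the inverse of the serialisation
# done by _version_list_to_index further down): assuming sequence number 0
# is 'rev-1',
#
#   self._parse_parents(['0', '.ghost-1'])  # -> ['rev-1', 'ghost-1']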
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1140 |
def get_graph(self): |
1141 |
graph = [] |
|
1142 |
for version_id, index in self._cache.iteritems(): |
|
1143 |
graph.append((version_id, index[4])) |
|
1144 |
return graph |
|
1145 |
||
1146 |
def get_ancestry(self, versions): |
|
1147 |
"""See VersionedFile.get_ancestry."""
|
|
1563.2.35
by Robert Collins
cleanup deprecation warnings and finish conversion so the inventory is knit based too. |
1148 |
# get a graph of all the mentioned versions:
|
1149 |
graph = {} |
|
1150 |
pending = set(versions) |
|
1151 |
while len(pending): |
|
1152 |
version = pending.pop() |
|
1153 |
parents = self._cache[version][4] |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1154 |
# got the parents ok
|
1155 |
# trim ghosts
|
|
1156 |
parents = [parent for parent in parents if parent in self._cache] |
|
1563.2.35
by Robert Collins
cleanup deprecation warnings and finish conversion so the inventory is knit based too. |
1157 |
for parent in parents: |
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1158 |
# if not completed and not a ghost
|
1563.2.35
by Robert Collins
cleanup deprecation warnings and finish conversion so the inventory is knit based too. |
1159 |
if parent not in graph: |
1160 |
pending.add(parent) |
|
1161 |
graph[version] = parents |
|
1162 |
return topo_sort(graph.items()) |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1163 |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1164 |
def get_ancestry_with_ghosts(self, versions): |
1165 |
"""See VersionedFile.get_ancestry_with_ghosts."""
|
|
1166 |
# get a graph of all the mentioned versions:
|
|
1167 |
graph = {} |
|
1168 |
pending = set(versions) |
|
1169 |
while len(pending): |
|
1170 |
version = pending.pop() |
|
1171 |
try: |
|
1172 |
parents = self._cache[version][4] |
|
1173 |
except KeyError: |
|
1174 |
# ghost, fake it
|
|
1175 |
graph[version] = [] |
|
1176 |
pass
|
|
1177 |
else: |
|
1178 |
# got the parents ok
|
|
1179 |
for parent in parents: |
|
1180 |
if parent not in graph: |
|
1181 |
pending.add(parent) |
|
1182 |
graph[version] = parents |
|
1183 |
return topo_sort(graph.items()) |
|
1184 |
||
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1185 |
def num_versions(self): |
1186 |
return len(self._history) |
|
1187 |
||
1188 |
__len__ = num_versions |
|
1189 |
||
1190 |
def get_versions(self): |
|
1191 |
return self._history |
|
1192 |
||
1193 |
def idx_to_name(self, idx): |
|
1194 |
return self._history[idx] |
|
1195 |
||
1196 |
def lookup(self, version_id): |
|
1197 |
assert version_id in self._cache |
|
1628.1.1
by Robert Collins
Cache the index number of versions in the knit index's self._cache so that |
1198 |
return self._cache[version_id][5] |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1199 |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1200 |
def _version_list_to_index(self, versions): |
1201 |
result_list = [] |
|
1202 |
for version in versions: |
|
1203 |
if version in self._cache: |
|
1628.1.1
by Robert Collins
Cache the index number of versions in the knit index's self._cache so that |
1204 |
# -- inlined lookup() --
|
1205 |
result_list.append(str(self._cache[version][5])) |
|
1206 |
# -- end lookup () --
|
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1207 |
else: |
1594.2.9
by Robert Collins
Teach Knit repositories how to handle ghosts without corrupting at all. |
1208 |
result_list.append('.' + version.encode('utf-8')) |
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1209 |
return ' '.join(result_list) |
1210 |
||
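# Illustrative sketch (hypothetical ids): with 'rev-1' cached at sequence
# number 0 and 'ghost-1' absent from the index, the serialised parent list
# would be
#
#   self._version_list_to_index(['rev-1', 'ghost-1'])  # -> '0 .ghost-1'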
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1211 |
def add_version(self, version_id, options, pos, size, parents): |
1212 |
"""Add a version record to the index."""
|
|
1692.4.1
by Robert Collins
Multiple merges: |
1213 |
self.add_versions(((version_id, options, pos, size, parents),)) |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1214 |
|
1692.2.1
by Robert Collins
Fix knit based push to only perform 2 appends to the target, rather that 2*new-versions. |
1215 |
def add_versions(self, versions): |
1216 |
"""Add multiple versions to the index.
|
|
1217 |
|
|
1218 |
:param versions: a list of tuples:
|
|
1219 |
(version_id, options, pos, size, parents).
|
|
1220 |
"""
|
|
1221 |
lines = [] |
|
1222 |
for version_id, options, pos, size, parents in versions: |
|
1223 |
line = "\n%s %s %s %s %s :" % (version_id.encode('utf-8'), |
|
1224 |
','.join(options), |
|
1225 |
pos, |
|
1226 |
size, |
|
1227 |
self._version_list_to_index(parents)) |
|
1692.4.1
by Robert Collins
Multiple merges: |
1228 |
assert isinstance(line, str), \ |
1229 |
'content must be utf-8 encoded: %r' % (line,) |
|
1692.2.1
by Robert Collins
Fix knit based push to only perform 2 appends to the target, rather that 2*new-versions. |
1230 |
lines.append(line) |
1231 |
self._transport.append(self._filename, StringIO(''.join(lines))) |
|
1232 |
# cache after writing, so that a failed write leads to missing cache
|
|
1233 |
# entries not extra ones. XXX TODO: RBC 20060502 in the event of a
|
|
1234 |
# failure, reload the index or flush it or some such, to prevent
|
|
1235 |
# writing records that did complete twice.
|
|
1236 |
for version_id, options, pos, size, parents in versions: |
|
1237 |
self._cache_version(version_id, options, pos, size, parents) |
|
1238 |
||
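# Usage sketch (hypothetical values): record a fulltext and a delta whose
# parent is 'rev-1'; the positions and sizes would normally come from
# _KnitData.add_record():
#
#   index.add_versions([('rev-1', ['fulltext'], 0, 120, []),
#                       ('rev-2', ['line-delta'], 120, 74, ['rev-1'])])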
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1239 |
def has_version(self, version_id): |
1240 |
"""True if the version is in the index."""
|
|
1241 |
return self._cache.has_key(version_id) |
|
1242 |
||
1243 |
def get_position(self, version_id): |
|
1244 |
"""Return data position and size of specified version."""
|
|
1245 |
return (self._cache[version_id][2], \ |
|
1246 |
self._cache[version_id][3]) |
|
1247 |
||
1248 |
def get_method(self, version_id): |
|
1249 |
"""Return compression method of specified version."""
|
|
1250 |
options = self._cache[version_id][1] |
|
1251 |
if 'fulltext' in options: |
|
1252 |
return 'fulltext' |
|
1253 |
else: |
|
1254 |
assert 'line-delta' in options |
|
1255 |
return 'line-delta' |
|
1256 |
||
1257 |
def get_options(self, version_id): |
|
1258 |
return self._cache[version_id][1] |
|
1259 |
||
1260 |
def get_parents(self, version_id): |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1261 |
"""Return parents of specified version ignoring ghosts."""
|
1262 |
return [parent for parent in self._cache[version_id][4] |
|
1263 |
if parent in self._cache] |
|
1264 |
||
1265 |
def get_parents_with_ghosts(self, version_id): |
|
1759.2.1
by Jelmer Vernooij
Fix some types (found using aspell). |
1266 |
"""Return parents of specified version with ghosts."""
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1267 |
return self._cache[version_id][4] |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1268 |
|
1269 |
def check_versions_present(self, version_ids): |
|
1270 |
"""Check that all specified versions are present."""
|
|
1271 |
version_ids = set(version_ids) |
|
1272 |
for version_id in list(version_ids): |
|
1273 |
if version_id in self._cache: |
|
1274 |
version_ids.remove(version_id) |
|
1275 |
if version_ids: |
|
1276 |
raise RevisionNotPresent(list(version_ids)[0], self.filename) |
|
1277 |
||
1278 |
||
1279 |
class _KnitData(_KnitComponentFile): |
|
1280 |
"""Contents of the knit data file"""
|
|
1281 |
||
1666.1.6
by Robert Collins
Make knit the default format. |
1282 |
HEADER = "# bzr knit data 8\n" |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1283 |
|
1666.1.6
by Robert Collins
Make knit the default format. |
1284 |
def __init__(self, transport, filename, mode, create=False, file_mode=None): |
1563.2.5
by Robert Collins
Remove unused transaction references from knit.py and the versionedfile interface. |
1285 |
_KnitComponentFile.__init__(self, transport, filename, mode) |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1286 |
self._checked = False |
1563.2.35
by Robert Collins
cleanup deprecation warnings and finish conversion so the inventory is knit based too. |
1287 |
if create: |
1666.1.6
by Robert Collins
Make knit the default format. |
1288 |
self._transport.put(self._filename, StringIO(''), mode=file_mode) |
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1289 |
|
1290 |
def clear_cache(self): |
|
1291 |
"""Clear the record cache."""
|
|
1756.3.23
by Aaron Bentley
Remove knit caches |
1292 |
pass
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1293 |
|
1294 |
def _open_file(self): |
|
1711.7.25
by John Arbash Meinel
try/finally to close files, _KnitData was keeping a handle to a file it never used again, and using transport.rename() when it wanted transport.move() |
1295 |
try: |
1296 |
return self._transport.get(self._filename) |
|
1297 |
except NoSuchFile: |
|
1298 |
pass
|
|
1299 |
return None |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1300 |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1301 |
def _record_to_data(self, version_id, digest, lines): |
1302 |
"""Convert version_id, digest, lines into a raw data block.
|
|
1303 |
|
|
1304 |
:return: (len, a StringIO instance with the raw data ready to read.)
|
|
1305 |
"""
|
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1306 |
sio = StringIO() |
1307 |
data_file = GzipFile(None, mode='wb', fileobj=sio) |
|
1596.2.28
by Robert Collins
more knit profile based tuning. |
1308 |
data_file.writelines(chain( |
1309 |
["version %s %d %s\n" % (version_id.encode('utf-8'), |
|
1310 |
len(lines), |
|
1311 |
digest)], |
|
1312 |
lines, |
|
1628.1.2
by Robert Collins
More knit micro-optimisations. |
1313 |
["end %s\n" % version_id.encode('utf-8')])) |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1314 |
data_file.close() |
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1315 |
length = sio.tell() |
1596.2.28
by Robert Collins
more knit profile based tuning. |
1316 |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1317 |
sio.seek(0) |
1318 |
return length, sio |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1319 |
|
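# Illustrative sketch of the payload gzipped by _record_to_data above
# (hypothetical id, digest and text): for a two-line text the uncompressed
# bytes are
#
#   version rev-1 2 <sha1-of-expanded-text>
#   <line one>
#   <line two>
#   end rev-1
#
# and the returned StringIO holds the gzipped form of exactly that block.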
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1320 |
def add_raw_record(self, raw_data): |
1692.4.1
by Robert Collins
Multiple merges: |
1321 |
"""Append a prepared record to the data file.
|
1322 |
|
|
1323 |
:return: the offset in the data file raw_data was written.
|
|
1324 |
"""
|
|
1596.2.9
by Robert Collins
Utf8 safety in knit indexes. |
1325 |
assert isinstance(raw_data, str), 'data must be plain bytes' |
1692.2.1
by Robert Collins
Fix knit based push to only perform 2 appends to the target, rather that 2*new-versions. |
1326 |
return self._transport.append(self._filename, StringIO(raw_data)) |
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1327 |
|
1328 |
def add_record(self, version_id, digest, lines): |
|
1329 |
"""Write new text record to disk. Returns the position in the
|
|
1330 |
file where it was written."""
|
|
1331 |
size, sio = self._record_to_data(version_id, digest, lines) |
|
1332 |
# write to disk
|
|
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1333 |
start_pos = self._transport.append(self._filename, sio) |
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1334 |
return start_pos, size |
1335 |
||
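# Usage sketch (hypothetical values; 'sha1_digest' is assumed to be the
# sha1 of the expanded text): append a record and then index it:
#
#   pos, size = data.add_record('rev-1', sha1_digest, ['a\n', 'b\n'])
#   index.add_version('rev-1', ['fulltext'], pos, size, [])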
1336 |
def _parse_record_header(self, version_id, raw_data): |
|
1337 |
"""Parse a record header for consistency.
|
|
1338 |
||
1339 |
:return: the header and the decompressor stream,
|
|
1340 |
as (stream, header_record)
|
|
1341 |
"""
|
|
1342 |
df = GzipFile(mode='rb', fileobj=StringIO(raw_data)) |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1343 |
rec = df.readline().split() |
1344 |
if len(rec) != 4: |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1345 |
raise KnitCorrupt(self._filename, 'unexpected number of elements in record header') |
1596.2.9
by Robert Collins
Utf8 safety in knit indexes. |
1346 |
if rec[1].decode('utf-8') != version_id: |
1594.3.3
by Robert Collins
Bugfix error message output in knit error raising. |
1347 |
raise KnitCorrupt(self._filename, |
1594.3.4
by Robert Collins
Change urllib ranges implementation to be one coalesced range per http request. |
1348 |
'unexpected version, wanted %r, got %r' % ( |
1349 |
version_id, rec[1])) |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1350 |
return df, rec |
1351 |
||
1352 |
def _parse_record(self, version_id, data): |
|
1628.1.2
by Robert Collins
More knit micro-optimisations. |
1353 |
# profiling notes:
|
1354 |
# 4168 calls in 2880 217 internal
|
|
1355 |
# 4168 calls to _parse_record_header in 2121
|
|
1356 |
# 4168 calls to readlines in 330
|
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1357 |
df, rec = self._parse_record_header(version_id, data) |
1628.1.2
by Robert Collins
More knit micro-optimisations. |
1358 |
record_contents = df.readlines() |
1359 |
l = record_contents.pop() |
|
1360 |
assert len(record_contents) == int(rec[2]) |
|
1596.2.9
by Robert Collins
Utf8 safety in knit indexes. |
1361 |
if l.decode('utf-8') != 'end %s\n' % version_id: |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1362 |
raise KnitCorrupt(self._filename, 'unexpected version end line %r, wanted %r' |
1363 |
% (l, version_id)) |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1364 |
df.close() |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1365 |
return record_contents, rec[3] |
1366 |
||
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1367 |
def read_records_iter_raw(self, records): |
1368 |
"""Read text records from data file and yield raw data.
|
|
1369 |
||
1370 |
This unpacks enough of the text record to validate the id is
|
|
1371 |
as expected, but that's all.
|
|
1372 |
||
1373 |
It will actively recompress currently cached records on the
|
|
1374 |
basis that that is cheaper than I/O activity.
|
|
1375 |
"""
|
|
1376 |
# setup an iterator of the external records:
|
|
1377 |
# uses readv so nice and fast we hope.
|
|
1756.3.23
by Aaron Bentley
Remove knit caches |
1378 |
if len(records): |
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1379 |
# grab the disk data needed.
|
1380 |
raw_records = self._transport.readv(self._filename, |
|
1756.3.23
by Aaron Bentley
Remove knit caches |
1381 |
[(pos, size) for version_id, pos, size in records]) |
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1382 |
|
1383 |
for version_id, pos, size in records: |
|
1756.3.23
by Aaron Bentley
Remove knit caches |
1384 |
pos, data = raw_records.next() |
1385 |
# validate the header
|
|
1386 |
df, rec = self._parse_record_header(version_id, data) |
|
1387 |
df.close() |
|
1388 |
yield version_id, data |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1389 |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1390 |
def read_records_iter(self, records): |
1391 |
"""Read text records from data file and yield result.
|
|
1392 |
||
1393 |
Each passed record is a tuple of (version_id, pos, len) and
|
|
1394 |
will be read in the given order. Yields (version_id,
|
|
1395 |
contents, digest).
|
|
1396 |
"""
|
|
1756.3.23
by Aaron Bentley
Remove knit caches |
1397 |
if len(records) == 0: |
1398 |
return
|
|
1628.1.2
by Robert Collins
More knit micro-optimisations. |
1399 |
# profiling notes:
|
1400 |
# 60890 calls for 4168 extractions in 5045, 683 internal.
|
|
1401 |
# 4168 calls to readv in 1411
|
|
1402 |
# 4168 calls to parse_record in 2880
|
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1403 |
|
1756.3.23
by Aaron Bentley
Remove knit caches |
1404 |
# Get unique records, sorted by position
|
1405 |
needed_records = sorted(set(records), key=operator.itemgetter(1)) |
|
1406 |
||
1407 |
# We take it that the transport optimizes the fetching as well
|
|
1408 |
# as possible (i.e. reads contiguous ranges).
|
|
1409 |
response = self._transport.readv(self._filename, |
|
1410 |
[(pos, size) for version_id, pos, size in needed_records]) |
|
1411 |
||
1412 |
record_map = {} |
|
1413 |
for (record_id, pos, size), (pos, data) in \ |
|
1414 |
izip(iter(needed_records), response): |
|
1415 |
content, digest = self._parse_record(record_id, data) |
|
1416 |
record_map[record_id] = (digest, content) |
|
1417 |
||
1418 |
for version_id, pos, size in records: |
|
1419 |
digest, content = record_map[version_id] |
|
1420 |
yield version_id, content, digest |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1421 |
|
1422 |
def read_records(self, records): |
|
1423 |
"""Read records into a dictionary."""
|
|
1424 |
components = {} |
|
1425 |
for record_id, content, digest in self.read_records_iter(records): |
|
1426 |
components[record_id] = (content, digest) |
|
1427 |
return components |
|
1428 |
||
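# Usage sketch (hypothetical positions taken from the index):
#
#   data.read_records([('rev-1', 0, 120), ('rev-2', 120, 74)])
#   # -> {'rev-1': (content_lines, digest), 'rev-2': (content_lines, digest)}
#
# read_records_iter() fetches all requested records with a single readv()
# against the data file and yields them in the order they were asked for.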
1563.2.13
by Robert Collins
InterVersionedFile implemented. |
1429 |
|
1430 |
class InterKnit(InterVersionedFile): |
|
1431 |
"""Optimised code paths for knit to knit operations."""
|
|
1432 |
||
1684.3.3
by Robert Collins
Add a special cased weaves to knit converter. |
1433 |
_matching_file_from_factory = KnitVersionedFile |
1434 |
_matching_file_to_factory = KnitVersionedFile |
|
1563.2.13
by Robert Collins
InterVersionedFile implemented. |
1435 |
|
1436 |
@staticmethod
|
|
1437 |
def is_compatible(source, target): |
|
1438 |
"""Be compatible with knits. """
|
|
1439 |
try: |
|
1440 |
return (isinstance(source, KnitVersionedFile) and |
|
1441 |
isinstance(target, KnitVersionedFile)) |
|
1442 |
except AttributeError: |
|
1443 |
return False |
|
1444 |
||
1563.2.31
by Robert Collins
Convert Knit repositories to use knits. |
1445 |
def join(self, pb=None, msg=None, version_ids=None, ignore_missing=False): |
1563.2.13
by Robert Collins
InterVersionedFile implemented. |
1446 |
"""See InterVersionedFile.join."""
|
1447 |
assert isinstance(self.source, KnitVersionedFile) |
|
1448 |
assert isinstance(self.target, KnitVersionedFile) |
|
1449 |
||
1684.3.2
by Robert Collins
Factor out version_ids-to-join selection in InterVersionedfile. |
1450 |
version_ids = self._get_source_version_ids(version_ids, ignore_missing) |
1563.2.31
by Robert Collins
Convert Knit repositories to use knits. |
1451 |
|
1563.2.13
by Robert Collins
InterVersionedFile implemented. |
1452 |
if not version_ids: |
1453 |
return 0 |
|
1454 |
||
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1455 |
pb = bzrlib.ui.ui_factory.nested_progress_bar() |
1456 |
try: |
|
1457 |
version_ids = list(version_ids) |
|
1458 |
if None in version_ids: |
|
1459 |
version_ids.remove(None) |
|
1460 |
||
1461 |
self.source_ancestry = set(self.source.get_ancestry(version_ids)) |
|
1462 |
this_versions = set(self.target._index.get_versions()) |
|
1463 |
needed_versions = self.source_ancestry - this_versions |
|
1464 |
cross_check_versions = self.source_ancestry.intersection(this_versions) |
|
1465 |
mismatched_versions = set() |
|
1466 |
for version in cross_check_versions: |
|
1467 |
# scan to include needed parents.
|
|
1468 |
n1 = set(self.target.get_parents_with_ghosts(version)) |
|
1469 |
n2 = set(self.source.get_parents_with_ghosts(version)) |
|
1470 |
if n1 != n2: |
|
1471 |
# FIXME TEST this check for cycles being introduced works
|
|
1472 |
# the logic is we have a cycle if in our graph we are an
|
|
1473 |
# ancestor of any of the n2 revisions.
|
|
1474 |
for parent in n2: |
|
1475 |
if parent in n1: |
|
1476 |
# safe
|
|
1477 |
continue
|
|
1478 |
else: |
|
1479 |
parent_ancestors = self.source.get_ancestry(parent) |
|
1480 |
if version in parent_ancestors: |
|
1481 |
raise errors.GraphCycleError([parent, version]) |
|
1482 |
# ensure this parent will be available later.
|
|
1483 |
new_parents = n2.difference(n1) |
|
1484 |
needed_versions.update(new_parents.difference(this_versions)) |
|
1485 |
mismatched_versions.add(version) |
|
1486 |
||
1684.3.3
by Robert Collins
Add a special cased weaves to knit converter. |
1487 |
if not needed_versions and not mismatched_versions: |
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1488 |
return 0 |
1489 |
full_list = topo_sort(self.source.get_graph()) |
|
1490 |
||
1491 |
version_list = [i for i in full_list if (not self.target.has_version(i) |
|
1492 |
and i in needed_versions)] |
|
1493 |
||
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1494 |
# plan the join:
|
1495 |
copy_queue = [] |
|
1496 |
copy_queue_records = [] |
|
1497 |
copy_set = set() |
|
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1498 |
for version_id in version_list: |
1499 |
options = self.source._index.get_options(version_id) |
|
1500 |
parents = self.source._index.get_parents_with_ghosts(version_id) |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1501 |
# check that it will be a consistent copy:
|
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1502 |
for parent in parents: |
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1503 |
# if source has the parent, we must :
|
1504 |
# * already have it or
|
|
1505 |
# * have it scheduled already
|
|
1759.2.2
by Jelmer Vernooij
Revert some of my spelling fixes and fix some typos after review by Aaron. |
1506 |
# otherwise we don't care
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1507 |
assert (self.target.has_version(parent) or |
1508 |
parent in copy_set or |
|
1509 |
not self.source.has_version(parent)) |
|
1510 |
data_pos, data_size = self.source._index.get_position(version_id) |
|
1511 |
copy_queue_records.append((version_id, data_pos, data_size)) |
|
1512 |
copy_queue.append((version_id, options, parents)) |
|
1513 |
copy_set.add(version_id) |
|
1514 |
||
1515 |
# data suck the join:
|
|
1516 |
count = 0 |
|
1517 |
total = len(version_list) |
|
1692.2.1
by Robert Collins
Fix knit based push to only perform 2 appends to the target, rather that 2*new-versions. |
1518 |
raw_datum = [] |
1519 |
raw_records = [] |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1520 |
for (version_id, raw_data), \ |
1521 |
(version_id2, options, parents) in \ |
|
1522 |
izip(self.source._data.read_records_iter_raw(copy_queue_records), |
|
1523 |
copy_queue): |
|
1524 |
assert version_id == version_id2, 'logic error, inconsistent results' |
|
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1525 |
count = count + 1 |
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1526 |
pb.update("Joining knit", count, total) |
1692.2.1
by Robert Collins
Fix knit based push to only perform 2 appends to the target, rather that 2*new-versions. |
1527 |
raw_records.append((version_id, options, parents, len(raw_data))) |
1528 |
raw_datum.append(raw_data) |
|
1529 |
self.target._add_raw_records(raw_records, ''.join(raw_datum)) |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1530 |
|
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1531 |
for version in mismatched_versions: |
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1532 |
# FIXME RBC 20060309 is this needed?
|
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1533 |
n1 = set(self.target.get_parents_with_ghosts(version)) |
1534 |
n2 = set(self.source.get_parents_with_ghosts(version)) |
|
1535 |
# write a combined record to our history preserving the current
|
|
1536 |
# parents as first in the list
|
|
1537 |
new_parents = self.target.get_parents_with_ghosts(version) + list(n2.difference(n1)) |
|
1538 |
self.target.fix_parents(version, new_parents) |
|
1539 |
return count |
|
1540 |
finally: |
|
1541 |
pb.finished() |
|
1563.2.13
by Robert Collins
InterVersionedFile implemented. |
1542 |
|
1543 |
||
1544 |
InterVersionedFile.register_optimiser(InterKnit) |
|
1596.2.24
by Robert Collins
Gzipfile was slightly slower than ideal. |
1545 |
|
1546 |
||
1684.3.3
by Robert Collins
Add a special cased weaves to knit converter. |
1547 |
class WeaveToKnit(InterVersionedFile): |
1548 |
"""Optimised code paths for weave to knit operations."""
|
|
1549 |
||
1550 |
_matching_file_from_factory = bzrlib.weave.WeaveFile |
|
1551 |
_matching_file_to_factory = KnitVersionedFile |
|
1552 |
||
1553 |
@staticmethod
|
|
1554 |
def is_compatible(source, target): |
|
1555 |
"""Be compatible with weaves to knits."""
|
|
1556 |
try: |
|
1557 |
return (isinstance(source, bzrlib.weave.Weave) and |
|
1558 |
isinstance(target, KnitVersionedFile)) |
|
1559 |
except AttributeError: |
|
1560 |
return False |
|
1561 |
||
1562 |
def join(self, pb=None, msg=None, version_ids=None, ignore_missing=False): |
|
1563 |
"""See InterVersionedFile.join."""
|
|
1564 |
assert isinstance(self.source, bzrlib.weave.Weave) |
|
1565 |
assert isinstance(self.target, KnitVersionedFile) |
|
1566 |
||
1567 |
version_ids = self._get_source_version_ids(version_ids, ignore_missing) |
|
1568 |
||
1569 |
if not version_ids: |
|
1570 |
return 0 |
|
1571 |
||
1572 |
pb = bzrlib.ui.ui_factory.nested_progress_bar() |
|
1573 |
try: |
|
1574 |
version_ids = list(version_ids) |
|
1575 |
||
1576 |
self.source_ancestry = set(self.source.get_ancestry(version_ids)) |
|
1577 |
this_versions = set(self.target._index.get_versions()) |
|
1578 |
needed_versions = self.source_ancestry - this_versions |
|
1579 |
cross_check_versions = self.source_ancestry.intersection(this_versions) |
|
1580 |
mismatched_versions = set() |
|
1581 |
for version in cross_check_versions: |
|
1582 |
# scan to include needed parents.
|
|
1583 |
n1 = set(self.target.get_parents_with_ghosts(version)) |
|
1584 |
n2 = set(self.source.get_parents(version)) |
|
1585 |
# if all of n2's parents are in n1, then it's fine.
|
|
1586 |
if n2.difference(n1): |
|
1587 |
# FIXME TEST this check for cycles being introduced works
|
|
1588 |
# the logic is we have a cycle if in our graph we are an
|
|
1589 |
# ancestor of any of the n2 revisions.
|
|
1590 |
for parent in n2: |
|
1591 |
if parent in n1: |
|
1592 |
# safe
|
|
1593 |
continue
|
|
1594 |
else: |
|
1595 |
parent_ancestors = self.source.get_ancestry(parent) |
|
1596 |
if version in parent_ancestors: |
|
1597 |
raise errors.GraphCycleError([parent, version]) |
|
1598 |
# ensure this parent will be available later.
|
|
1599 |
new_parents = n2.difference(n1) |
|
1600 |
needed_versions.update(new_parents.difference(this_versions)) |
|
1601 |
mismatched_versions.add(version) |
|
1602 |
||
1603 |
if not needed_versions and not mismatched_versions: |
|
1604 |
return 0 |
|
1605 |
full_list = topo_sort(self.source.get_graph()) |
|
1606 |
||
1607 |
version_list = [i for i in full_list if (not self.target.has_version(i) |
|
1608 |
and i in needed_versions)] |
|
1609 |
||
1610 |
# do the join:
|
|
1611 |
count = 0 |
|
1612 |
total = len(version_list) |
|
1613 |
for version_id in version_list: |
|
1614 |
pb.update("Converting to knit", count, total) |
|
1615 |
parents = self.source.get_parents(version_id) |
|
1616 |
# check that it will be a consistent copy:
|
|
1617 |
for parent in parents: |
|
1618 |
# if source has the parent, we must already have it
|
|
1619 |
assert (self.target.has_version(parent)) |
|
1620 |
self.target.add_lines( |
|
1621 |
version_id, parents, self.source.get_lines(version_id)) |
|
1622 |
count = count + 1 |
|
1623 |
||
1624 |
for version in mismatched_versions: |
|
1625 |
# FIXME RBC 20060309 is this needed?
|
|
1626 |
n1 = set(self.target.get_parents_with_ghosts(version)) |
|
1627 |
n2 = set(self.source.get_parents(version)) |
|
1628 |
# write a combined record to our history preserving the current
|
|
1629 |
# parents as first in the list
|
|
1630 |
new_parents = self.target.get_parents_with_ghosts(version) + list(n2.difference(n1)) |
|
1631 |
self.target.fix_parents(version, new_parents) |
|
1632 |
return count |
|
1633 |
finally: |
|
1634 |
pb.finished() |
|
1635 |
||
1636 |
||
1637 |
InterVersionedFile.register_optimiser(WeaveToKnit) |
|
1638 |
||
1639 |
||
1711.2.11
by John Arbash Meinel
Rename patiencediff.SequenceMatcher => PatienceSequenceMatcher and knit.SequenceMatcher => KnitSequenceMatcher |
1640 |
class KnitSequenceMatcher(difflib.SequenceMatcher): |
1596.2.35
by Robert Collins
Subclass SequenceMatcher to get a slightly faster (in our case) find_longest_match routine. |
1641 |
"""Knit tuned sequence matcher.
|
1642 |
||
1643 |
This is based on profiling of difflib which indicated some improvements
|
|
1644 |
for our usage pattern.
|
|
1645 |
"""
|
|
1646 |
||
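# Usage sketch: a drop-in replacement for difflib.SequenceMatcher on lists
# of lines, e.g.
#
#   KnitSequenceMatcher(None, plain_a, plain_b).get_matching_blocks()
#
# as used by plan_merge() above.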
1647 |
def find_longest_match(self, alo, ahi, blo, bhi): |
|
1648 |
"""Find longest matching block in a[alo:ahi] and b[blo:bhi].
|
|
1649 |
||
1650 |
If isjunk is not defined:
|
|
1651 |
||
1652 |
Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where
|
|
1653 |
alo <= i <= i+k <= ahi
|
|
1654 |
blo <= j <= j+k <= bhi
|
|
1655 |
and for all (i',j',k') meeting those conditions,
|
|
1656 |
k >= k'
|
|
1657 |
i <= i'
|
|
1658 |
and if i == i', j <= j'
|
|
1659 |
||
1660 |
In other words, of all maximal matching blocks, return one that
|
|
1661 |
starts earliest in a, and of all those maximal matching blocks that
|
|
1662 |
start earliest in a, return the one that starts earliest in b.
|
|
1663 |
||
1664 |
>>> s = SequenceMatcher(None, " abcd", "abcd abcd")
|
|
1665 |
>>> s.find_longest_match(0, 5, 0, 9)
|
|
1666 |
(0, 4, 5)
|
|
1667 |
||
1668 |
If isjunk is defined, first the longest matching block is
|
|
1669 |
determined as above, but with the additional restriction that no
|
|
1670 |
junk element appears in the block. Then that block is extended as
|
|
1671 |
far as possible by matching (only) junk elements on both sides. So
|
|
1672 |
the resulting block never matches on junk except as identical junk
|
|
1673 |
happens to be adjacent to an "interesting" match.
|
|
1674 |
||
1675 |
Here's the same example as before, but considering blanks to be
|
|
1676 |
junk. That prevents " abcd" from matching the " abcd" at the tail
|
|
1677 |
end of the second sequence directly. Instead only the "abcd" can
|
|
1678 |
match, and matches the leftmost "abcd" in the second sequence:
|
|
1679 |
||
1680 |
>>> s = SequenceMatcher(lambda x: x==" ", " abcd", "abcd abcd")
|
|
1681 |
>>> s.find_longest_match(0, 5, 0, 9)
|
|
1682 |
(1, 0, 4)
|
|
1683 |
||
1684 |
If no blocks match, return (alo, blo, 0).
|
|
1685 |
||
1686 |
>>> s = SequenceMatcher(None, "ab", "c")
|
|
1687 |
>>> s.find_longest_match(0, 2, 0, 1)
|
|
1688 |
(0, 0, 0)
|
|
1689 |
"""
|
|
1690 |
||
1691 |
# CAUTION: stripping common prefix or suffix would be incorrect.
|
|
1692 |
# E.g.,
|
|
1693 |
# ab
|
|
1694 |
# acab
|
|
1695 |
# Longest matching block is "ab", but if common prefix is
|
|
1696 |
# stripped, it's "a" (tied with "b"). UNIX(tm) diff does so
|
|
1697 |
# strip, so ends up claiming that ab is changed to acab by
|
|
1698 |
# inserting "ca" in the middle. That's minimal but unintuitive:
|
|
1699 |
# "it's obvious" that someone inserted "ac" at the front.
|
|
1700 |
# Windiff ends up at the same place as diff, but by pairing up
|
|
1701 |
# the unique 'b's and then matching the first two 'a's.
|
|
1702 |
||
1703 |
a, b, b2j, isbjunk = self.a, self.b, self.b2j, self.isbjunk |
|
1704 |
besti, bestj, bestsize = alo, blo, 0 |
|
1705 |
# find longest junk-free match
|
|
1706 |
# during an iteration of the loop, j2len[j] = length of longest
|
|
1707 |
# junk-free match ending with a[i-1] and b[j]
|
|
1708 |
j2len = {} |
|
1709 |
# nothing = []
|
|
1710 |
b2jget = b2j.get |
|
1711 |
for i in xrange(alo, ahi): |
|
1712 |
# look at all instances of a[i] in b; note that because
|
|
1713 |
# b2j has no junk keys, the loop is skipped if a[i] is junk
|
|
1714 |
j2lenget = j2len.get |
|
1715 |
newj2len = {} |
|
1716 |
||
1759.2.1
by Jelmer Vernooij
Fix some types (found using aspell). |
1717 |
# changing b2j.get(a[i], nothing) to a try:KeyError pair produced the
|
1596.2.35
by Robert Collins
Subclass SequenceMatcher to get a slightly faster (in our case) find_longest_match routine. |
1718 |
# following improvement
|
1719 |
# 704 0 4650.5320 2620.7410 bzrlib.knit:1336(find_longest_match)
|
|
1720 |
# +326674 0 1655.1210 1655.1210 +<method 'get' of 'dict' objects>
|
|
1721 |
# +76519 0 374.6700 374.6700 +<method 'has_key' of 'dict' objects>
|
|
1722 |
# to
|
|
1723 |
# 704 0 3733.2820 2209.6520 bzrlib.knit:1336(find_longest_match)
|
|
1724 |
# +211400 0 1147.3520 1147.3520 +<method 'get' of 'dict' objects>
|
|
1725 |
# +76519 0 376.2780 376.2780 +<method 'has_key' of 'dict' objects>
|
|
1726 |
||
1727 |
try: |
|
1728 |
js = b2j[a[i]] |
|
1729 |
except KeyError: |
|
1730 |
pass
|
|
1731 |
else: |
|
1732 |
for j in js: |
|
1733 |
# a[i] matches b[j]
|
|
1734 |
if j >= blo: |
|
1735 |
if j >= bhi: |
|
1736 |
break
|
|
1737 |
k = newj2len[j] = 1 + j2lenget(-1 + j, 0) |
|
1738 |
if k > bestsize: |
|
1739 |
besti, bestj, bestsize = 1 + i-k, 1 + j-k, k |
|
1740 |
j2len = newj2len |
|
1741 |
||
1742 |
# Extend the best by non-junk elements on each end. In particular,
|
|
1743 |
# "popular" non-junk elements aren't in b2j, which greatly speeds
|
|
1744 |
# the inner loop above, but also means "the best" match so far
|
|
1745 |
# doesn't contain any junk *or* popular non-junk elements.
|
|
1746 |
while besti > alo and bestj > blo and \ |
|
1747 |
not isbjunk(b[bestj-1]) and \ |
|
1748 |
a[besti-1] == b[bestj-1]: |
|
1749 |
besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 |
|
1750 |
while besti+bestsize < ahi and bestj+bestsize < bhi and \ |
|
1751 |
not isbjunk(b[bestj+bestsize]) and \ |
|
1752 |
a[besti+bestsize] == b[bestj+bestsize]: |
|
1753 |
bestsize += 1 |
|
1754 |
||
1755 |
# Now that we have a wholly interesting match (albeit possibly
|
|
1756 |
# empty!), we may as well suck up the matching junk on each
|
|
1757 |
# side of it too. Can't think of a good reason not to, and it
|
|
1758 |
# saves post-processing the (possibly considerable) expense of
|
|
1759 |
# figuring out what to do with it. In the case of an empty
|
|
1760 |
# interesting match, this is clearly the right thing to do,
|
|
1761 |
# because no other kind of match is possible in the regions.
|
|
1762 |
while besti > alo and bestj > blo and \ |
|
1763 |
isbjunk(b[bestj-1]) and \ |
|
1764 |
a[besti-1] == b[bestj-1]: |
|
1765 |
besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 |
|
1766 |
while besti+bestsize < ahi and bestj+bestsize < bhi and \ |
|
1767 |
isbjunk(b[bestj+bestsize]) and \ |
|
1768 |
a[besti+bestsize] == b[bestj+bestsize]: |
|
1769 |
bestsize = bestsize + 1 |
|
1770 |
||
1771 |
return besti, bestj, bestsize |
|
1772 |