# Copyright (C) 2005, 2006 by Canonical Ltd
# Written by Martin Pool.
# Modified by Johan Rydberg <jrydberg@gnu.org>
# Modified by Robert Collins <robert.collins@canonical.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

"""Knit versionedfile implementation.
|
|
21 |
||
22 |
A knit is a versioned file implementation that supports efficient append-only
|
|
23 |
updates.
|
|
1563.2.6
by Robert Collins
Start check tests for knits (pending), and remove dead code. |
24 |
|
25 |
Knit file layout:
|
|
26 |
lifeless: the data file is made up of "delta records". each delta record has a delta header
|
|
27 |
that contains; (1) a version id, (2) the size of the delta (in lines), and (3) the digest of
|
|
28 |
the -expanded data- (ie, the delta applied to the parent). the delta also ends with a
|
|
29 |
end-marker; simply "end VERSION"
|
|
30 |
||
31 |
deltas can be line deltas or full contents.
|
|
32 |
... the 8's there are the index number of the annotation.
|
|
33 |
version robertc@robertcollins.net-20051003014215-ee2990904cc4c7ad 7 c7d23b2a5bd6ca00e8e266cec0ec228158ee9f9e
|
|
34 |
59,59,3
|
|
35 |
8
|
|
36 |
8 if ie.executable:
|
|
37 |
8 e.set('executable', 'yes')
|
|
38 |
130,130,2
|
|
39 |
8 if elt.get('executable') == 'yes':
|
|
40 |
8 ie.executable = True
|
|
41 |
end robertc@robertcollins.net-20051003014215-ee2990904cc4c7ad
|
|
42 |
||
43 |
||
44 |
what's in an index:
|
|
45 |
09:33 < jrydberg> lifeless: each index is made up of a tuple of; version id, options, position, size, parents
|
|
46 |
09:33 < jrydberg> lifeless: the parents are currently dictionary compressed
|
|
47 |
09:33 < jrydberg> lifeless: (meaning it currently does not support ghosts)
|
|
48 |
09:33 < lifeless> right
|
|
49 |
09:33 < jrydberg> lifeless: the position and size is the range in the data file
|
|
50 |
||
51 |
||
52 |
so the index sequence is the dictionary compressed sequence number used
|
|
53 |
in the deltas to provide line annotation
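
for example, an index entry (one line of the index file; see _KnitIndex
below for the full description) might look like:

 version-2 line-delta 123 45 0 :

meaning: version-2 is stored as a line-delta starting at byte 123 of the
data file and 45 bytes long, with one parent -- the version whose index
sequence number is 0 (dictionary compressed); the trailing ' :' terminates
the record.  (The values shown are illustrative only.)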
|
|
54 |
||
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
55 |
"""
|
56 |
||
1563.2.6
by Robert Collins
Start check tests for knits (pending), and remove dead code. |
57 |
# TODOS:
|
58 |
# 10:16 < lifeless> make partial index writes safe
|
|
59 |
# 10:16 < lifeless> implement 'knit.check()' like weave.check()
|
|
60 |
# 10:17 < lifeless> record known ghosts so we can detect when they are filled in rather than the current 'reweave
|
|
61 |
# always' approach.
|
|
1563.2.11
by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis. |
62 |
# move sha1 out of the content so that join is faster at verifying parents
|
63 |
# record content length ?
|
|
1563.2.6
by Robert Collins
Start check tests for knits (pending), and remove dead code. |
64 |
|
65 |
||
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
66 |
from copy import copy |
1563.2.11
by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis. |
67 |
from cStringIO import StringIO |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
68 |
import difflib |
1596.2.28
by Robert Collins
more knit profile based tuning. |
69 |
from itertools import izip, chain |
1563.2.6
by Robert Collins
Start check tests for knits (pending), and remove dead code. |
70 |
import os |
1628.1.2
by Robert Collins
More knit micro-optimisations. |
71 |
import sys |
1594.2.19
by Robert Collins
More coalescing tweaks, and knit feedback. |
72 |
|
1594.2.17
by Robert Collins
Better readv coalescing, now with test, and progress during knit index reading. |
73 |
import bzrlib |
1563.2.11
by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis. |
74 |
import bzrlib.errors as errors |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
75 |
from bzrlib.errors import FileExists, NoSuchFile, KnitError, \ |
76 |
InvalidRevisionId, KnitCorrupt, KnitHeaderError, \ |
|
77 |
RevisionNotPresent, RevisionAlreadyPresent |
|
1641.1.1
by Robert Collins
* Various microoptimisations to knit and gzip - reducing function call |
78 |
from bzrlib.tuned_gzip import * |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
79 |
from bzrlib.trace import mutter |
80 |
from bzrlib.osutils import contains_whitespace, contains_linebreaks, \ |
|
1664.2.13
by Aaron Bentley
Knit plan_merge uses slices instead of xenumerate |
81 |
sha_strings
|
1563.2.13
by Robert Collins
InterVersionedFile implemented. |
82 |
from bzrlib.versionedfile import VersionedFile, InterVersionedFile |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
83 |
from bzrlib.tsort import topo_sort |
1684.3.3
by Robert Collins
Add a special cased weaves to knit converter. |
84 |
import bzrlib.weave |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
85 |
|
86 |
||
87 |
# TODO: Split out code specific to this format into an associated object.
|
|
88 |
||
89 |
# TODO: Can we put in some kind of value to check that the index and data
|
|
90 |
# files belong together?
|
|
91 |
||
92 |
# TODO: accommodate binaries, perhaps by storing a byte count
|
|
93 |
||
94 |
# TODO: function to check whole file
|
|
95 |
||
96 |
# TODO: atomically append data, then measure backwards from the cursor
|
|
97 |
# position after writing to work out where it was located. we may need to
|
|
98 |
# bypass python file buffering.
|
|
99 |
||
100 |
DATA_SUFFIX = '.knit' |
|
101 |
INDEX_SUFFIX = '.kndx' |
|
102 |
||
103 |
||
104 |
class KnitContent(object): |
|
105 |
"""Content of a knit version to which deltas can be applied."""
|
|
106 |
||
107 |
def __init__(self, lines): |
|
108 |
self._lines = lines |
|
109 |
||
110 |
def annotate_iter(self): |
|
111 |
"""Yield tuples of (origin, text) for each content line."""
|
|
112 |
for origin, text in self._lines: |
|
113 |
yield origin, text |
|
114 |
||
115 |
def annotate(self): |
|
116 |
"""Return a list of (origin, text) tuples."""
|
|
117 |
return list(self.annotate_iter()) |
|
118 |
||
119 |
def line_delta_iter(self, new_lines): |
|
1596.2.32
by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility. |
120 |
"""Generate line-based delta from this content to new_lines."""
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
121 |
new_texts = [text for origin, text in new_lines._lines] |
122 |
old_texts = [text for origin, text in self._lines] |
|
1596.2.35
by Robert Collins
Subclass SequenceMatcher to get a slightly faster (in our case) find_longest_match routine. |
123 |
s = SequenceMatcher(None, old_texts, new_texts) |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
124 |
for op in s.get_opcodes(): |
125 |
if op[0] == 'equal': |
|
126 |
continue
|
|
1596.2.36
by Robert Collins
add a get_delta api to versioned_file. |
127 |
# ofrom oto length data
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
128 |
yield (op[1], op[2], op[4]-op[3], new_lines._lines[op[3]:op[4]]) |
129 |
||
130 |
def line_delta(self, new_lines): |
|
131 |
return list(self.line_delta_iter(new_lines)) |
|
132 |
||
133 |
def text(self): |
|
134 |
return [text for origin, text in self._lines] |
|
135 |
||
136 |
||
137 |
class _KnitFactory(object): |
|
138 |
"""Base factory for creating content objects."""
|
|
139 |
||
140 |
def make(self, lines, version): |
|
141 |
num_lines = len(lines) |
|
142 |
return KnitContent(zip([version] * num_lines, lines)) |
|
143 |
||
144 |
||
145 |
class KnitAnnotateFactory(_KnitFactory): |
|
146 |
"""Factory for creating annotated Content objects."""
|
|
147 |
||
148 |
annotated = True |
|
149 |
||
150 |
def parse_fulltext(self, content, version): |
|
1596.2.7
by Robert Collins
Remove the requirement for reannotation in knit joins. |
151 |
"""Convert fulltext to internal representation
|
152 |
||
153 |
fulltext content is of the format
|
|
154 |
revid(utf8) plaintext\n
|
|
155 |
internal representation is of the format:
|
|
156 |
(revid, plaintext)
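
for example (illustrative values only), the stored line

 rev-1 hello world\n

is returned as the tuple (u'rev-1', 'hello world\n') - the revision id is
decoded from utf-8, the text is left as an 8-bit string.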
|
|
157 |
"""
|
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
158 |
lines = [] |
159 |
for line in content: |
|
160 |
origin, text = line.split(' ', 1) |
|
1596.2.7
by Robert Collins
Remove the requirement for reannotation in knit joins. |
161 |
lines.append((origin.decode('utf-8'), text)) |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
162 |
return KnitContent(lines) |
163 |
||
164 |
def parse_line_delta_iter(self, lines, version):
|
1628.1.2
by Robert Collins
More knit micro-optimisations. |
165 |
for result_item in self.parse_line_delta(lines, version):
166 |
yield result_item |
|
167 |
||
168 |
def parse_line_delta(self, lines, version): |
|
1596.2.7
by Robert Collins
Remove the requirement for reannotation in knit joins. |
169 |
"""Convert a line based delta into internal representation.
|
170 |
||
171 |
line delta is in the form of:
|
|
172 |
intstart intend intcount
|
|
173 |
1..count lines:
|
|
174 |
revid(utf8) newline\n
|
|
175 |
internal representation is
|
|
176 |
(start, end, count, [1..count tuples (revid, newline)])
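
for example (illustrative values only), the serialised delta

 8,9,2
 rev-1 first replacement line\n
 rev-2 second replacement line\n

parses to (8, 9, 2, [(u'rev-1', 'first replacement line\n'),
(u'rev-2', 'second replacement line\n')]).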
|
|
177 |
"""
|
|
1628.1.2
by Robert Collins
More knit micro-optimisations. |
178 |
result = [] |
179 |
lines = iter(lines) |
|
180 |
next = lines.next |
|
181 |
# walk through the lines parsing.
|
|
182 |
for header in lines: |
|
183 |
start, end, count = [int(n) for n in header.split(',')] |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
184 |
contents = [] |
1628.1.2
by Robert Collins
More knit micro-optimisations. |
185 |
remaining = count |
186 |
while remaining: |
|
187 |
origin, text = next().split(' ', 1) |
|
188 |
remaining -= 1 |
|
1596.2.7
by Robert Collins
Remove the requirement for reannotation in knit joins. |
189 |
contents.append((origin.decode('utf-8'), text)) |
1628.1.2
by Robert Collins
More knit micro-optimisations. |
190 |
result.append((start, end, count, contents)) |
191 |
return result |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
192 |
|
193 |
def lower_fulltext(self, content): |
|
1596.2.7
by Robert Collins
Remove the requirement for reannotation in knit joins. |
194 |
"""convert a fulltext content record into a serializable form.
|
195 |
||
196 |
see parse_fulltext which this inverts.
|
|
197 |
"""
|
|
198 |
return ['%s %s' % (o.encode('utf-8'), t) for o, t in content._lines] |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
199 |
|
200 |
def lower_line_delta(self, delta): |
|
1596.2.7
by Robert Collins
Remove the requirement for reannotation in knit joins. |
201 |
"""convert a delta into a serializable form.
|
202 |
||
1628.1.2
by Robert Collins
More knit micro-optimisations. |
203 |
See parse_line_delta which this inverts.
|
1596.2.7
by Robert Collins
Remove the requirement for reannotation in knit joins. |
204 |
"""
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
205 |
out = [] |
206 |
for start, end, c, lines in delta: |
|
207 |
out.append('%d,%d,%d\n' % (start, end, c)) |
|
208 |
for origin, text in lines: |
|
1596.2.7
by Robert Collins
Remove the requirement for reannotation in knit joins. |
209 |
out.append('%s %s' % (origin.encode('utf-8'), text)) |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
210 |
return out |
211 |
||
212 |
||
213 |
class KnitPlainFactory(_KnitFactory): |
|
214 |
"""Factory for creating plain Content objects."""
|
|
215 |
||
216 |
annotated = False |
|
217 |
||
218 |
def parse_fulltext(self, content, version): |
|
1596.2.7
by Robert Collins
Remove the requirement for reannotation in knit joins. |
219 |
"""This parses an unannotated fulltext.
|
220 |
||
221 |
Note that this is not a noop - the internal representation
|
|
222 |
has (versionid, line) - it's just a constant versionid.
|
|
223 |
"""
|
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
224 |
return self.make(content, version) |
225 |
||
226 |
def parse_line_delta_iter(self, lines, version): |
|
227 |
while lines: |
|
228 |
header = lines.pop(0) |
|
229 |
start, end, c = [int(n) for n in header.split(',')] |
|
230 |
yield start, end, c, zip([version] * c, lines[:c]) |
|
231 |
del lines[:c] |
|
232 |
||
233 |
def parse_line_delta(self, lines, version): |
|
234 |
return list(self.parse_line_delta_iter(lines, version)) |
|
235 |
||
236 |
def lower_fulltext(self, content): |
|
237 |
return content.text() |
|
238 |
||
239 |
def lower_line_delta(self, delta): |
|
240 |
out = [] |
|
241 |
for start, end, c, lines in delta: |
|
242 |
out.append('%d,%d,%d\n' % (start, end, c)) |
|
243 |
out.extend([text for origin, text in lines]) |
|
244 |
return out |
|
245 |
||
246 |
||
247 |
def make_empty_knit(transport, relpath): |
|
248 |
"""Construct a empty knit at the specified location."""
|
|
1563.2.5
by Robert Collins
Remove unused transaction references from knit.py and the versionedfile interface. |
249 |
k = KnitVersionedFile(relpath, transport, access_mode='w', factory=KnitPlainFactory(), create=True)
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
250 |
k._data._open_file() |
251 |
||
252 |
||
253 |
class KnitVersionedFile(VersionedFile): |
|
254 |
"""Weave-like structure with faster random access.
|
|
255 |
||
256 |
A knit stores a number of texts and a summary of the relationships
|
|
257 |
between them. Texts are identified by a string version-id. Texts
|
|
258 |
are normally stored and retrieved as a series of lines, but can
|
|
259 |
also be passed as single strings.
|
|
260 |
||
261 |
Lines are stored with the trailing newline (if any) included, to
|
|
262 |
avoid special cases for files with no final newline. Lines are
|
|
263 |
composed of 8-bit characters, not unicode. The combination of
|
|
264 |
these approaches should mean any 'binary' file can be safely
|
|
265 |
stored and retrieved.
|
|
266 |
"""
|
|
267 |
||
1563.2.16
by Robert Collins
Change WeaveStore into VersionedFileStore and make its versoined file class parameterisable. |
268 |
def __init__(self, relpath, transport, file_mode=None, access_mode=None, factory=None, |
1563.2.25
by Robert Collins
Merge in upstream. |
269 |
basis_knit=None, delta=True, create=False): |
270 |
"""Construct a knit at location specified by relpath.
|
|
271 |
|
|
272 |
:param create: If not True, only open an existing knit.
|
|
273 |
"""
|
|
1563.2.16
by Robert Collins
Change WeaveStore into VersionedFileStore and make its versoined file class parameterisable. |
274 |
if access_mode is None: |
275 |
access_mode = 'w' |
|
1594.2.23
by Robert Collins
Test versioned file storage handling of clean/dirty status for accessed versioned files. |
276 |
super(KnitVersionedFile, self).__init__(access_mode) |
1563.2.16
by Robert Collins
Change WeaveStore into VersionedFileStore and make its versoined file class parameterisable. |
277 |
assert access_mode in ('r', 'w'), "invalid mode specified %r" % access_mode |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
278 |
assert not basis_knit or isinstance(basis_knit, KnitVersionedFile), \ |
279 |
type(basis_knit) |
|
280 |
||
281 |
self.transport = transport |
|
282 |
self.filename = relpath |
|
283 |
self.basis_knit = basis_knit |
|
1563.2.16
by Robert Collins
Change WeaveStore into VersionedFileStore and make its versoined file class parameterisable. |
284 |
self.factory = factory or KnitAnnotateFactory() |
285 |
self.writable = (access_mode == 'w') |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
286 |
self.delta = delta |
287 |
||
288 |
self._index = _KnitIndex(transport, relpath + INDEX_SUFFIX, |
|
1666.1.6
by Robert Collins
Make knit the default format. |
289 |
access_mode, create=create, file_mode=file_mode) |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
290 |
self._data = _KnitData(transport, relpath + DATA_SUFFIX, |
1666.1.6
by Robert Collins
Make knit the default format. |
291 |
access_mode, create=create and not len(self), file_mode=file_mode) |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
292 |
|
1596.2.37
by Robert Collins
Switch to delta based content copying in the generic versioned file copier. |
293 |
def _add_delta(self, version_id, parents, delta_parent, sha1, noeol, delta): |
294 |
"""See VersionedFile._add_delta()."""
|
|
295 |
self._check_add(version_id, []) # should we check the lines ? |
|
296 |
self._check_versions_present(parents) |
|
297 |
present_parents = [] |
|
298 |
ghosts = [] |
|
299 |
parent_texts = {} |
|
300 |
for parent in parents: |
|
301 |
if not self.has_version(parent): |
|
302 |
ghosts.append(parent) |
|
303 |
else: |
|
304 |
present_parents.append(parent) |
|
305 |
||
306 |
if delta_parent is None: |
|
307 |
# reconstitute as full text.
|
|
308 |
assert len(delta) == 1 or len(delta) == 0 |
|
309 |
if len(delta): |
|
310 |
assert delta[0][0] == 0 |
|
1596.2.38
by Robert Collins
rollback from using deltas to using fulltexts - deltas need more work to be ready. |
311 |
assert delta[0][1] == 0, delta[0][1] |
1596.2.37
by Robert Collins
Switch to delta based content copying in the generic versioned file copier. |
312 |
return super(KnitVersionedFile, self)._add_delta(version_id, |
313 |
parents, |
|
314 |
delta_parent, |
|
315 |
sha1, |
|
316 |
noeol, |
|
317 |
delta) |
|
318 |
||
319 |
digest = sha1 |
|
320 |
||
321 |
options = [] |
|
322 |
if noeol: |
|
323 |
options.append('no-eol') |
|
324 |
||
325 |
if delta_parent is not None: |
|
326 |
# determine the current delta chain length.
|
|
327 |
# To speed the extraction of texts, the delta chain is limited
|
|
328 |
# to a fixed number of deltas. This should minimize both
|
|
329 |
# I/O and the time spent applying deltas.
|
|
330 |
count = 0 |
|
331 |
delta_parents = [delta_parent] |
|
332 |
while count < 25: |
|
333 |
parent = delta_parents[0] |
|
334 |
method = self._index.get_method(parent) |
|
335 |
if method == 'fulltext': |
|
336 |
break
|
|
337 |
delta_parents = self._index.get_parents(parent) |
|
338 |
count = count + 1 |
|
339 |
if method == 'line-delta': |
|
340 |
# did not find a fulltext in the delta limit.
|
|
341 |
# just do a normal insertion.
|
|
342 |
return super(KnitVersionedFile, self)._add_delta(version_id, |
|
343 |
parents, |
|
344 |
delta_parent, |
|
345 |
sha1, |
|
346 |
noeol, |
|
347 |
delta) |
|
348 |
||
349 |
options.append('line-delta') |
|
350 |
store_lines = self.factory.lower_line_delta(delta) |
|
351 |
||
352 |
where, size = self._data.add_record(version_id, digest, store_lines) |
|
353 |
self._index.add_version(version_id, options, where, size, parents) |
|
354 |
||
1692.2.1
by Robert Collins
Fix knit based push to only perform 2 appends to the target, rather that 2*new-versions. |
355 |
def _add_raw_records(self, records, data): |
356 |
"""Add all the records 'records' with data pre-joined in 'data'.
|
|
357 |
||
358 |
:param records: A list of tuples(version_id, options, parents, size).
|
|
359 |
:param data: The data for the records. When it is written, the records
|
|
360 |
are adjusted to have pos pointing into data by the sum of
|
|
361 |
the preceding records' sizes.
|
|
362 |
"""
|
|
363 |
# write all the data
|
|
364 |
pos = self._data.add_raw_record(data) |
|
365 |
index_entries = [] |
|
366 |
for (version_id, options, parents, size) in records: |
|
367 |
index_entries.append((version_id, options, pos, size, parents)) |
|
368 |
pos += size |
|
369 |
self._index.add_versions(index_entries) |
|
370 |
||
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
371 |
def clear_cache(self): |
372 |
"""Clear the data cache only."""
|
|
373 |
self._data.clear_cache() |
|
374 |
||
1563.2.15
by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages. |
375 |
def copy_to(self, name, transport): |
376 |
"""See VersionedFile.copy_to()."""
|
|
377 |
# copy the current index to a temp index to avoid racing with local
|
|
378 |
# writes
|
|
1666.1.6
by Robert Collins
Make knit the default format. |
379 |
transport.put(name + INDEX_SUFFIX + '.tmp', self.transport.get(self._index._filename),) |
1563.2.15
by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages. |
380 |
# copy the data file
|
381 |
transport.put(name + DATA_SUFFIX, self._data._open_file()) |
|
382 |
# rename the copied index into place
|
|
383 |
transport.rename(name + INDEX_SUFFIX + '.tmp', name + INDEX_SUFFIX) |
|
384 |
||
1563.2.13
by Robert Collins
InterVersionedFile implemented. |
385 |
def create_empty(self, name, transport, mode=None): |
1563.2.25
by Robert Collins
Merge in upstream. |
386 |
return KnitVersionedFile(name, transport, factory=self.factory, delta=self.delta, create=True) |
1563.2.15
by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages. |
387 |
|
1594.2.21
by Robert Collins
Teach versioned files to prevent mutation after finishing. |
388 |
def _fix_parents(self, version, new_parents): |
1594.2.7
by Robert Collins
Add versionedfile.fix_parents api for correcting data post hoc. |
389 |
"""Fix the parents list for version.
|
390 |
|
|
391 |
This is done by appending a new version to the index
|
|
392 |
with identical data except for the parents list.
|
|
393 |
The parents list must be a superset of the current
|
|
394 |
list.
|
|
395 |
"""
|
|
396 |
current_values = self._index._cache[version] |
|
397 |
assert set(current_values[4]).difference(set(new_parents)) == set() |
|
398 |
self._index.add_version(version, |
|
399 |
current_values[1], |
|
400 |
current_values[2], |
|
401 |
current_values[3], |
|
402 |
new_parents) |
|
403 |
||
1596.2.36
by Robert Collins
add a get_delta api to versioned_file. |
404 |
def get_delta(self, version_id): |
405 |
"""Get a delta for constructing version from some other version."""
|
|
406 |
if not self.has_version(version_id): |
|
407 |
raise RevisionNotPresent(version_id, self.filename) |
|
408 |
||
409 |
parents = self.get_parents(version_id) |
|
410 |
if len(parents): |
|
411 |
parent = parents[0] |
|
412 |
else: |
|
413 |
parent = None |
|
414 |
data_pos, data_size = self._index.get_position(version_id) |
|
415 |
data, sha1 = self._data.read_records(((version_id, data_pos, data_size),))[version_id] |
|
416 |
version_idx = self._index.lookup(version_id) |
|
1596.2.37
by Robert Collins
Switch to delta based content copying in the generic versioned file copier. |
417 |
noeol = 'no-eol' in self._index.get_options(version_id) |
1596.2.36
by Robert Collins
add a get_delta api to versioned_file. |
418 |
if 'fulltext' == self._index.get_method(version_id): |
419 |
new_content = self.factory.parse_fulltext(data, version_idx) |
|
420 |
if parent is not None: |
|
421 |
reference_content = self._get_content(parent) |
|
422 |
old_texts = reference_content.text() |
|
423 |
else: |
|
424 |
old_texts = [] |
|
425 |
new_texts = new_content.text() |
|
426 |
delta_seq = SequenceMatcher(None, old_texts, new_texts) |
|
1596.2.37
by Robert Collins
Switch to delta based content copying in the generic versioned file copier. |
427 |
return parent, sha1, noeol, self._make_line_delta(delta_seq, new_content) |
1596.2.36
by Robert Collins
add a get_delta api to versioned_file. |
428 |
else: |
429 |
delta = self.factory.parse_line_delta(data, version_idx) |
|
1596.2.37
by Robert Collins
Switch to delta based content copying in the generic versioned file copier. |
430 |
return parent, sha1, noeol, delta |
1596.2.36
by Robert Collins
add a get_delta api to versioned_file. |
431 |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
432 |
def get_graph_with_ghosts(self): |
433 |
"""See VersionedFile.get_graph_with_ghosts()."""
|
|
434 |
graph_items = self._index.get_graph() |
|
435 |
return dict(graph_items) |
|
436 |
||
1666.1.6
by Robert Collins
Make knit the default format. |
437 |
def get_sha1(self, version_id): |
438 |
"""See VersionedFile.get_sha1()."""
|
|
439 |
components = self._get_components(version_id) |
|
440 |
return components[-1][-1][-1] |
|
441 |
||
1563.2.15
by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages. |
442 |
@staticmethod
|
443 |
def get_suffixes(): |
|
444 |
"""See VersionedFile.get_suffixes()."""
|
|
445 |
return [DATA_SUFFIX, INDEX_SUFFIX] |
|
1563.2.13
by Robert Collins
InterVersionedFile implemented. |
446 |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
447 |
def has_ghost(self, version_id): |
448 |
"""True if there is a ghost reference in the file to version_id."""
|
|
449 |
# maybe we have it
|
|
450 |
if self.has_version(version_id): |
|
451 |
return False |
|
452 |
# optimisable if needed by memoising the _ghosts set.
|
|
453 |
items = self._index.get_graph() |
|
454 |
for node, parents in items: |
|
455 |
for parent in parents: |
|
456 |
if parent not in self._index._cache: |
|
457 |
if parent == version_id: |
|
458 |
return True |
|
459 |
return False |
|
460 |
||
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
461 |
def versions(self): |
462 |
"""See VersionedFile.versions."""
|
|
463 |
return self._index.get_versions() |
|
464 |
||
465 |
def has_version(self, version_id): |
|
466 |
"""See VersionedFile.has_version."""
|
|
467 |
return self._index.has_version(version_id) |
|
468 |
||
469 |
__contains__ = has_version |
|
470 |
||
1596.2.34
by Robert Collins
Optimise knit add to only diff once per parent, not once per parent + once for the delta generation. |
471 |
def _merge_annotations(self, content, parents, parent_texts={}, |
472 |
delta=None, annotated=None): |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
473 |
"""Merge annotations for content. This is done by comparing
|
1596.2.27
by Robert Collins
Note potential improvements in knit adds. |
474 |
the annotations based on changes to the text.
|
475 |
"""
|
|
1596.2.34
by Robert Collins
Optimise knit add to only diff once per parent, not once per parent + once for the delta generation. |
476 |
if annotated: |
1596.2.36
by Robert Collins
add a get_delta api to versioned_file. |
477 |
delta_seq = None |
478 |
for parent_id in parents: |
|
1596.2.34
by Robert Collins
Optimise knit add to only diff once per parent, not once per parent + once for the delta generation. |
479 |
merge_content = self._get_content(parent_id, parent_texts) |
480 |
seq = SequenceMatcher(None, merge_content.text(), content.text()) |
|
1596.2.36
by Robert Collins
add a get_delta api to versioned_file. |
481 |
if delta_seq is None: |
482 |
# setup a delta seq to reuse.
|
|
483 |
delta_seq = seq |
|
1596.2.34
by Robert Collins
Optimise knit add to only diff once per parent, not once per parent + once for the delta generation. |
484 |
for i, j, n in seq.get_matching_blocks(): |
485 |
if n == 0: |
|
486 |
continue
|
|
487 |
# this appears to copy (origin, text) pairs across to the new
|
|
488 |
# content for any line that matches the last-checked parent.
|
|
489 |
# FIXME: save the sequence control data for delta compression
|
|
490 |
# against the most relevant parent rather than rediffing.
|
|
491 |
content._lines[j:j+n] = merge_content._lines[i:i+n] |
|
1596.2.36
by Robert Collins
add a get_delta api to versioned_file. |
492 |
if delta: |
493 |
if not annotated: |
|
494 |
reference_content = self._get_content(parents[0], parent_texts) |
|
495 |
new_texts = content.text() |
|
496 |
old_texts = reference_content.text() |
|
497 |
delta_seq = SequenceMatcher(None, old_texts, new_texts) |
|
498 |
return self._make_line_delta(delta_seq, content) |
|
499 |
||
500 |
def _make_line_delta(self, delta_seq, new_content): |
|
501 |
"""Generate a line delta from delta_seq and new_content."""
|
|
502 |
diff_hunks = [] |
|
503 |
for op in delta_seq.get_opcodes(): |
|
504 |
if op[0] == 'equal': |
|
505 |
continue
|
|
506 |
diff_hunks.append((op[1], op[2], op[4]-op[3], new_content._lines[op[3]:op[4]])) |
|
1596.2.34
by Robert Collins
Optimise knit add to only diff once per parent, not once per parent + once for the delta generation. |
507 |
return diff_hunks |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
508 |
|
509 |
def _get_components(self, version_id): |
|
510 |
"""Return a list of (version_id, method, data) tuples that
|
|
511 |
make up the version specified by version_id of the knit.
|
|
512 |
||
513 |
The components should be applied in the order of the returned
|
|
514 |
list.
|
|
515 |
||
516 |
The basis knit will be used to the largest extent possible
|
|
517 |
since it is assumed that accesses to it are faster.
|
|
518 |
"""
|
|
1628.1.2
by Robert Collins
More knit micro-optimisations. |
519 |
#profile notes:
|
520 |
# 4168 calls in 14912, 2289 internal
|
|
521 |
# 4168 in 9711 to read_records
|
|
522 |
# 52554 in 1250 to get_parents
|
|
523 |
# 170166 in 865 to list.append
|
|
524 |
||
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
525 |
# needed_versions holds a list of (method, version_id) of
|
526 |
# versions that need to be fetched to construct the final
|
|
527 |
# version of the file.
|
|
528 |
#
|
|
529 |
# basis_revisions is a list of versions that need to be
|
|
530 |
# fetched but exists in the basis knit.
|
|
531 |
||
532 |
basis = self.basis_knit |
|
533 |
needed_versions = [] |
|
534 |
basis_versions = [] |
|
535 |
cursor = version_id |
|
536 |
||
537 |
while 1: |
|
538 |
picked_knit = self |
|
539 |
if basis and basis._index.has_version(cursor): |
|
540 |
picked_knit = basis |
|
541 |
basis_versions.append(cursor) |
|
542 |
method = picked_knit._index.get_method(cursor) |
|
543 |
needed_versions.append((method, cursor)) |
|
544 |
if method == 'fulltext': |
|
545 |
break
|
|
546 |
cursor = picked_knit.get_parents(cursor)[0] |
|
547 |
||
548 |
components = {} |
|
549 |
if basis_versions: |
|
550 |
records = [] |
|
551 |
for comp_id in basis_versions: |
|
552 |
data_pos, data_size = basis._index.get_position(comp_id)
|
553 |
records.append((comp_id, data_pos, data_size))
|
554 |
components.update(basis._data.read_records(records)) |
|
555 |
||
556 |
records = [] |
|
557 |
for comp_id in [vid for method, vid in needed_versions |
|
558 |
if vid not in basis_versions]: |
|
559 |
data_pos, data_size = self._index.get_position(comp_id) |
|
560 |
records.append((comp_id, data_pos, data_size)) |
|
561 |
components.update(self._data.read_records(records)) |
|
562 |
||
563 |
# read_records returns a mapping with the version id as
|
|
564 |
# index and the value as data. The order the components need
|
|
565 |
# to be applied is held by needed_versions (reversed).
|
|
566 |
out = [] |
|
567 |
for method, comp_id in reversed(needed_versions): |
|
568 |
out.append((comp_id, method, components[comp_id])) |
|
569 |
||
570 |
return out |
|
571 |
||
1596.2.32
by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility. |
572 |
def _get_content(self, version_id, parent_texts={}): |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
573 |
"""Returns a content object that makes up the specified
|
574 |
version."""
|
|
575 |
if not self.has_version(version_id): |
|
576 |
raise RevisionNotPresent(version_id, self.filename) |
|
577 |
||
1596.2.32
by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility. |
578 |
cached_version = parent_texts.get(version_id, None) |
579 |
if cached_version is not None: |
|
580 |
return cached_version |
|
581 |
||
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
582 |
if self.basis_knit and version_id in self.basis_knit: |
583 |
return self.basis_knit._get_content(version_id) |
|
584 |
||
585 |
content = None |
|
586 |
components = self._get_components(version_id) |
|
587 |
for component_id, method, (data, digest) in components: |
|
588 |
version_idx = self._index.lookup(component_id) |
|
589 |
if method == 'fulltext': |
|
590 |
assert content is None |
|
591 |
content = self.factory.parse_fulltext(data, version_idx) |
|
592 |
elif method == 'line-delta': |
|
593 |
delta = self.factory.parse_line_delta(data, version_idx) |
|
1596.2.37
by Robert Collins
Switch to delta based content copying in the generic versioned file copier. |
594 |
content._lines = self._apply_delta(content._lines, delta) |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
595 |
|
596 |
if 'no-eol' in self._index.get_options(version_id): |
|
597 |
line = content._lines[-1][1].rstrip('\n') |
|
598 |
content._lines[-1] = (content._lines[-1][0], line) |
|
599 |
||
1666.1.6
by Robert Collins
Make knit the default format. |
600 |
# digest here is the digest from the last applied component.
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
601 |
if sha_strings(content.text()) != digest: |
1596.2.38
by Robert Collins
rollback from using deltas to using fulltexts - deltas need more work to be ready. |
602 |
603 |
raise KnitCorrupt(self.filename, 'sha-1 does not match %s' % version_id) |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
604 |
|
605 |
return content |
|
606 |
||
607 |
def _check_versions_present(self, version_ids): |
|
608 |
"""Check that all specified versions are present."""
|
|
609 |
version_ids = set(version_ids) |
|
610 |
for r in list(version_ids): |
|
611 |
if self._index.has_version(r): |
|
612 |
version_ids.remove(r) |
|
613 |
if version_ids: |
|
614 |
raise RevisionNotPresent(list(version_ids)[0], self.filename) |
|
615 |
||
1596.2.32
by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility. |
616 |
def _add_lines_with_ghosts(self, version_id, parents, lines, parent_texts): |
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
617 |
"""See VersionedFile.add_lines_with_ghosts()."""
|
618 |
self._check_add(version_id, lines) |
|
1596.2.32
by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility. |
619 |
return self._add(version_id, lines[:], parents, self.delta, parent_texts) |
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
620 |
|
1596.2.32
by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility. |
621 |
def _add_lines(self, version_id, parents, lines, parent_texts): |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
622 |
"""See VersionedFile.add_lines."""
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
623 |
self._check_add(version_id, lines) |
624 |
self._check_versions_present(parents) |
|
1596.2.32
by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility. |
625 |
return self._add(version_id, lines[:], parents, self.delta, parent_texts) |
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
626 |
|
627 |
def _check_add(self, version_id, lines): |
|
628 |
"""check that version_id and lines are safe to add."""
|
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
629 |
assert self.writable, "knit is not opened for write" |
630 |
### FIXME escape. RBC 20060228
|
|
631 |
if contains_whitespace(version_id): |
|
632 |
raise InvalidRevisionId(version_id) |
|
633 |
if self.has_version(version_id): |
|
634 |
raise RevisionAlreadyPresent(version_id, self.filename) |
|
1666.1.6
by Robert Collins
Make knit the default format. |
635 |
self._check_lines_not_unicode(lines) |
636 |
self._check_lines_are_lines(lines) |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
637 |
|
1596.2.32
by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility. |
638 |
def _add(self, version_id, lines, parents, delta, parent_texts): |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
639 |
"""Add a set of lines on top of version specified by parents.
|
640 |
||
641 |
If delta is true, compress the text as a line-delta against
|
|
642 |
the first parent.
|
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
643 |
|
644 |
Any versions not present will be converted into ghosts.
|
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
645 |
"""
|
1596.2.28
by Robert Collins
more knit profile based tuning. |
646 |
# 461 0 6546.0390 43.9100 bzrlib.knit:489(_add)
|
647 |
# +400 0 889.4890 418.9790 +bzrlib.knit:192(lower_fulltext)
|
|
648 |
# +461 0 1364.8070 108.8030 +bzrlib.knit:996(add_record)
|
|
649 |
# +461 0 193.3940 41.5720 +bzrlib.knit:898(add_version)
|
|
650 |
# +461 0 134.0590 18.3810 +bzrlib.osutils:361(sha_strings)
|
|
651 |
# +461 0 36.3420 15.4540 +bzrlib.knit:146(make)
|
|
652 |
# +1383 0 8.0370 8.0370 +<len>
|
|
653 |
# +61 0 13.5770 7.9190 +bzrlib.knit:199(lower_line_delta)
|
|
654 |
# +61 0 963.3470 7.8740 +bzrlib.knit:427(_get_content)
|
|
655 |
# +61 0 973.9950 5.2950 +bzrlib.knit:136(line_delta)
|
|
656 |
# +61 0 1918.1800 5.2640 +bzrlib.knit:359(_merge_annotations)
|
|
657 |
||
1596.2.10
by Robert Collins
Reviewer feedback on knit branches. |
658 |
present_parents = [] |
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
659 |
ghosts = [] |
1596.2.32
by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility. |
660 |
if parent_texts is None: |
661 |
parent_texts = {} |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
662 |
for parent in parents: |
663 |
if not self.has_version(parent): |
|
664 |
ghosts.append(parent) |
|
1594.2.9
by Robert Collins
Teach Knit repositories how to handle ghosts without corrupting at all. |
665 |
else: |
1596.2.10
by Robert Collins
Reviewer feedback on knit branches. |
666 |
present_parents.append(parent) |
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
667 |
|
1596.2.10
by Robert Collins
Reviewer feedback on knit branches. |
668 |
if delta and not len(present_parents): |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
669 |
delta = False |
670 |
||
671 |
digest = sha_strings(lines) |
|
672 |
options = [] |
|
673 |
if lines: |
|
674 |
if lines[-1][-1] != '\n': |
|
675 |
options.append('no-eol') |
|
676 |
lines[-1] = lines[-1] + '\n' |
|
677 |
||
1596.2.10
by Robert Collins
Reviewer feedback on knit branches. |
678 |
if len(present_parents) and delta: |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
679 |
# To speed the extraction of texts, the delta chain is limited
|
680 |
# to a fixed number of deltas. This should minimize both
|
|
681 |
# I/O and the time spent applying deltas.
|
|
682 |
count = 0 |
|
1596.2.10
by Robert Collins
Reviewer feedback on knit branches. |
683 |
delta_parents = present_parents |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
684 |
while count < 25: |
685 |
parent = delta_parents[0] |
|
686 |
method = self._index.get_method(parent) |
|
687 |
if method == 'fulltext': |
|
688 |
break
|
|
689 |
delta_parents = self._index.get_parents(parent) |
|
690 |
count = count + 1 |
|
691 |
if method == 'line-delta': |
|
692 |
delta = False |
|
693 |
||
1596.2.32
by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility. |
694 |
lines = self.factory.make(lines, version_id) |
1596.2.34
by Robert Collins
Optimise knit add to only diff once per parent, not once per parent + once for the delta generation. |
695 |
if delta or (self.factory.annotated and len(present_parents) > 0): |
1596.2.32
by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility. |
696 |
# Merge annotations from parent texts if needed.
|
1596.2.34
by Robert Collins
Optimise knit add to only diff once per parent, not once per parent + once for the delta generation. |
697 |
delta_hunks = self._merge_annotations(lines, present_parents, parent_texts, |
698 |
delta, self.factory.annotated) |
|
1596.2.32
by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility. |
699 |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
700 |
if delta: |
701 |
options.append('line-delta') |
|
702 |
store_lines = self.factory.lower_line_delta(delta_hunks) |
|
703 |
else: |
|
704 |
options.append('fulltext') |
|
705 |
store_lines = self.factory.lower_fulltext(lines) |
|
706 |
||
707 |
where, size = self._data.add_record(version_id, digest, store_lines) |
|
708 |
self._index.add_version(version_id, options, where, size, parents) |
|
1596.2.32
by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility. |
709 |
return lines |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
710 |
|
1563.2.19
by Robert Collins
stub out a check for knits. |
711 |
def check(self, progress_bar=None): |
712 |
"""See VersionedFile.check()."""
|
|
713 |
||
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
714 |
def _clone_text(self, new_version_id, old_version_id, parents): |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
715 |
"""See VersionedFile.clone_text()."""
|
716 |
# FIXME RBC 20060228 make fast by only inserting an index with null delta.
|
|
717 |
self.add_lines(new_version_id, parents, self.get_lines(old_version_id)) |
|
718 |
||
719 |
def get_lines(self, version_id): |
|
720 |
"""See VersionedFile.get_lines()."""
|
|
721 |
return self._get_content(version_id).text() |
|
722 |
||
1594.2.6
by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved. |
723 |
def iter_lines_added_or_present_in_versions(self, version_ids=None): |
724 |
"""See VersionedFile.iter_lines_added_or_present_in_versions()."""
|
|
725 |
if version_ids is None: |
|
726 |
version_ids = self.versions() |
|
727 |
# we don't care about inclusions, the caller cares.
|
|
728 |
# but we need to setup a list of records to visit.
|
|
729 |
# we need version_id, position, length
|
|
730 |
version_id_records = [] |
|
1594.3.1
by Robert Collins
Merge transaction finalisation and ensure iter_lines_added_or_present in knits does a old-to-new read in the knit. |
731 |
requested_versions = list(version_ids) |
732 |
# filter for available versions
|
|
733 |
for version_id in requested_versions: |
|
1594.2.6
by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved. |
734 |
if not self.has_version(version_id): |
735 |
raise RevisionNotPresent(version_id, self.filename) |
|
1594.3.1
by Robert Collins
Merge transaction finalisation and ensure iter_lines_added_or_present in knits does a old-to-new read in the knit. |
736 |
# get an in-component-order queue:
|
737 |
version_ids = [] |
|
738 |
for version_id in self.versions(): |
|
739 |
if version_id in requested_versions: |
|
740 |
version_ids.append(version_id) |
|
741 |
data_pos, length = self._index.get_position(version_id) |
|
742 |
version_id_records.append((version_id, data_pos, length)) |
|
743 |
||
1594.2.17
by Robert Collins
Better readv coalescing, now with test, and progress during knit index reading. |
744 |
pb = bzrlib.ui.ui_factory.nested_progress_bar() |
745 |
count = 0 |
|
746 |
total = len(version_id_records) |
|
747 |
try: |
|
1594.2.19
by Robert Collins
More coalescing tweaks, and knit feedback. |
748 |
pb.update('Walking content.', count, total) |
1594.2.17
by Robert Collins
Better readv coalescing, now with test, and progress during knit index reading. |
749 |
for version_id, data, sha_value in \ |
750 |
self._data.read_records_iter(version_id_records): |
|
751 |
pb.update('Walking content.', count, total) |
|
752 |
method = self._index.get_method(version_id) |
|
753 |
version_idx = self._index.lookup(version_id) |
|
754 |
assert method in ('fulltext', 'line-delta') |
|
755 |
if method == 'fulltext': |
|
756 |
content = self.factory.parse_fulltext(data, version_idx) |
|
757 |
for line in content.text(): |
|
1594.2.6
by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved. |
758 |
yield line |
1594.2.17
by Robert Collins
Better readv coalescing, now with test, and progress during knit index reading. |
759 |
else: |
760 |
delta = self.factory.parse_line_delta(data, version_idx) |
|
761 |
for start, end, _, lines in delta:
|
762 |
for origin, line in lines: |
|
763 |
yield line |
|
764 |
count +=1 |
|
1594.2.19
by Robert Collins
More coalescing tweaks, and knit feedback. |
765 |
pb.update('Walking content.', total, total) |
766 |
pb.finished() |
|
1594.2.17
by Robert Collins
Better readv coalescing, now with test, and progress during knit index reading. |
767 |
except: |
768 |
pb.update('Walking content.', total, total) |
|
769 |
pb.finished() |
|
770 |
raise
|
|
1594.2.6
by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved. |
771 |
|
1563.2.18
by Robert Collins
get knit repositories really using knits for text storage. |
772 |
def num_versions(self): |
773 |
"""See VersionedFile.num_versions()."""
|
|
774 |
return self._index.num_versions() |
|
775 |
||
776 |
__len__ = num_versions |
|
777 |
||
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
778 |
def annotate_iter(self, version_id): |
779 |
"""See VersionedFile.annotate_iter."""
|
|
780 |
content = self._get_content(version_id) |
|
781 |
for origin, text in content.annotate_iter(): |
|
1596.2.7
by Robert Collins
Remove the requirement for reannotation in knit joins. |
782 |
yield origin, text |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
783 |
|
784 |
def get_parents(self, version_id): |
|
785 |
"""See VersionedFile.get_parents."""
|
|
1628.1.2
by Robert Collins
More knit micro-optimisations. |
786 |
# perf notes:
|
787 |
# optimism counts!
|
|
788 |
# 52554 calls in 1264 872 internal down from 3674
|
|
789 |
try: |
|
790 |
return self._index.get_parents(version_id) |
|
791 |
except KeyError: |
|
792 |
raise RevisionNotPresent(version_id, self.filename) |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
793 |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
794 |
def get_parents_with_ghosts(self, version_id): |
795 |
"""See VersionedFile.get_parents."""
|
|
1628.1.2
by Robert Collins
More knit micro-optimisations. |
796 |
try: |
797 |
return self._index.get_parents_with_ghosts(version_id) |
|
798 |
except KeyError: |
|
799 |
raise RevisionNotPresent(version_id, self.filename) |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
800 |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
801 |
def get_ancestry(self, versions): |
802 |
"""See VersionedFile.get_ancestry."""
|
|
803 |
if isinstance(versions, basestring): |
|
804 |
versions = [versions] |
|
805 |
if not versions: |
|
806 |
return [] |
|
807 |
self._check_versions_present(versions) |
|
808 |
return self._index.get_ancestry(versions) |
|
809 |
||
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
810 |
def get_ancestry_with_ghosts(self, versions): |
811 |
"""See VersionedFile.get_ancestry_with_ghosts."""
|
|
812 |
if isinstance(versions, basestring): |
|
813 |
versions = [versions] |
|
814 |
if not versions: |
|
815 |
return [] |
|
816 |
self._check_versions_present(versions) |
|
817 |
return self._index.get_ancestry_with_ghosts(versions) |
|
818 |
||
1594.2.6
by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved. |
819 |
#@deprecated_method(zero_eight)
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
820 |
def walk(self, version_ids): |
821 |
"""See VersionedFile.walk."""
|
|
822 |
# We take the short path here, and extract all relevant texts
|
|
823 |
# and put them in a weave and let that do all the work. Far
|
|
824 |
# from optimal, but is much simpler.
|
|
1563.2.6
by Robert Collins
Start check tests for knits (pending), and remove dead code. |
825 |
# FIXME RB 20060228 this really is inefficient!
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
826 |
from bzrlib.weave import Weave |
827 |
||
828 |
w = Weave(self.filename) |
|
829 |
ancestry = self.get_ancestry(version_ids) |
|
830 |
sorted_graph = topo_sort(self._index.get_graph()) |
|
831 |
version_list = [vid for vid in sorted_graph if vid in ancestry] |
|
832 |
||
833 |
for version_id in version_list: |
|
834 |
lines = self.get_lines(version_id) |
|
835 |
w.add_lines(version_id, self.get_parents(version_id), lines) |
|
836 |
||
837 |
for lineno, insert_id, dset, line in w.walk(version_ids): |
|
838 |
yield lineno, insert_id, dset, line |
|
839 |
||
1664.2.3
by Aaron Bentley
Add failing test case |
840 |
def plan_merge(self, ver_a, ver_b): |
1664.2.11
by Aaron Bentley
Clarifications from merge review |
841 |
"""See VersionedFile.plan_merge."""
|
1664.2.6
by Aaron Bentley
Got plan-merge passing tests |
842 |
ancestors_b = set(self.get_ancestry(ver_b)) |
843 |
def status_a(revision, text): |
|
844 |
if revision in ancestors_b: |
|
845 |
return 'killed-b', text |
|
846 |
else: |
|
847 |
return 'new-a', text |
|
848 |
||
849 |
ancestors_a = set(self.get_ancestry(ver_a)) |
|
850 |
def status_b(revision, text): |
|
851 |
if revision in ancestors_a: |
|
852 |
return 'killed-a', text |
|
853 |
else: |
|
854 |
return 'new-b', text |
|
855 |
||
1664.2.4
by Aaron Bentley
Identify unchanged lines correctly |
856 |
annotated_a = self.annotate(ver_a) |
857 |
annotated_b = self.annotate(ver_b) |
|
1664.2.11
by Aaron Bentley
Clarifications from merge review |
858 |
plain_a = [t for (a, t) in annotated_a] |
859 |
plain_b = [t for (a, t) in annotated_b] |
|
1664.2.4
by Aaron Bentley
Identify unchanged lines correctly |
860 |
blocks = SequenceMatcher(None, plain_a, plain_b).get_matching_blocks() |
861 |
a_cur = 0 |
|
862 |
b_cur = 0 |
|
863 |
for ai, bi, l in blocks: |
|
1664.2.13
by Aaron Bentley
Knit plan_merge uses slices instead of xenumerate |
864 |
# process all mismatched sections
|
865 |
# (last mismatched section is handled because blocks always
|
|
866 |
# includes a 0-length last block)
|
|
867 |
for revision, text in annotated_a[a_cur:ai]: |
|
1664.2.6
by Aaron Bentley
Got plan-merge passing tests |
868 |
yield status_a(revision, text) |
1664.2.13
by Aaron Bentley
Knit plan_merge uses slices instead of xenumerate |
869 |
for revision, text in annotated_b[b_cur:bi]: |
1664.2.6
by Aaron Bentley
Got plan-merge passing tests |
870 |
yield status_b(revision, text) |
1664.2.13
by Aaron Bentley
Knit plan_merge uses slices instead of xenumerate |
871 |
|
1664.2.11
by Aaron Bentley
Clarifications from merge review |
872 |
# and now the matched section
|
1664.2.13
by Aaron Bentley
Knit plan_merge uses slices instead of xenumerate |
873 |
a_cur = ai + l |
874 |
b_cur = bi + l |
|
875 |
for text_a, text_b in zip(plain_a[ai:a_cur], plain_b[bi:b_cur]): |
|
1664.2.4
by Aaron Bentley
Identify unchanged lines correctly |
876 |
assert text_a == text_b |
877 |
yield "unchanged", text_a |
|
878 |
||
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
879 |
|
880 |
class _KnitComponentFile(object): |
|
881 |
"""One of the files used to implement a knit database"""
|
|
882 |
||
1666.1.6
by Robert Collins
Make knit the default format. |
883 |
def __init__(self, transport, filename, mode, file_mode=None): |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
884 |
self._transport = transport |
885 |
self._filename = filename |
|
886 |
self._mode = mode |
|
1666.1.6
by Robert Collins
Make knit the default format. |
887 |
self._file_mode=file_mode |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
888 |
|
889 |
def write_header(self): |
|
1666.1.6
by Robert Collins
Make knit the default format. |
890 |
if self._transport.append(self._filename, StringIO(self.HEADER), |
891 |
mode=self._file_mode): |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
892 |
raise KnitCorrupt(self._filename, 'misaligned after writing header') |
893 |
||
894 |
def check_header(self, fp): |
|
1641.1.2
by Robert Collins
Change knit index files to be robust in the presence of partial writes. |
895 |
line = fp.readline() |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
896 |
if line != self.HEADER: |
897 |
raise KnitHeaderError(badline=line) |
|
898 |
||
899 |
def commit(self): |
|
900 |
"""Commit is a nop."""
|
|
901 |
||
902 |
def __repr__(self): |
|
903 |
return '%s(%s)' % (self.__class__.__name__, self._filename) |
|
904 |
||
905 |
||
906 |
class _KnitIndex(_KnitComponentFile): |
|
907 |
"""Manages knit index file.
|
|
908 |
||
909 |
The index is already kept in memory and read on startup, to enable
|
|
910 |
fast lookups of revision information. The cursor of the index
|
|
911 |
file is always pointing to the end, making it easy to append
|
|
912 |
entries.
|
|
913 |
||
914 |
_cache is a cache for fast mapping from version id to a Index
|
|
915 |
object.
|
|
916 |
||
917 |
_history is a cache for fast mapping from indexes to version ids.
|
|
918 |
||
919 |
The index data format is dictionary compressed when it comes to
|
|
920 |
parent references; an index entry may only have parents with a
|
|
921 |
lower index number. As a result, the index is topologically sorted.
|
|
1563.2.11
by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis. |
922 |
|
923 |
Duplicate entries may be written to the index for a single version id;
|
|
924 |
if this is done then the latter one completely replaces the former:
|
|
925 |
this allows updates to correct version and parent information.
|
|
926 |
Note that the two entries may share the delta, and that successive
|
|
927 |
annotations and references MUST point to the first entry.
|
|
1641.1.2
by Robert Collins
Change knit index files to be robust in the presence of partial writes. |
928 |
|
929 |
The index file on disc contains a header, followed by one line per knit
|
|
930 |
record. The same revision can be present in an index file more than once.
|
|
931 |
The first occurrence gets assigned a sequence number starting from 0.
|
|
932 |
|
|
933 |
The format of a single line is
|
|
934 |
REVISION_ID FLAGS BYTE_OFFSET LENGTH( PARENT_ID|PARENT_SEQUENCE_ID)* :\n
|
|
935 |
REVISION_ID is a utf8-encoded revision id
|
|
936 |
FLAGS is a comma separated list of flags about the record. Values include
|
|
937 |
no-eol, line-delta, fulltext.
|
|
938 |
BYTE_OFFSET is the ascii representation of the byte offset in the data file
|
|
939 |
that the compressed data starts at.
|
|
940 |
LENGTH is the ascii representation of the length of the record in the data file.
|
|
941 |
PARENT_ID a utf-8 revision id prefixed by a '.' that is a parent of
|
|
942 |
REVISION_ID.
|
|
943 |
PARENT_SEQUENCE_ID the ascii representation of the sequence number of a
|
|
944 |
revision id already in the knit that is a parent of REVISION_ID.
|
|
945 |
The ' :' marker is the end of record marker.
|
|
946 |
|
|
947 |
partial writes:
|
|
948 |
when a write to the index file is interrupted, it will result in a line that
|
|
949 |
does not end in ' :'. If the ' :' is not present at the end of a line, or at
|
|
950 |
the end of the file, then the record that is missing it will be ignored by
|
|
951 |
the parser.
|
|
952 |
||
953 |
When writing new records to the index file, the data is preceded by '\n'
|
|
954 |
to ensure that records always start on new lines even if the last write was
|
|
955 |
interrupted. As a result it's normal for the last line in the index to be
|
|
956 |
missing a trailing newline. One can be added with no harmful effects.
|
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
957 |
"""
|
958 |
||
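# Illustrative sketch (not part of the original source; the revision ids are
# hypothetical): a single index line in the format documented above, and how
# the parser in __init__ below splits it.
#
#   line = "demo-rev-2 line-delta 1034 287 0 .ghost-rev :\n"
#   rec = line.split()
#   # rec == ['demo-rev-2', 'line-delta', '1034', '287', '0', '.ghost-rev', ':']
#   # rec[0]    REVISION_ID
#   # rec[1]    FLAGS, comma separated
#   # rec[2]    BYTE_OFFSET of the compressed record in the data file
#   # rec[3]    LENGTH of that record
#   # rec[4:-1] parents: '0' is a dictionary-compressed reference to sequence
#   #           number 0, '.ghost-rev' is a literal (possibly ghost) parent id
#   # rec[-1]   the ' :' end of record marker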
1666.1.6
by Robert Collins
Make knit the default format. |
959 |
HEADER = "# bzr knit index 8\n" |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
960 |
|
1596.2.18
by Robert Collins
More microopimisations on index reading, now down to 16000 records/seconds. |
961 |
# speed of knit parsing went from 280 ms to 280 ms with slots addition.
|
962 |
# __slots__ = ['_cache', '_history', '_transport', '_filename']
|
|
963 |
||
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
964 |
def _cache_version(self, version_id, options, pos, size, parents): |
1596.2.18
by Robert Collins
More microopimisations on index reading, now down to 16000 records/seconds. |
965 |
"""Cache a version record in the history array and index cache.
|
966 |
|
|
967 |
This is inlined into __init__ for performance. KEEP IN SYNC.
|
|
968 |
(It saves 60ms, 25% of the __init__ overhead on local 4000 record
|
|
969 |
indexes).
|
|
970 |
"""
|
|
1596.2.14
by Robert Collins
Make knit parsing non quadratic? |
971 |
# only want the _history index to reference the 1st index entry
|
972 |
# for version_id
|
|
1596.2.18
by Robert Collins
More microopimisations on index reading, now down to 16000 records/seconds. |
973 |
if version_id not in self._cache: |
1628.1.1
by Robert Collins
Cache the index number of versions in the knit index's self._cache so that |
974 |
index = len(self._history) |
1596.2.14
by Robert Collins
Make knit parsing non quadratic? |
975 |
self._history.append(version_id) |
1628.1.1
by Robert Collins
Cache the index number of versions in the knit index's self._cache so that |
976 |
else: |
977 |
index = self._cache[version_id][5] |
|
978 |
self._cache[version_id] = (version_id, |
|
979 |
options, |
|
980 |
pos, |
|
981 |
size, |
|
982 |
parents, |
|
983 |
index) |
|
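# Illustrative note (not part of the original source): the tuple cached above
# is addressed positionally throughout this class, roughly:
#
#   self._cache[version_id] == (version_id,  # [0]
#                               options,     # [1] e.g. ['fulltext']
#                               pos,         # [2] byte offset in the data file
#                               size,        # [3] length of that record
#                               parents,     # [4] list of parent version ids
#                               index)       # [5] sequence number in _history
#
# get_position() reads [2] and [3], get_parents() reads [4], lookup() reads [5].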
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
984 |
|
1666.1.6
by Robert Collins
Make knit the default format. |
985 |
def __init__(self, transport, filename, mode, create=False, file_mode=None): |
986 |
_KnitComponentFile.__init__(self, transport, filename, mode, file_mode) |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
987 |
self._cache = {} |
1563.2.11
by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis. |
988 |
# position in _history is the 'official' index for a revision
|
989 |
# but the values may have come from a newer entry.
|
|
990 |
# so - wc -l of a knit index is != the number of unique names
|
|
991 |
# in the weave.
|
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
992 |
self._history = [] |
1594.2.17
by Robert Collins
Better readv coalescing, now with test, and progress during knit index reading. |
993 |
pb = bzrlib.ui.ui_factory.nested_progress_bar() |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
994 |
try: |
1594.2.17
by Robert Collins
Better readv coalescing, now with test, and progress during knit index reading. |
995 |
count = 0 |
996 |
total = 1 |
|
997 |
try: |
|
998 |
pb.update('read knit index', count, total) |
|
999 |
fp = self._transport.get(self._filename) |
|
1000 |
self.check_header(fp) |
|
1596.2.18
by Robert Collins
More microopimisations on index reading, now down to 16000 records/seconds. |
1001 |
# readlines reads the whole file at once:
|
1002 |
# bad for transports like http, good for local disk
|
|
1003 |
# we save 60 ms doing this one change (
|
|
1004 |
# from calling readline each time to calling
|
|
1005 |
# readlines once).
|
|
1006 |
# probably what we want for nice behaviour on
|
|
1007 |
# http is an incremental readlines that yields, or
|
|
1008 |
# a check for local vs non-local indexes.
|
|
1009 |
for l in fp.readlines(): |
|
1010 |
rec = l.split() |
|
1641.1.2
by Robert Collins
Change knit index files to be robust in the presence of partial writes. |
1011 |
if len(rec) < 5 or rec[-1] != ':': |
1012 |
# corrupt line.
|
|
1013 |
# FIXME: in the future we should determine if it's a
|
|
1014 |
# short write - and ignore it
|
|
1015 |
# or a different failure, and raise. RBC 20060407
|
|
1016 |
continue
|
|
1594.2.17
by Robert Collins
Better readv coalescing, now with test, and progress during knit index reading. |
1017 |
count += 1 |
1018 |
total += 1 |
|
1596.2.18
by Robert Collins
More microopimisations on index reading, now down to 16000 records/seconds. |
1019 |
#pb.update('read knit index', count, total)
|
1020 |
# See self._parse_parents
|
|
1021 |
parents = [] |
|
1641.1.2
by Robert Collins
Change knit index files to be robust in the presence of partial writes. |
1022 |
for value in rec[4:-1]: |
1596.2.19
by Robert Collins
Shave 20% off id2path. |
1023 |
if '.' == value[0]: |
1596.2.18
by Robert Collins
More microopimisations on index reading, now down to 16000 records/seconds. |
1024 |
# uncompressed reference
|
1025 |
parents.append(value[1:]) |
|
1026 |
else: |
|
1027 |
# this is 15/4000ms faster than isinstance,
|
|
1028 |
# (in lsprof)
|
|
1029 |
# this function is called thousands of times a
|
|
1030 |
# second so small variations add up.
|
|
1031 |
assert value.__class__ is str |
|
1032 |
parents.append(self._history[int(value)]) |
|
1033 |
# end self._parse_parents
|
|
1034 |
# self._cache_version(rec[0],
|
|
1035 |
# rec[1].split(','),
|
|
1036 |
# int(rec[2]),
|
|
1037 |
# int(rec[3]),
|
|
1038 |
# parents)
|
|
1039 |
# --- self._cache_version
|
|
1040 |
# only want the _history index to reference the 1st
|
|
1041 |
# index entry for version_id
|
|
1042 |
version_id = rec[0] |
|
1043 |
if version_id not in self._cache: |
|
1628.1.1
by Robert Collins
Cache the index number of versions in the knit index's self._cache so that |
1044 |
index = len(self._history) |
1596.2.18
by Robert Collins
More microopimisations on index reading, now down to 16000 records/seconds. |
1045 |
self._history.append(version_id) |
1628.1.1
by Robert Collins
Cache the index number of versions in the knit index's self._cache so that |
1046 |
else: |
1047 |
index = self._cache[version_id][5] |
|
1596.2.18
by Robert Collins
More microopimisations on index reading, now down to 16000 records/seconds. |
1048 |
self._cache[version_id] = (version_id, |
1049 |
rec[1].split(','), |
|
1050 |
int(rec[2]), |
|
1051 |
int(rec[3]), |
|
1628.1.1
by Robert Collins
Cache the index number of versions in the knit index's self._cache so that |
1052 |
parents, |
1053 |
index) |
|
1596.2.18
by Robert Collins
More microopimisations on index reading, now down to 16000 records/seconds. |
1054 |
# --- self._cache_version
|
1594.2.17
by Robert Collins
Better readv coalescing, now with test, and progress during knit index reading. |
1055 |
except NoSuchFile, e: |
1056 |
if mode != 'w' or not create: |
|
1057 |
raise
|
|
1058 |
self.write_header() |
|
1059 |
finally: |
|
1060 |
pb.update('read knit index', total, total) |
|
1061 |
pb.finished() |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1062 |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1063 |
def _parse_parents(self, compressed_parents): |
1064 |
"""convert a list of string parent values into version ids.
|
|
1065 |
||
1066 |
ints are looked up in the index.
|
|
1067 |
.FOO values are ghosts and converted into FOO.
|
|
1596.2.18
by Robert Collins
More microopimisations on index reading, now down to 16000 records/seconds. |
1068 |
|
1069 |
NOTE: the function is retained here for clarity, and for possible
|
|
1070 |
use in partial index reads. However, bulk processing now has
|
|
1071 |
it inlined in __init__ for inner-loop optimisation.
|
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1072 |
"""
|
1073 |
result = [] |
|
1074 |
for value in compressed_parents: |
|
1596.2.15
by Robert Collins
Microprofiling of knit parsing. |
1075 |
if value[0] == '.': |
1596.2.18
by Robert Collins
More microopimisations on index reading, now down to 16000 records/seconds. |
1076 |
# uncompressed reference
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1077 |
result.append(value[1:]) |
1078 |
else: |
|
1596.2.15
by Robert Collins
Microprofiling of knit parsing. |
1079 |
# this is 15/4000ms faster than isinstance,
|
1080 |
# this function is called thousands of times a
|
|
1081 |
# second so small variations add up.
|
|
1082 |
assert value.__class__ is str |
|
1596.2.11
by Robert Collins
Remove utf8 debugging code |
1083 |
result.append(self._history[int(value)]) |
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1084 |
return result |
1085 |
||
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1086 |
def get_graph(self): |
1087 |
graph = [] |
|
1088 |
for version_id, index in self._cache.iteritems(): |
|
1089 |
graph.append((version_id, index[4])) |
|
1090 |
return graph |
|
1091 |
||
1092 |
def get_ancestry(self, versions): |
|
1093 |
"""See VersionedFile.get_ancestry."""
|
|
1563.2.35
by Robert Collins
cleanup deprecation warnings and finish conversion so the inventory is knit based too. |
1094 |
# get a graph of all the mentioned versions:
|
1095 |
graph = {} |
|
1096 |
pending = set(versions) |
|
1097 |
while len(pending): |
|
1098 |
version = pending.pop() |
|
1099 |
parents = self._cache[version][4] |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1100 |
# got the parents ok
|
1101 |
# trim ghosts
|
|
1102 |
parents = [parent for parent in parents if parent in self._cache] |
|
1563.2.35
by Robert Collins
cleanup deprecation warnings and finish conversion so the inventory is knit based too. |
1103 |
for parent in parents: |
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1104 |
# if not completed and not a ghost
|
1563.2.35
by Robert Collins
cleanup deprecation warnings and finish conversion so the inventory is knit based too. |
1105 |
if parent not in graph: |
1106 |
pending.add(parent) |
|
1107 |
graph[version] = parents |
|
1108 |
return topo_sort(graph.items()) |
|
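# Illustrative sketch (not part of the original source; version ids are
# hypothetical): for a stored ancestry A <- B <- C, get_ancestry(['C']) above
# builds graph == {'C': ['B'], 'B': ['A'], 'A': []} and topo_sort() returns
# the versions parents-first, i.e. ['A', 'B', 'C'].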
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1109 |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1110 |
def get_ancestry_with_ghosts(self, versions): |
1111 |
"""See VersionedFile.get_ancestry_with_ghosts."""
|
|
1112 |
# get a graph of all the mentioned versions:
|
|
1113 |
graph = {} |
|
1114 |
pending = set(versions) |
|
1115 |
while len(pending): |
|
1116 |
version = pending.pop() |
|
1117 |
try: |
|
1118 |
parents = self._cache[version][4] |
|
1119 |
except KeyError: |
|
1120 |
# ghost, fake it
|
|
1121 |
graph[version] = [] |
|
1122 |
pass
|
|
1123 |
else: |
|
1124 |
# got the parents ok
|
|
1125 |
for parent in parents: |
|
1126 |
if parent not in graph: |
|
1127 |
pending.add(parent) |
|
1128 |
graph[version] = parents |
|
1129 |
return topo_sort(graph.items()) |
|
1130 |
||
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1131 |
def num_versions(self): |
1132 |
return len(self._history) |
|
1133 |
||
1134 |
__len__ = num_versions |
|
1135 |
||
1136 |
def get_versions(self): |
|
1137 |
return self._history |
|
1138 |
||
1139 |
def idx_to_name(self, idx): |
|
1140 |
return self._history[idx] |
|
1141 |
||
1142 |
def lookup(self, version_id): |
|
1143 |
assert version_id in self._cache |
|
1628.1.1
by Robert Collins
Cache the index number of versions in the knit index's self._cache so that |
1144 |
return self._cache[version_id][5] |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1145 |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1146 |
def _version_list_to_index(self, versions): |
1147 |
result_list = [] |
|
1148 |
for version in versions: |
|
1149 |
if version in self._cache: |
|
1628.1.1
by Robert Collins
Cache the index number of versions in the knit index's self._cache so that |
1150 |
# -- inlined lookup() --
|
1151 |
result_list.append(str(self._cache[version][5])) |
|
1152 |
# -- end lookup () --
|
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1153 |
else: |
1594.2.9
by Robert Collins
Teach Knit repositories how to handle ghosts without corrupting at all. |
1154 |
result_list.append('.' + version.encode('utf-8')) |
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1155 |
return ' '.join(result_list) |
1156 |
||
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1157 |
def add_version(self, version_id, options, pos, size, parents): |
1158 |
"""Add a version record to the index."""
|
|
1692.4.1
by Robert Collins
Multiple merges: |
1159 |
self.add_versions(((version_id, options, pos, size, parents),)) |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1160 |
|
1692.2.1
by Robert Collins
Fix knit based push to only perform 2 appends to the target, rather that 2*new-versions. |
1161 |
def add_versions(self, versions): |
1162 |
"""Add multiple versions to the index.
|
|
1163 |
|
|
1164 |
:param versions: a list of tuples:
|
|
1165 |
(version_id, options, pos, size, parents).
|
|
1166 |
"""
|
|
1167 |
lines = [] |
|
1168 |
for version_id, options, pos, size, parents in versions: |
|
1169 |
line = "\n%s %s %s %s %s :" % (version_id.encode('utf-8'), |
|
1170 |
','.join(options), |
|
1171 |
pos, |
|
1172 |
size, |
|
1173 |
self._version_list_to_index(parents)) |
|
1692.4.1
by Robert Collins
Multiple merges: |
1174 |
assert isinstance(line, str), \ |
1175 |
'content must be utf-8 encoded: %r' % (line,) |
|
1692.2.1
by Robert Collins
Fix knit based push to only perform 2 appends to the target, rather that 2*new-versions. |
1176 |
lines.append(line) |
1177 |
self._transport.append(self._filename, StringIO(''.join(lines))) |
|
1178 |
# cache after writing, so that a failed write leads to missing cache
|
|
1179 |
# entries not extra ones. XXX TODO: RBC 20060502 in the event of a
|
|
1180 |
# failure, reload the index or flush it or some such, to prevent
|
|
1181 |
# writing records that did complete twice.
|
|
1182 |
for version_id, options, pos, size, parents in versions: |
|
1183 |
self._cache_version(version_id, options, pos, size, parents) |
|
1184 |
||
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1185 |
def has_version(self, version_id): |
1186 |
"""True if the version is in the index."""
|
|
1187 |
return version_id in self._cache |
|
1188 |
||
1189 |
def get_position(self, version_id): |
|
1190 |
"""Return data position and size of specified version."""
|
|
1191 |
return (self._cache[version_id][2], |
|
1192 |
self._cache[version_id][3]) |
|
1193 |
||
1194 |
def get_method(self, version_id): |
|
1195 |
"""Return compression method of specified version."""
|
|
1196 |
options = self._cache[version_id][1] |
|
1197 |
if 'fulltext' in options: |
|
1198 |
return 'fulltext' |
|
1199 |
else: |
|
1200 |
assert 'line-delta' in options |
|
1201 |
return 'line-delta' |
|
1202 |
||
1203 |
def get_options(self, version_id): |
|
1204 |
return self._cache[version_id][1] |
|
1205 |
||
1206 |
def get_parents(self, version_id): |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
1207 |
"""Return parents of specified version ignoring ghosts."""
|
1208 |
return [parent for parent in self._cache[version_id][4] |
|
1209 |
if parent in self._cache] |
|
1210 |
||
1211 |
def get_parents_with_ghosts(self, version_id): |
|
1212 |
"""Return parents of specified version wth ghosts."""
|
|
1213 |
return self._cache[version_id][4] |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1214 |
|
1215 |
def check_versions_present(self, version_ids): |
|
1216 |
"""Check that all specified versions are present."""
|
|
1217 |
version_ids = set(version_ids) |
|
1218 |
for version_id in list(version_ids): |
|
1219 |
if version_id in self._cache: |
|
1220 |
version_ids.remove(version_id) |
|
1221 |
if version_ids: |
|
1222 |
raise RevisionNotPresent(list(version_ids)[0], self._filename) |
|
1223 |
||
1224 |
||
1225 |
class _KnitData(_KnitComponentFile): |
|
1226 |
"""Contents of the knit data file"""
|
|
1227 |
||
1666.1.6
by Robert Collins
Make knit the default format. |
1228 |
HEADER = "# bzr knit data 8\n" |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1229 |
|
1666.1.6
by Robert Collins
Make knit the default format. |
1230 |
def __init__(self, transport, filename, mode, create=False, file_mode=None): |
1563.2.5
by Robert Collins
Remove unused transaction references from knit.py and the versionedfile interface. |
1231 |
_KnitComponentFile.__init__(self, transport, filename, mode) |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1232 |
self._file = None |
1233 |
self._checked = False |
|
1563.2.35
by Robert Collins
cleanup deprecation warnings and finish conversion so the inventory is knit based too. |
1234 |
if create: |
1666.1.6
by Robert Collins
Make knit the default format. |
1235 |
self._transport.put(self._filename, StringIO(''), mode=file_mode) |
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1236 |
self._records = {} |
1237 |
||
1238 |
def clear_cache(self): |
|
1239 |
"""Clear the record cache."""
|
|
1240 |
self._records = {} |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1241 |
|
1242 |
def _open_file(self): |
|
1243 |
if self._file is None: |
|
1244 |
try: |
|
1245 |
self._file = self._transport.get(self._filename) |
|
1246 |
except NoSuchFile: |
|
1247 |
pass
|
|
1248 |
return self._file |
|
1249 |
||
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1250 |
def _record_to_data(self, version_id, digest, lines): |
1251 |
"""Convert version_id, digest, lines into a raw data block.
|
|
1252 |
|
|
1253 |
:return: (length, a StringIO instance with the raw data ready to read).
|
|
1254 |
"""
|
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1255 |
sio = StringIO() |
1256 |
data_file = GzipFile(None, mode='wb', fileobj=sio) |
|
1596.2.28
by Robert Collins
more knit profile based tuning. |
1257 |
data_file.writelines(chain( |
1258 |
["version %s %d %s\n" % (version_id.encode('utf-8'), |
|
1259 |
len(lines), |
|
1260 |
digest)], |
|
1261 |
lines, |
|
1628.1.2
by Robert Collins
More knit micro-optimisations. |
1262 |
["end %s\n" % version_id.encode('utf-8')])) |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1263 |
data_file.close() |
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1264 |
length = sio.tell() |
1596.2.28
by Robert Collins
more knit profile based tuning. |
1265 |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1266 |
sio.seek(0) |
1267 |
return length, sio |
|
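# Illustrative sketch (not part of the original source; the revision id is
# hypothetical): the uncompressed payload built by _record_to_data() above
# for a two-line text looks like
#
#   version demo-rev-1 2 <sha1 of the expanded text>
#   first line of content
#   second line of content
#   end demo-rev-1
#
# The payload is gzip-compressed into a StringIO, and the (length, sio) pair
# returned above is consumed by add_record() below.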
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1268 |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1269 |
def add_raw_record(self, raw_data): |
1692.4.1
by Robert Collins
Multiple merges: |
1270 |
"""Append a prepared record to the data file.
|
1271 |
|
|
1272 |
:return: the offset in the data file at which raw_data was written.
|
|
1273 |
"""
|
|
1596.2.9
by Robert Collins
Utf8 safety in knit indexes. |
1274 |
assert isinstance(raw_data, str), 'data must be plain bytes' |
1692.2.1
by Robert Collins
Fix knit based push to only perform 2 appends to the target, rather that 2*new-versions. |
1275 |
return self._transport.append(self._filename, StringIO(raw_data)) |
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1276 |
|
1277 |
def add_record(self, version_id, digest, lines): |
|
1278 |
"""Write new text record to disk. Returns the position in the
|
|
1279 |
file where it was written."""
|
|
1280 |
size, sio = self._record_to_data(version_id, digest, lines) |
|
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1281 |
# cache
|
1282 |
self._records[version_id] = (digest, lines) |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1283 |
# write to disk
|
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1284 |
start_pos = self._transport.append(self._filename, sio) |
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1285 |
return start_pos, size |
1286 |
||
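# Illustrative usage sketch (not part of the original source; the names
# 'data', 'index' and the argument values are hypothetical): the
# (start_pos, size) returned by add_record() above is what gets written
# into the index entry for the version, e.g.
#
#   pos, size = data.add_record(version_id, digest, lines)
#   index.add_version(version_id, ['fulltext'], pos, size, parents)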
1287 |
def _parse_record_header(self, version_id, raw_data): |
|
1288 |
"""Parse a record header for consistency.
|
|
1289 |
||
1290 |
:return: the decompressor stream and the header record,
|
|
1291 |
as (stream, header_record).
|
|
1292 |
"""
|
|
1293 |
df = GzipFile(mode='rb', fileobj=StringIO(raw_data)) |
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1294 |
rec = df.readline().split() |
1295 |
if len(rec) != 4: |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1296 |
raise KnitCorrupt(self._filename, 'unexpected number of elements in record header') |
1596.2.9
by Robert Collins
Utf8 safety in knit indexes. |
1297 |
if rec[1].decode('utf-8') != version_id: |
1594.3.3
by Robert Collins
Bugfix error message output in knit error raising. |
1298 |
raise KnitCorrupt(self._filename, |
1594.3.4
by Robert Collins
Change urllib ranges implementation to be one coalesced range per http request. |
1299 |
'unexpected version, wanted %r, got %r' % ( |
1300 |
version_id, rec[1])) |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1301 |
return df, rec |
1302 |
||
1303 |
def _parse_record(self, version_id, data): |
|
1628.1.2
by Robert Collins
More knit micro-optimisations. |
1304 |
# profiling notes:
|
1305 |
# 4168 calls in 2880 217 internal
|
|
1306 |
# 4168 calls to _parse_record_header in 2121
|
|
1307 |
# 4168 calls to readlines in 330
|
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1308 |
df, rec = self._parse_record_header(version_id, data) |
1628.1.2
by Robert Collins
More knit micro-optimisations. |
1309 |
record_contents = df.readlines() |
1310 |
l = record_contents.pop() |
|
1311 |
assert len(record_contents) == int(rec[2]) |
|
1596.2.9
by Robert Collins
Utf8 safety in knit indexes. |
1312 |
if l.decode('utf-8') != 'end %s\n' % version_id: |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1313 |
raise KnitCorrupt(self._filename, 'unexpected version end line %r, wanted %r' |
1314 |
% (l, version_id)) |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1315 |
df.close() |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1316 |
return record_contents, rec[3] |
1317 |
||
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1318 |
def read_records_iter_raw(self, records): |
1319 |
"""Read text records from data file and yield raw data.
|
|
1320 |
||
1321 |
This unpacks enough of the text record to validate the id is
|
|
1322 |
as expected, but that's all.
|
|
1323 |
||
1324 |
It will actively recompress currently cached records on the
|
|
1325 |
basis that that is cheaper than I/O activity.
|
|
1326 |
"""
|
|
1327 |
needed_records = [] |
|
1328 |
for version_id, pos, size in records: |
|
1329 |
if version_id not in self._records: |
|
1330 |
needed_records.append((version_id, pos, size)) |
|
1331 |
||
1332 |
# set up an iterator of the external records:
|
|
1333 |
# uses readv, so hopefully nice and fast.
|
|
1334 |
if len(needed_records): |
|
1335 |
# grab the disk data needed.
|
|
1336 |
raw_records = self._transport.readv(self._filename, |
|
1337 |
[(pos, size) for version_id, pos, size in needed_records]) |
|
1338 |
||
1339 |
for version_id, pos, size in records: |
|
1340 |
if version_id in self._records: |
|
1341 |
# recompress a cached record
|
|
1342 |
size, sio = self._record_to_data(version_id, |
|
1343 |
self._records[version_id][0], |
|
1344 |
self._records[version_id][1]) |
|
1345 |
yield version_id, sio.getvalue() |
|
1346 |
else: |
|
1347 |
pos, data = raw_records.next() |
|
1348 |
# validate the header
|
|
1349 |
df, rec = self._parse_record_header(version_id, data) |
|
1350 |
df.close() |
|
1351 |
yield version_id, data |
|
1352 |
||
1353 |
||
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1354 |
def read_records_iter(self, records): |
1355 |
"""Read text records from data file and yield result.
|
|
1356 |
||
1357 |
Each passed record is a tuple of (version_id, pos, len) and
|
|
1358 |
will be read in the given order. Yields (version_id,
|
|
1359 |
contents, digest).
|
|
1360 |
"""
|
|
1628.1.2
by Robert Collins
More knit micro-optimisations. |
1361 |
# profiling notes:
|
1362 |
# 60890 calls for 4168 extractions in 5045, 683 internal.
|
|
1363 |
# 4168 calls to readv in 1411
|
|
1364 |
# 4168 calls to parse_record in 2880
|
|
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1365 |
|
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1366 |
needed_records = [] |
1367 |
for version_id, pos, size in records: |
|
1368 |
if version_id not in self._records: |
|
1369 |
needed_records.append((version_id, pos, size)) |
|
1370 |
||
1371 |
if len(needed_records): |
|
1372 |
# We take it that the transport optimizes the fetching as well
|
|
1373 |
# as possible (i.e., reads continuous ranges).
|
|
1374 |
response = self._transport.readv(self._filename, |
|
1375 |
[(pos, size) for version_id, pos, size in needed_records]) |
|
1376 |
||
1594.3.5
by Robert Collins
Bugfix knit caching implementation. |
1377 |
for (record_id, pos, size), (pos, data) in izip(iter(needed_records), response): |
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1378 |
content, digest = self._parse_record(record_id, data) |
1379 |
self._records[record_id] = (digest, content) |
|
1380 |
||
1381 |
for version_id, pos, size in records: |
|
1628.1.2
by Robert Collins
More knit micro-optimisations. |
1382 |
yield version_id, list(self._records[version_id][1]), self._records[version_id][0] |
1563.2.4
by Robert Collins
First cut at including the knit implementation of versioned_file. |
1383 |
|
1384 |
def read_records(self, records): |
|
1385 |
"""Read records into a dictionary."""
|
|
1386 |
components = {} |
|
1387 |
for record_id, content, digest in self.read_records_iter(records): |
|
1388 |
components[record_id] = (content, digest) |
|
1389 |
return components |
|
1390 |
||
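# Illustrative usage sketch (not part of the original source; names are
# hypothetical): reading a record back given its index position.
#
#   records = [('demo-rev-1', pos, size)]        # (version_id, pos, size)
#   components = data.read_records(records)
#   lines, digest = components['demo-rev-1']     # list of lines, sha1 digest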
1563.2.13
by Robert Collins
InterVersionedFile implemented. |
1391 |
|
1392 |
class InterKnit(InterVersionedFile): |
|
1393 |
"""Optimised code paths for knit to knit operations."""
|
|
1394 |
||
1684.3.3
by Robert Collins
Add a special cased weaves to knit converter. |
1395 |
_matching_file_from_factory = KnitVersionedFile |
1396 |
_matching_file_to_factory = KnitVersionedFile |
|
1563.2.13
by Robert Collins
InterVersionedFile implemented. |
1397 |
|
1398 |
@staticmethod
|
|
1399 |
def is_compatible(source, target): |
|
1400 |
"""Be compatible with knits. """
|
|
1401 |
try: |
|
1402 |
return (isinstance(source, KnitVersionedFile) and |
|
1403 |
isinstance(target, KnitVersionedFile)) |
|
1404 |
except AttributeError: |
|
1405 |
return False |
|
1406 |
||
1563.2.31
by Robert Collins
Convert Knit repositories to use knits. |
1407 |
def join(self, pb=None, msg=None, version_ids=None, ignore_missing=False): |
1563.2.13
by Robert Collins
InterVersionedFile implemented. |
1408 |
"""See InterVersionedFile.join."""
|
1409 |
assert isinstance(self.source, KnitVersionedFile) |
|
1410 |
assert isinstance(self.target, KnitVersionedFile) |
|
1411 |
||
1684.3.2
by Robert Collins
Factor out version_ids-to-join selection in InterVersionedfile. |
1412 |
version_ids = self._get_source_version_ids(version_ids, ignore_missing) |
1563.2.31
by Robert Collins
Convert Knit repositories to use knits. |
1413 |
|
1563.2.13
by Robert Collins
InterVersionedFile implemented. |
1414 |
if not version_ids: |
1415 |
return 0 |
|
1416 |
||
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1417 |
pb = bzrlib.ui.ui_factory.nested_progress_bar() |
1418 |
try: |
|
1419 |
version_ids = list(version_ids) |
|
1420 |
if None in version_ids: |
|
1421 |
version_ids.remove(None) |
|
1422 |
||
1423 |
self.source_ancestry = set(self.source.get_ancestry(version_ids)) |
|
1424 |
this_versions = set(self.target._index.get_versions()) |
|
1425 |
needed_versions = self.source_ancestry - this_versions |
|
1426 |
cross_check_versions = self.source_ancestry.intersection(this_versions) |
|
1427 |
mismatched_versions = set() |
|
1428 |
for version in cross_check_versions: |
|
1429 |
# scan to include needed parents.
|
|
1430 |
n1 = set(self.target.get_parents_with_ghosts(version)) |
|
1431 |
n2 = set(self.source.get_parents_with_ghosts(version)) |
|
1432 |
if n1 != n2: |
|
1433 |
# FIXME TEST this check for cycles being introduced works
|
|
1434 |
# the logic is we have a cycle if in our graph we are an
|
|
1435 |
# ancestor of any of the n2 revisions.
|
|
1436 |
for parent in n2: |
|
1437 |
if parent in n1: |
|
1438 |
# safe
|
|
1439 |
continue
|
|
1440 |
else: |
|
1441 |
parent_ancestors = self.source.get_ancestry(parent) |
|
1442 |
if version in parent_ancestors: |
|
1443 |
raise errors.GraphCycleError([parent, version]) |
|
1444 |
# ensure this parent will be available later.
|
|
1445 |
new_parents = n2.difference(n1) |
|
1446 |
needed_versions.update(new_parents.difference(this_versions)) |
|
1447 |
mismatched_versions.add(version) |
|
1448 |
||
1684.3.3
by Robert Collins
Add a special cased weaves to knit converter. |
1449 |
if not needed_versions and not mismatched_versions: |
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1450 |
return 0 |
1451 |
full_list = topo_sort(self.source.get_graph()) |
|
1452 |
||
1453 |
version_list = [i for i in full_list if (not self.target.has_version(i) |
|
1454 |
and i in needed_versions)] |
|
1455 |
||
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1456 |
# plan the join:
|
1457 |
copy_queue = [] |
|
1458 |
copy_queue_records = [] |
|
1459 |
copy_set = set() |
|
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1460 |
for version_id in version_list: |
1461 |
options = self.source._index.get_options(version_id) |
|
1462 |
parents = self.source._index.get_parents_with_ghosts(version_id) |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1463 |
# check that this will be a consistent copy:
|
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1464 |
for parent in parents: |
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1465 |
# if source has the parent, we must:
|
1466 |
# * already have it or
|
|
1467 |
# * have it scheduled already
|
|
1468 |
# otherwise we don't care
|
|
1469 |
assert (self.target.has_version(parent) or |
|
1470 |
parent in copy_set or |
|
1471 |
not self.source.has_version(parent)) |
|
1472 |
data_pos, data_size = self.source._index.get_position(version_id) |
|
1473 |
copy_queue_records.append((version_id, data_pos, data_size)) |
|
1474 |
copy_queue.append((version_id, options, parents)) |
|
1475 |
copy_set.add(version_id) |
|
1476 |
||
1477 |
# pull the data across for the join:
|
|
1478 |
count = 0 |
|
1479 |
total = len(version_list) |
|
1692.2.1
by Robert Collins
Fix knit based push to only perform 2 appends to the target, rather that 2*new-versions. |
1480 |
raw_datum = [] |
1481 |
raw_records = [] |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1482 |
for (version_id, raw_data), \ |
1483 |
(version_id2, options, parents) in \ |
|
1484 |
izip(self.source._data.read_records_iter_raw(copy_queue_records), |
|
1485 |
copy_queue): |
|
1486 |
assert version_id == version_id2, 'logic error, inconsistent results' |
|
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1487 |
count = count + 1 |
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1488 |
pb.update("Joining knit", count, total) |
1692.2.1
by Robert Collins
Fix knit based push to only perform 2 appends to the target, rather that 2*new-versions. |
1489 |
raw_records.append((version_id, options, parents, len(raw_data))) |
1490 |
raw_datum.append(raw_data) |
|
1491 |
self.target._add_raw_records(raw_records, ''.join(raw_datum)) |
|
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1492 |
|
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1493 |
for version in mismatched_versions: |
1596.2.8
by Robert Collins
Join knits with the original gzipped data avoiding recompression. |
1494 |
# FIXME RBC 20060309 is this needed?
|
1594.2.24
by Robert Collins
Make use of the transaction finalisation warning support to implement in-knit caching. |
1495 |
n1 = set(self.target.get_parents_with_ghosts(version)) |
1496 |
n2 = set(self.source.get_parents_with_ghosts(version)) |
|
1497 |
# write a combined record to our history preserving the current
|
|
1498 |
# parents as first in the list
|
|
1499 |
new_parents = self.target.get_parents_with_ghosts(version) + list(n2.difference(n1)) |
|
1500 |
self.target.fix_parents(version, new_parents) |
|
1501 |
return count |
|
1502 |
finally: |
|
1503 |
pb.finished() |
|
1563.2.13
by Robert Collins
InterVersionedFile implemented. |
1504 |
|
1505 |
||
1506 |
InterVersionedFile.register_optimiser(InterKnit) |
|
1596.2.24
by Robert Collins
Gzipfile was slightly slower than ideal. |
1507 |
|
1508 |
||
1684.3.3
by Robert Collins
Add a special cased weaves to knit converter. |
1509 |
class WeaveToKnit(InterVersionedFile): |
1510 |
"""Optimised code paths for weave to knit operations."""
|
|
1511 |
||
1512 |
_matching_file_from_factory = bzrlib.weave.WeaveFile |
|
1513 |
_matching_file_to_factory = KnitVersionedFile |
|
1514 |
||
1515 |
@staticmethod
|
|
1516 |
def is_compatible(source, target): |
|
1517 |
"""Be compatible with weaves to knits."""
|
|
1518 |
try: |
|
1519 |
return (isinstance(source, bzrlib.weave.Weave) and |
|
1520 |
isinstance(target, KnitVersionedFile)) |
|
1521 |
except AttributeError: |
|
1522 |
return False |
|
1523 |
||
1524 |
def join(self, pb=None, msg=None, version_ids=None, ignore_missing=False): |
|
1525 |
"""See InterVersionedFile.join."""
|
|
1526 |
assert isinstance(self.source, bzrlib.weave.Weave) |
|
1527 |
assert isinstance(self.target, KnitVersionedFile) |
|
1528 |
||
1529 |
version_ids = self._get_source_version_ids(version_ids, ignore_missing) |
|
1530 |
||
1531 |
if not version_ids: |
|
1532 |
return 0 |
|
1533 |
||
1534 |
pb = bzrlib.ui.ui_factory.nested_progress_bar() |
|
1535 |
try: |
|
1536 |
version_ids = list(version_ids) |
|
1537 |
||
1538 |
self.source_ancestry = set(self.source.get_ancestry(version_ids)) |
|
1539 |
this_versions = set(self.target._index.get_versions()) |
|
1540 |
needed_versions = self.source_ancestry - this_versions |
|
1541 |
cross_check_versions = self.source_ancestry.intersection(this_versions) |
|
1542 |
mismatched_versions = set() |
|
1543 |
for version in cross_check_versions: |
|
1544 |
# scan to include needed parents.
|
|
1545 |
n1 = set(self.target.get_parents_with_ghosts(version)) |
|
1546 |
n2 = set(self.source.get_parents(version)) |
|
1547 |
# if everything in n2 is already in n1, it's fine.
|
|
1548 |
if n2.difference(n1): |
|
1549 |
# FIXME TEST this check for cycles being introduced works
|
|
1550 |
# the logic is we have a cycle if in our graph we are an
|
|
1551 |
# ancestor of any of the n2 revisions.
|
|
1552 |
for parent in n2: |
|
1553 |
if parent in n1: |
|
1554 |
# safe
|
|
1555 |
continue
|
|
1556 |
else: |
|
1557 |
parent_ancestors = self.source.get_ancestry(parent) |
|
1558 |
if version in parent_ancestors: |
|
1559 |
raise errors.GraphCycleError([parent, version]) |
|
1560 |
# ensure this parent will be available later.
|
|
1561 |
new_parents = n2.difference(n1) |
|
1562 |
needed_versions.update(new_parents.difference(this_versions)) |
|
1563 |
mismatched_versions.add(version) |
|
1564 |
||
1565 |
if not needed_versions and not mismatched_versions: |
|
1566 |
return 0 |
|
1567 |
full_list = topo_sort(self.source.get_graph()) |
|
1568 |
||
1569 |
version_list = [i for i in full_list if (not self.target.has_version(i) |
|
1570 |
and i in needed_versions)] |
|
1571 |
||
1572 |
# do the join:
|
|
1573 |
count = 0 |
|
1574 |
total = len(version_list) |
|
1575 |
for version_id in version_list: |
|
1576 |
pb.update("Converting to knit", count, total) |
|
1577 |
parents = self.source.get_parents(version_id) |
|
1578 |
# check that this will be a consistent copy:
|
|
1579 |
for parent in parents: |
|
1580 |
# if source has the parent, we must already have it
|
|
1581 |
assert (self.target.has_version(parent)) |
|
1582 |
self.target.add_lines( |
|
1583 |
version_id, parents, self.source.get_lines(version_id)) |
|
1584 |
count = count + 1 |
|
1585 |
||
1586 |
for version in mismatched_versions: |
|
1587 |
# FIXME RBC 20060309 is this needed?
|
|
1588 |
n1 = set(self.target.get_parents_with_ghosts(version)) |
|
1589 |
n2 = set(self.source.get_parents(version)) |
|
1590 |
# write a combined record to our history preserving the current
|
|
1591 |
# parents as first in the list
|
|
1592 |
new_parents = self.target.get_parents_with_ghosts(version) + list(n2.difference(n1)) |
|
1593 |
self.target.fix_parents(version, new_parents) |
|
1594 |
return count |
|
1595 |
finally: |
|
1596 |
pb.finished() |
|
1597 |
||
1598 |
||
1599 |
InterVersionedFile.register_optimiser(WeaveToKnit) |
|
1600 |
||
1601 |
||
1596.2.35
by Robert Collins
Subclass SequenceMatcher to get a slightly faster (in our case) find_longest_match routine. |
1602 |
class SequenceMatcher(difflib.SequenceMatcher): |
1603 |
"""Knit tuned sequence matcher.
|
|
1604 |
||
1605 |
This is based on profiling of difflib which indicated some improvements
|
|
1606 |
for our usage pattern.
|
|
1607 |
"""
|
|
1608 |
||
1609 |
def find_longest_match(self, alo, ahi, blo, bhi): |
|
1610 |
"""Find longest matching block in a[alo:ahi] and b[blo:bhi].
|
|
1611 |
||
1612 |
If isjunk is not defined:
|
|
1613 |
||
1614 |
Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where
|
|
1615 |
alo <= i <= i+k <= ahi
|
|
1616 |
blo <= j <= j+k <= bhi
|
|
1617 |
and for all (i',j',k') meeting those conditions,
|
|
1618 |
k >= k'
|
|
1619 |
i <= i'
|
|
1620 |
and if i == i', j <= j'
|
|
1621 |
||
1622 |
In other words, of all maximal matching blocks, return one that
|
|
1623 |
starts earliest in a, and of all those maximal matching blocks that
|
|
1624 |
start earliest in a, return the one that starts earliest in b.
|
|
1625 |
||
1626 |
>>> s = SequenceMatcher(None, " abcd", "abcd abcd")
|
|
1627 |
>>> s.find_longest_match(0, 5, 0, 9)
|
|
1628 |
(0, 4, 5)
|
|
1629 |
||
1630 |
If isjunk is defined, first the longest matching block is
|
|
1631 |
determined as above, but with the additional restriction that no
|
|
1632 |
junk element appears in the block. Then that block is extended as
|
|
1633 |
far as possible by matching (only) junk elements on both sides. So
|
|
1634 |
the resulting block never matches on junk except as identical junk
|
|
1635 |
happens to be adjacent to an "interesting" match.
|
|
1636 |
||
1637 |
Here's the same example as before, but considering blanks to be
|
|
1638 |
junk. That prevents " abcd" from matching the " abcd" at the tail
|
|
1639 |
end of the second sequence directly. Instead only the "abcd" can
|
|
1640 |
match, and matches the leftmost "abcd" in the second sequence:
|
|
1641 |
||
1642 |
>>> s = SequenceMatcher(lambda x: x==" ", " abcd", "abcd abcd")
|
|
1643 |
>>> s.find_longest_match(0, 5, 0, 9)
|
|
1644 |
(1, 0, 4)
|
|
1645 |
||
1646 |
If no blocks match, return (alo, blo, 0).
|
|
1647 |
||
1648 |
>>> s = SequenceMatcher(None, "ab", "c")
|
|
1649 |
>>> s.find_longest_match(0, 2, 0, 1)
|
|
1650 |
(0, 0, 0)
|
|
1651 |
"""
|
|
1652 |
||
1653 |
# CAUTION: stripping common prefix or suffix would be incorrect.
|
|
1654 |
# E.g.,
|
|
1655 |
# ab
|
|
1656 |
# acab
|
|
1657 |
# Longest matching block is "ab", but if common prefix is
|
|
1658 |
# stripped, it's "a" (tied with "b"). UNIX(tm) diff does so
|
|
1659 |
# strip, so ends up claiming that ab is changed to acab by
|
|
1660 |
# inserting "ca" in the middle. That's minimal but unintuitive:
|
|
1661 |
# "it's obvious" that someone inserted "ac" at the front.
|
|
1662 |
# Windiff ends up at the same place as diff, but by pairing up
|
|
1663 |
# the unique 'b's and then matching the first two 'a's.
|
|
1664 |
||
1665 |
a, b, b2j, isbjunk = self.a, self.b, self.b2j, self.isbjunk |
|
1666 |
besti, bestj, bestsize = alo, blo, 0 |
|
1667 |
# find longest junk-free match
|
|
1668 |
# during an iteration of the loop, j2len[j] = length of longest
|
|
1669 |
# junk-free match ending with a[i-1] and b[j]
|
|
1670 |
j2len = {} |
|
1671 |
# nothing = []
|
|
1672 |
b2jget = b2j.get |
|
1673 |
for i in xrange(alo, ahi): |
|
1674 |
# look at all instances of a[i] in b; note that because
|
|
1675 |
# b2j has no junk keys, the loop is skipped if a[i] is junk
|
|
1676 |
j2lenget = j2len.get |
|
1677 |
newj2len = {} |
|
1678 |
||
1679 |
# changing b2j.get(a[i], nothing) to a try/KeyError pair produced the
|
|
1680 |
# following improvement
|
|
1681 |
# 704 0 4650.5320 2620.7410 bzrlib.knit:1336(find_longest_match)
|
|
1682 |
# +326674 0 1655.1210 1655.1210 +<method 'get' of 'dict' objects>
|
|
1683 |
# +76519 0 374.6700 374.6700 +<method 'has_key' of 'dict' objects>
|
|
1684 |
# to
|
|
1685 |
# 704 0 3733.2820 2209.6520 bzrlib.knit:1336(find_longest_match)
|
|
1686 |
# +211400 0 1147.3520 1147.3520 +<method 'get' of 'dict' objects>
|
|
1687 |
# +76519 0 376.2780 376.2780 +<method 'has_key' of 'dict' objects>
|
|
1688 |
||
1689 |
try: |
|
1690 |
js = b2j[a[i]] |
|
1691 |
except KeyError: |
|
1692 |
pass
|
|
1693 |
else: |
|
1694 |
for j in js: |
|
1695 |
# a[i] matches b[j]
|
|
1696 |
if j >= blo: |
|
1697 |
if j >= bhi: |
|
1698 |
break
|
|
1699 |
k = newj2len[j] = 1 + j2lenget(-1 + j, 0) |
|
1700 |
if k > bestsize: |
|
1701 |
besti, bestj, bestsize = 1 + i-k, 1 + j-k, k |
|
1702 |
j2len = newj2len |
|
1703 |
||
1704 |
# Extend the best by non-junk elements on each end. In particular,
|
|
1705 |
# "popular" non-junk elements aren't in b2j, which greatly speeds
|
|
1706 |
# the inner loop above, but also means "the best" match so far
|
|
1707 |
# doesn't contain any junk *or* popular non-junk elements.
|
|
1708 |
while besti > alo and bestj > blo and \ |
|
1709 |
not isbjunk(b[bestj-1]) and \ |
|
1710 |
a[besti-1] == b[bestj-1]: |
|
1711 |
besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 |
|
1712 |
while besti+bestsize < ahi and bestj+bestsize < bhi and \ |
|
1713 |
not isbjunk(b[bestj+bestsize]) and \ |
|
1714 |
a[besti+bestsize] == b[bestj+bestsize]: |
|
1715 |
bestsize += 1 |
|
1716 |
||
1717 |
# Now that we have a wholly interesting match (albeit possibly
|
|
1718 |
# empty!), we may as well suck up the matching junk on each
|
|
1719 |
# side of it too. Can't think of a good reason not to, and it
|
|
1720 |
# saves post-processing the (possibly considerable) expense of
|
|
1721 |
# figuring out what to do with it. In the case of an empty
|
|
1722 |
# interesting match, this is clearly the right thing to do,
|
|
1723 |
# because no other kind of match is possible in the regions.
|
|
1724 |
while besti > alo and bestj > blo and \ |
|
1725 |
isbjunk(b[bestj-1]) and \ |
|
1726 |
a[besti-1] == b[bestj-1]: |
|
1727 |
besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 |
|
1728 |
while besti+bestsize < ahi and bestj+bestsize < bhi and \ |
|
1729 |
isbjunk(b[bestj+bestsize]) and \ |
|
1730 |
a[besti+bestsize] == b[bestj+bestsize]: |
|
1731 |
bestsize = bestsize + 1 |
|
1732 |
||
1733 |
return besti, bestj, bestsize |
|
1734 |