import time

from bzrlib import (
    progress,
    )
from bzrlib.trace import mutter
from bzrlib.errors import (WeaveError, WeaveFormatError, WeaveParentMismatch,
        RevisionAlreadyPresent,
        RevisionNotPresent,
        UnavailableRepresentation,
        WeaveRevisionAlreadyPresent,
        WeaveRevisionNotPresent,
        )
import bzrlib.errors as errors
from bzrlib.osutils import dirname, sha_strings, split_lines
import bzrlib.patiencediff
from bzrlib.revision import NULL_REVISION
from bzrlib.symbol_versioning import *
from bzrlib.tsort import topo_sort
from bzrlib.versionedfile import (
    AbsentContentFactory,
    adapter_registry,
    ContentFactory,
    InterVersionedFile,
    VersionedFile,
    )
from bzrlib.weavefile import _read_weave_v5, write_weave_v5


class WeaveContentFactory(ContentFactory):
    """Content factory for streaming from weaves.

    :seealso ContentFactory:
    """

    def __init__(self, version, weave):
        """Create a WeaveContentFactory for version from weave."""
        ContentFactory.__init__(self)
        self.sha1 = weave.get_sha1s([version])[version]
        self.key = (version,)
        parents = weave.get_parent_map([version])[version]
        self.parents = tuple((parent,) for parent in parents)
        self.storage_kind = 'fulltext'
        self._weave = weave

    def get_bytes_as(self, storage_kind):
        if storage_kind == 'fulltext':
            return self._weave.get_text(self.key[-1])
        else:
            raise UnavailableRepresentation(self.key, storage_kind, 'fulltext')
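
# Illustrative sketch (assumed names, not part of this module): what a
# consumer sees when a WeaveContentFactory comes out of
# Weave.get_record_stream().  'w' is assumed to be a populated Weave that
# contains a version 'rev-1'.
#
#   factory = WeaveContentFactory('rev-1', w)
#   factory.key             # ('rev-1',)
#   factory.parents         # parents rewrapped as 1-tuples, e.g. (('rev-0',),)
#   factory.storage_kind    # always 'fulltext' for weaves
#   text = factory.get_bytes_as('fulltext')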


class Weave(VersionedFile):
    """weave - versioned text file storage."""

    __contains__ = has_version

    def get_record_stream(self, versions, ordering, include_delta_closure):
        """Get a stream of records for versions.

        :param versions: The versions to include. Each version is a tuple
            (version,).
        :param ordering: Either 'unordered' or 'topological'. A topologically
            sorted stream has compression parents strictly before their
            children.
        :param include_delta_closure: If True then the closure across any
            compression parents will be included (in the opaque data).
        :return: An iterator of ContentFactory objects, each of which is only
            valid until the iterator is advanced.
        """
        versions = [version[-1] for version in versions]
        if ordering == 'topological':
            parents = self.get_parent_map(versions)
            new_versions = topo_sort(parents)
            new_versions.extend(set(versions).difference(set(parents)))
            versions = new_versions
        for version in versions:
            if version in self:
                yield WeaveContentFactory(version, self)
            else:
                yield AbsentContentFactory((version,))
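
    # Illustrative sketch (assumed names, not part of this class): pulling the
    # full text of each requested version out of a weave with the
    # record-stream API.  'w' is assumed to be a Weave holding 'rev-1' and
    # 'rev-2'.
    #
    #   keys = [('rev-1',), ('rev-2',)]
    #   for record in w.get_record_stream(keys, 'topological', False):
    #       if record.storage_kind == 'absent':
    #           continue    # or report the missing version
    #       lines = split_lines(record.get_bytes_as('fulltext'))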

    def get_parent_map(self, version_ids):
        """See VersionedFile.get_parent_map."""
        result = {}
        for version_id in version_ids:
            if version_id == NULL_REVISION:
                parents = ()
            else:
                try:
                    parents = tuple(
                        map(self._idx_to_name,
                            self._parents[self._lookup(version_id)]))
                except RevisionNotPresent:
                    continue
            result[version_id] = parents
        return result

    def get_delta(self, version_id):
        """See VersionedFile.get_delta."""
        return self.get_deltas([version_id])[version_id]

    def get_deltas(self, version_ids):
        """See VersionedFile.get_deltas."""
        version_ids = self.get_ancestry(version_ids)
        for version_id in version_ids:
            if not self.has_version(version_id):
                raise RevisionNotPresent(version_id, self)
        # try extracting all versions; parallel extraction is used
        nv = self.num_versions()
        sha1s = {}
        parents = {}
        inclusions = {}
        noeols = {}
        last_parent_lines = {}
        parent_inclusions = {}
        parent_linenums = {}
        parent_noeols = {}
        current_hunks = {}
        diff_hunks = {}
        # it's simplest to generate a full set of prepared variables.
        for i in range(nv):
            name = self._names[i]
            sha1s[name] = self.get_sha1(name)
            parents_list = self.get_parents(name)
            try:
                parent = parents_list[0]
                parents[name] = parent
                parent_inclusions[name] = inclusions[parent]
            except IndexError:
                parents[name] = None
                parent_inclusions[name] = set()
            # we want to emit start, finish, replacement_length, replacement_lines tuples.
            diff_hunks[name] = []
            current_hunks[name] = [0, 0, 0, []]  # start, finish, repl_length, repl_tuples
            parent_linenums[name] = 0
            noeols[name] = False
            parent_noeols[name] = False
            last_parent_lines[name] = None
            new_inc = set([name])
            for p in self._parents[i]:
                new_inc.update(inclusions[self._idx_to_name(p)])
            # debug only, known good so far.
            #assert set(new_inc) == set(self.get_ancestry(name)), \
            #    'failed %s != %s' % (set(new_inc), set(self.get_ancestry(name)))
            inclusions[name] = new_inc

        nlines = len(self._weave)

        for lineno, inserted, deletes, line in self._walk_internal():
            # a line is active in a version if:
            # insert is in the version's inclusions
            # and
            # deleteset & the version's inclusions is an empty set.
            # so - if we have an included-by mapping - version is included by
            # children, we get a list of children to examine for deletes
            # affecting them, which is less than the entire set of children.
            for version_id in version_ids:
                # The active inclusion must be an ancestor,
                # and no ancestors must have deleted this line,
                # because we don't support resurrection.
                parent_inclusion = parent_inclusions[version_id]
                inclusion = inclusions[version_id]
                parent_active = inserted in parent_inclusion and not (deletes & parent_inclusion)
                version_active = inserted in inclusion and not (deletes & inclusion)
                if not parent_active and not version_active:
                    # unrelated line of ancestry
                    continue
                elif parent_active and version_active:
                    # shared line between parent and version
                    parent_linenum = parent_linenums[version_id]
                    if current_hunks[version_id] != [parent_linenum, parent_linenum, 0, []]:
                        diff_hunks[version_id].append(tuple(current_hunks[version_id]))
                    parent_linenum += 1
                    current_hunks[version_id] = [parent_linenum, parent_linenum, 0, []]
                    parent_linenums[version_id] = parent_linenum
                    if line[-1] != '\n':
                        noeols[version_id] = True
                elif parent_active and not version_active:
                    # present in parent, deleted in version
                    current_hunks[version_id][1] += 1
                    parent_linenums[version_id] += 1
                    last_parent_lines[version_id] = line
                elif not parent_active and version_active:
                    # new in version, not in parent
                    # noeol only occurs at the end of a file because we
                    # diff linewise. We want to show noeol changes as an
                    # empty diff unless the actual eol-less content changed.
                    theline = line
                    try:
                        if last_parent_lines[version_id][-1] != '\n':
                            parent_noeols[version_id] = True
                    except (TypeError, IndexError):
                        pass
                    if theline[-1] != '\n':
                        noeols[version_id] = True
                    new_line = False
                    parent_should_go = False
                    if parent_noeols[version_id] == noeols[version_id]:
                        # no noeol toggle, so trust the weave's statement
                        # that this line is changed.
                        new_line = True
                        if parent_noeols[version_id]:
                            theline = theline + '\n'
                    elif parent_noeols[version_id]:
                        # parent has no eol, we do:
                        # our line is new, report as such..
                        new_line = True
                    elif noeols[version_id]:
                        # append an eol so that it looks like a complete line
                        theline = theline + '\n'
                        if parents[version_id] is not None:
                        #if last_parent_lines[version_id] is not None:
                            parent_should_go = True
                        if last_parent_lines[version_id] != theline:
                            # the eol-less content changed as well
                            new_line = True
                            #parent_should_go = False
                    if new_line:
                        current_hunks[version_id][2] += 1
                        current_hunks[version_id][3].append((inserted, theline))
                    if parent_should_go:
                        # the last hunk's last parent line is not eaten
                        current_hunks[version_id][1] -= 1
                    if current_hunks[version_id][1] < 0:
                        current_hunks[version_id][1] = 0
                        # import pdb;pdb.set_trace()
                    # assert current_hunks[version_id][1] >= 0

        # flush the last pending hunk for every version
        for i in range(nv):
            version = self._idx_to_name(i)
            if current_hunks[version] != [0, 0, 0, []]:
                diff_hunks[version].append(tuple(current_hunks[version]))

        result = {}
        for version_id in version_ids:
            result[version_id] = (
                parents[version_id],
                sha1s[version_id],
                noeols[version_id],
                diff_hunks[version_id],
                )
        return result
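
    # Illustrative sketch (assumed names): the value assembled above for each
    # requested version is a 4-tuple against its leftmost parent,
    # (parent, sha1, noeol, hunks), where every hunk is a
    # (start, finish, replacement_length, replacement_lines) tuple.
    #
    #   parent, sha1, noeol, hunks = w.get_deltas(['rev-2'])['rev-2']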

    def get_parents_with_ghosts(self, version_id):
        raise NotImplementedError(self.get_parents_with_ghosts)

    def insert_record_stream(self, stream):
        """Insert a record stream into this versioned file.

        :param stream: A stream of records to insert.
        :return: None
        :seealso VersionedFile.get_record_stream:
        """
        adapters = {}
        for record in stream:
            # Raise an error when a record is missing.
            if record.storage_kind == 'absent':
                raise RevisionNotPresent([record.key[0]], self)
            # adapt to non-tuple interface
            parents = [parent[0] for parent in record.parents]
            if record.storage_kind == 'fulltext':
                self.add_lines(record.key[0], parents,
                    split_lines(record.get_bytes_as('fulltext')))
            else:
                adapter_key = record.storage_kind, 'fulltext'
                try:
                    adapter = adapters[adapter_key]
                except KeyError:
                    adapter_factory = adapter_registry.get(adapter_key)
                    adapter = adapter_factory(self)
                    adapters[adapter_key] = adapter
                lines = split_lines(adapter.get_bytes(
                    record, record.get_bytes_as(record.storage_kind)))
                try:
                    self.add_lines(record.key[0], parents, lines)
                except RevisionAlreadyPresent:
                    pass
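
    # Illustrative sketch (assumed names): copying every version from one
    # weave into another by feeding one weave's record stream straight into
    # insert_record_stream().  'source' and 'target' are assumed Weaves.
    #
    #   keys = [(v,) for v in source.versions()]
    #   target.insert_record_stream(
    #       source.get_record_stream(keys, 'topological', False))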

    def get_parents(self, version_id):
        """See VersionedFile.get_parents."""
        return map(self._idx_to_name, self._parents[self._lookup(version_id)])

    def _check_repeated_add(self, name, parents, text, sha1):
        """Check that a duplicated add is OK.

        If it is, return the (old) index; otherwise raise an exception.
        """
        idx = self._lookup(name)
        if sorted(self._parents[idx]) != sorted(parents) \
            or sha1 != self._sha1s[idx]:
            raise RevisionAlreadyPresent(name, self._weave_name)
        return idx

    @deprecated_method(zero_eight)
    def add_identical(self, old_rev_id, new_rev_id, parents):
        """Please use Weave.clone_text now."""
        return self.clone_text(new_rev_id, old_rev_id, parents)

    def _add_lines(self, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """See VersionedFile.add_lines."""
        idx = self._add(version_id, lines, map(self._lookup, parents),
            nostore_sha=nostore_sha)
        return sha_strings(lines), sum(map(len, lines)), idx

    @deprecated_method(zero_eight)
    def add(self, name, parents, text, sha1=None):
        """See VersionedFile.add_lines for the non-deprecated api."""
        return self._add(name, text, map(self._maybe_lookup, parents), sha1)

    def _add(self, version_id, lines, parents, sha1=None, nostore_sha=None):
        """Add a single text on top of the weave.

        Returns the index number of the newly added version.
        """

        # TODO: check insertions are properly nested, that there are
        # no lines outside of insertion blocks, that deletions are
        # properly paired, etc.
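
    # Illustrative sketch (assumed names): _add() is normally reached through
    # the public VersionedFile.add_lines() API.  'w' is assumed to be an
    # empty Weave.
    #
    #   w.add_lines('rev-1', [], ['hello\n'])
    #   w.add_lines('rev-2', ['rev-1'], ['hello\n', 'world\n'])
    #   w.get_text('rev-2')     # -> 'hello\nworld\n'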

    def _join(self, other, pb, msg, version_ids, ignore_missing):
        """Worker routine for join()."""
        if not other.versions():
            return          # nothing to update, easy

        # version_ids is never None, InterWeave checks this.
        if not version_ids:
            return

        # two loops so that we do not change ourselves before verifying it
        # will be ok.
        # work through in index order to make sure we get all dependencies.
        names_to_join = []
        processed = 0
        # get only the selected versions that are in other.versions().
        version_ids = set(other.versions()).intersection(set(version_ids))
        # pull in the referenced graph.
        version_ids = other.get_ancestry(version_ids)
        pending_graph = [(version, other.get_parents(version)) for
                         version in version_ids]
        for name in topo_sort(pending_graph):
            other_idx = other._name_map[name]
            # returns True if we have it, False if we need it.
            if not self._check_version_consistent(other, other_idx, name):
                names_to_join.append((other_idx, name))
            processed += 1

        if pb and not msg:
            msg = 'weave join'

        merged = 0
        time0 = time.time()
        for other_idx, name in names_to_join:
            # TODO: If all the parents of the other version are already
            # present then we can avoid some work by just taking the delta
            # and adjusting the offsets.
            new_parents = self._imported_parents(other, other_idx)
            sha1 = other._sha1s[other_idx]

            merged += 1

            if pb:
                pb.update(msg, merged, len(names_to_join))

            lines = other.get_lines(other_idx)
            self._add(name, lines, new_parents, sha1)

        mutter("merged = %d, processed = %d, file_id=%s; deltat=%d" % (
               merged, processed, self._weave_name, time.time() - time0))
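
    # Illustrative sketch (assumed names): _join() is the worker behind the
    # public join() call.  'w_a' and 'w_b' are assumed to be Weaves sharing
    # some ancestry.
    #
    #   w_a.join(w_b)                           # pull in everything from w_b
    #   w_a.join(w_b, version_ids=['rev-9'])    # just rev-9 and its ancestry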

    def _imported_parents(self, other, other_idx):
        """Return list of parents in self corresponding to indexes in other."""
        new_parents = []