from copy import copy
from cStringIO import StringIO
import time

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from bzrlib import tsort
""")
from bzrlib import (
    osutils,
    )
import bzrlib.errors as errors
from bzrlib.errors import (WeaveError, WeaveFormatError, WeaveParentMismatch,
        RevisionAlreadyPresent,
        RevisionNotPresent,
        UnavailableRepresentation,
        WeaveRevisionAlreadyPresent,
        WeaveRevisionNotPresent,
        )
from bzrlib.osutils import dirname, sha, sha_strings, split_lines
import bzrlib.patiencediff
from bzrlib.revision import NULL_REVISION
from bzrlib.symbol_versioning import (deprecated_method,
        deprecated_function,
        zero_eight,
        )
from bzrlib.trace import mutter
from bzrlib.tsort import topo_sort
from bzrlib.versionedfile import (
    AbsentContentFactory,
    adapter_registry,
    ContentFactory,
    InterVersionedFile,
    sort_groupcompress,
    VersionedFile,
    )
from bzrlib.weavefile import _read_weave_v5, write_weave_v5


class WeaveContentFactory(ContentFactory):
    """Content factory for streaming from weaves.

    :seealso ContentFactory:
    """

    def __init__(self, version, weave):
        """Create a WeaveContentFactory for version from weave."""
        ContentFactory.__init__(self)
        self.sha1 = weave.get_sha1s([version])[version]
        self.key = (version,)
        parents = weave.get_parent_map([version])[version]
        self.parents = tuple((parent,) for parent in parents)
        self.storage_kind = 'fulltext'
        self._weave = weave

    def get_bytes_as(self, storage_kind):
        if storage_kind == 'fulltext':
            return self._weave.get_text(self.key[-1])
        elif storage_kind == 'chunked':
            return self._weave.get_lines(self.key[-1])
        else:
            raise UnavailableRepresentation(self.key, storage_kind, 'fulltext')


class Weave(VersionedFile):
    """weave - versioned text file storage.

    A Weave manages versions of line-based text files, keeping track
    of the originating version for each line.
    """

    # (constructor and most query methods elided)

    __contains__ = has_version
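
    # Illustrative sketch (not part of the original file; names are
    # hypothetical): building a tiny weave in memory and reading a version
    # back out.
    #
    #   w = Weave(weave_name='demo')
    #   w.add_lines('v1', [], ['hello\n'])
    #   w.add_lines('v2', ['v1'], ['hello\n', 'world\n'])
    #   w.get_lines('v2')            # -> ['hello\n', 'world\n']
    #   list(w.annotate('v2'))       # each line paired with its origin version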

    def get_record_stream(self, versions, ordering, include_delta_closure):
        """Get a stream of records for versions.

        :param versions: The versions to include. Each version is a tuple
            (version,).
        :param ordering: Either 'unordered' or 'topological'. A topologically
            sorted stream has compression parents strictly before their
            children.
        :param include_delta_closure: If True then the closure across any
            compression parents will be included (in the opaque data).
        :return: An iterator of ContentFactory objects, each of which is only
            valid until the iterator is advanced.
        """
        versions = [version[-1] for version in versions]
        if ordering == 'topological':
            parents = self.get_parent_map(versions)
            new_versions = tsort.topo_sort(parents)
            new_versions.extend(set(versions).difference(set(parents)))
            versions = new_versions
        elif ordering == 'groupcompress':
            parents = self.get_parent_map(versions)
            new_versions = sort_groupcompress(parents)
            new_versions.extend(set(versions).difference(set(parents)))
            versions = new_versions
        for version in versions:
            if version in self:
                yield WeaveContentFactory(version, self)
            else:
                yield AbsentContentFactory((version,))
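
    # Illustrative sketch (hypothetical names): consuming the stream in
    # topological order, so every version is seen after its parents.
    #
    #   for record in w.get_record_stream([(v,) for v in w.versions()],
    #                                     'topological', False):
    #       if record.storage_kind == 'absent':
    #           continue
    #       text = record.get_bytes_as('fulltext')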

    def get_parent_map(self, version_ids):
        """See VersionedFile.get_parent_map."""
        result = {}
        for version_id in version_ids:
            if version_id == NULL_REVISION:
                parents = ()
            else:
                try:
                    parents = tuple(
                        map(self._idx_to_name,
                            self._parents[self._lookup(version_id)]))
                except RevisionNotPresent:
                    continue
            result[version_id] = parents
        return result

    def get_delta(self, version_id):
        """See VersionedFile.get_delta."""
        return self.get_deltas([version_id])[version_id]

    def get_deltas(self, version_ids):
        """See VersionedFile.get_deltas."""
        version_ids = self.get_ancestry(version_ids)
        for version_id in version_ids:
            if not self.has_version(version_id):
                raise RevisionNotPresent(version_id, self)
        # try extracting all versions; parallel extraction is used
        nv = self.num_versions()
        sha1s = {}
        deltas = {}
        texts = {}
        inclusions = {}
        noeols = {}
        last_parent_lines = {}
        parents = {}
        parent_inclusions = {}
        parent_linenums = {}
        parent_noeols = {}
        current_hunks = {}
        diff_hunks = {}
        # its simplest to generate a full set of prepared variables.
        for i in range(nv):
            name = self._names[i]
            sha1s[name] = self.get_sha1(name)
            parents_list = self.get_parents(name)
            try:
                parent = parents_list[0]
                parents[name] = parent
                parent_inclusions[name] = inclusions[parent]
            except IndexError:
                parents[name] = None
                parent_inclusions[name] = set()
            # we want to emit start, finish, replacement_length, replacement_lines tuples.
            diff_hunks[name] = []
            current_hunks[name] = [0, 0, 0, []] # #start, finish, repl_length, repl_tuples
            parent_linenums[name] = 0
            noeols[name] = False
            parent_noeols[name] = False
            last_parent_lines[name] = None
            new_inc = set([name])
            for p in self._parents[i]:
                new_inc.update(inclusions[self._idx_to_name(p)])
            # debug only, known good so far.
            #assert set(new_inc) == set(self.get_ancestry(name)), \
            #    'failed %s != %s' % (set(new_inc), set(self.get_ancestry(name)))
            inclusions[name] = new_inc

        nlines = len(self._weave)

        for lineno, inserted, deletes, line in self._walk_internal():
            # a line is active in a version if:
            # insert is in the versions inclusions
            # and
            # deleteset & the versions inclusions is an empty set.
            # so - if we have a included by mapping - version is included by
            # children, we get a list of children to examine for deletes affect
            # ing them, which is less than the entire set of children.
            for version_id in version_ids:
                # The active inclusion must be an ancestor,
                # and no ancestors must have deleted this line,
                # because we don't support resurrection.
                parent_inclusion = parent_inclusions[version_id]
                inclusion = inclusions[version_id]
                parent_active = inserted in parent_inclusion and not (deletes & parent_inclusion)
                version_active = inserted in inclusion and not (deletes & inclusion)
                if not parent_active and not version_active:
                    # unrelated line of ancestry
                    continue
                elif parent_active and version_active:
                    # shared line
                    parent_linenum = parent_linenums[version_id]
                    if current_hunks[version_id] != [parent_linenum, parent_linenum, 0, []]:
                        diff_hunks[version_id].append(tuple(current_hunks[version_id]))
                    parent_linenum += 1
                    current_hunks[version_id] = [parent_linenum, parent_linenum, 0, []]
                    parent_linenums[version_id] = parent_linenum
                    if line[-1] != '\n':
                        noeols[version_id] = True
                elif parent_active and not version_active:
                    # deleted line
                    current_hunks[version_id][1] += 1
                    parent_linenums[version_id] += 1
                    last_parent_lines[version_id] = line
                elif not parent_active and version_active:
                    # replacement line
                    # noeol only occurs at the end of a file because we
                    # diff linewise. We want to show noeol changes as a
                    # empty diff unless the actual eol-less content changed.
                    theline = line
                    try:
                        if last_parent_lines[version_id][-1] != '\n':
                            parent_noeols[version_id] = True
                    except (TypeError, IndexError):
                        pass
                    try:
                        if theline[-1] != '\n':
                            noeols[version_id] = True
                    except IndexError:
                        pass
                    new_line = False
                    parent_should_go = False

                    if parent_noeols[version_id] == noeols[version_id]:
                        # no noeol toggle, so trust the weaves statement
                        # that this line is changed.
                        new_line = True
                        if parent_noeols[version_id]:
                            theline = theline + '\n'
                    elif parent_noeols[version_id]:
                        # parent has no eol, we do:
                        # our line is new, report as such..
                        new_line = True
                    elif noeols[version_id]:
                        # append a eol so that it looks like
                        # a normalised delta
                        theline = theline + '\n'
                        if parents[version_id] is not None:
                        #if last_parent_lines[version_id] is not None:
                            parent_should_go = True
                        if last_parent_lines[version_id] != theline:
                            # but changed anyway
                            new_line = True
                            #parent_should_go = False
                    if new_line:
                        current_hunks[version_id][2] += 1
                        current_hunks[version_id][3].append((inserted, theline))
                    if parent_should_go:
                        # last hunk last parent line is not eaten
                        current_hunks[version_id][1] -= 1
                    if current_hunks[version_id][1] < 0:
                        current_hunks[version_id][1] = 0
                        # import pdb;pdb.set_trace()
                    # assert current_hunks[version_id][1] >= 0

        # flush the last hunk for each version
        for i in range(nv):
            version = self._idx_to_name(i)
            if current_hunks[version] != [0, 0, 0, []]:
                diff_hunks[version].append(tuple(current_hunks[version]))
        result = {}
        for version_id in version_ids:
            result[version_id] = (
                parents[version_id],
                sha1s[version_id],
                noeols[version_id],
                diff_hunks[version_id],
                )
        return result
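
    # Illustrative note (an assumption, not taken from the original file):
    # each value produced by get_deltas() above is a
    # (delta_parent, sha1, noeol, hunks) tuple, where hunks is a list of
    # (start, finish, replacement_length, replacement_lines) entries against
    # the parent text and each replacement line is paired with the version
    # that introduced it, e.g. (0, 1, 1, [('v2', 'new first line\n')]).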

    def get_parents_with_ghosts(self, version_id):
        raise NotImplementedError(self.get_parents_with_ghosts)

    def insert_record_stream(self, stream):
        """Insert a record stream into this versioned file.

        :param stream: A stream of records to insert.
        :seealso VersionedFile.get_record_stream:
        """
        adapters = {}
        for record in stream:
            # Raise an error when a record is missing.
            if record.storage_kind == 'absent':
                raise RevisionNotPresent([record.key[0]], self)
            # adapt to non-tuple interface
            parents = [parent[0] for parent in record.parents]
            if (record.storage_kind == 'fulltext'
                or record.storage_kind == 'chunked'):
                self.add_lines(record.key[0], parents,
                    osutils.chunks_to_lines(record.get_bytes_as('chunked')))
            else:
                adapter_key = record.storage_kind, 'fulltext'
                try:
                    adapter = adapters[adapter_key]
                except KeyError:
                    adapter_factory = adapter_registry.get(adapter_key)
                    adapter = adapter_factory(self)
                    adapters[adapter_key] = adapter
                lines = split_lines(adapter.get_bytes(record))
                try:
                    self.add_lines(record.key[0], parents, lines)
                except RevisionAlreadyPresent:
                    pass
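
    # Illustrative sketch (hypothetical names): mirroring one weave into
    # another through the record-stream API; topological ordering keeps
    # parents ahead of their children.
    #
    #   keys = [(v,) for v in source_weave.versions()]
    #   stream = source_weave.get_record_stream(keys, 'topological', False)
    #   target_weave.insert_record_stream(stream)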

    def get_parents(self, version_id):
        """See VersionedFile.get_parents."""
        return map(self._idx_to_name, self._parents[self._lookup(version_id)])

    def _check_repeated_add(self, name, parents, text, sha1):
        """Check that a duplicated add is OK.

        If it is, return the (old) index; otherwise raise an exception.
        """
        idx = self._lookup(name)
        if sorted(self._parents[idx]) != sorted(parents) \
            or sha1 != self._sha1s[idx]:
            raise RevisionAlreadyPresent(name, self._weave_name)
        return idx

    @deprecated_method(zero_eight)
    def add_identical(self, old_rev_id, new_rev_id, parents):
        """Please use Weave.clone_text now."""
        return self.clone_text(new_rev_id, old_rev_id, parents)

    def _add_lines(self, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """See VersionedFile.add_lines."""
        idx = self._add(version_id, lines, map(self._lookup, parents),
            nostore_sha=nostore_sha)
        return sha_strings(lines), sum(map(len, lines)), idx

    @deprecated_method(zero_eight)
    def add(self, name, parents, text, sha1=None):
        """See VersionedFile.add_lines for the non deprecated api."""
        return self._add(name, text, map(self._maybe_lookup, parents), sha1)

    def _add(self, version_id, lines, parents, sha1=None, nostore_sha=None):
        """Add a single text on top of the weave.

        Returns the index number of the newly added version.

        version_id
            Symbolic name for this version.
            (Typically the revision-id of the revision that added it.)
            If None, a name will be allocated based on the hash. (sha1:SHAHASH)

        parents
            List or set of direct parent version numbers.

        lines
            Sequence of lines to be added in the new version.

        :param nostore_sha: See VersionedFile.add_lines.
        """
        self._check_lines_not_unicode(lines)
        self._check_lines_are_lines(lines)
        if not sha1:
            sha1 = sha_strings(lines)
        if sha1 == nostore_sha:
            raise errors.ExistingContent
        if version_id is None:
            version_id = "sha1:" + sha1
        if version_id in self._name_map:
            return self._check_repeated_add(version_id, parents, lines, sha1)
        # (remainder of _add elided)
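
    # Illustrative sketch (hypothetical names): the public add_lines() wrapper
    # ends up here; when nostore_sha equals the new text's sha1 the weave
    # refuses to store a duplicate by raising ExistingContent.
    #
    #   w = Weave(weave_name='demo')
    #   sha1, length, idx = w.add_lines('v1', [], ['line one\n'])
    #   w.add_lines('v2', ['v1'], ['line one\n', 'line two\n'])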

    # Fragment of the weave consistency checks (surrounding code elided):
    #   no lines outside of insertion blocks, that deletions are
    #   properly paired, etc.

    def _join(self, other, pb, msg, version_ids, ignore_missing):
        """Worker routine for join()."""
        if not other.versions():
            return          # nothing to update, easy

        if not version_ids:
            # versions is never none, InterWeave checks this.
            return

        # two loops so that we do not change ourselves before verifying it
        # will be ok.
        # work through in index order to make sure we get all dependencies
        names_to_join = []
        processed = 0
        # get the selected versions only that are in other.versions.
        version_ids = set(other.versions()).intersection(set(version_ids))
        # pull in the referenced graph.
        version_ids = other.get_ancestry(version_ids)
        pending_graph = [(version, other.get_parents(version)) for
            version in version_ids]
        for name in topo_sort(pending_graph):
            other_idx = other._name_map[name]
            # returns True if we have it, False if we need it.
            if not self._check_version_consistent(other, other_idx, name):
                names_to_join.append((other_idx, name))
            processed += 1

        if pb and not msg:
            msg = 'weave join'

        merged = 0
        time0 = time.time()
        for other_idx, name in names_to_join:
            # TODO: If all the parents of the other version are already
            # present then we can avoid some work by just taking the delta
            # and adjusting the offsets.
            new_parents = self._imported_parents(other, other_idx)
            sha1 = other._sha1s[other_idx]

            merged += 1

            if pb:
                pb.update(msg, merged, len(names_to_join))

            lines = other.get_lines(other_idx)
            self._add(name, lines, new_parents, sha1)

        mutter("merged = %d, processed = %d, file_id=%s; deltat=%d"%(
                merged, processed, self._weave_name, time.time()-time0))

    def _imported_parents(self, other, other_idx):
        """Return list of parents in self corresponding to indexes in other."""
        new_parents = []
        # (remainder of this method elided)


class WeaveFile(Weave):
    """A WeaveFile represents a Weave on disk and writes on change."""

    WEAVE_SUFFIX = '.weave'

    # (constructor and earlier methods elided)

    def copy_to(self, name, transport):
        """See VersionedFile.copy_to()."""
        # as we are all in memory always, just serialise to the new place.
        sio = StringIO()
        write_weave_v5(self, sio)
        sio.seek(0)
        transport.put_file(name + WeaveFile.WEAVE_SUFFIX, sio, self._filemode)

    def create_empty(self, name, transport, filemode=None):
        return WeaveFile(name, transport, filemode, create=True)

    def _save(self):
        """Save the weave."""
        self._check_write_ok()
        sio = StringIO()
        write_weave_v5(self, sio)
        bytes = sio.getvalue()
        path = self._weave_name + WeaveFile.WEAVE_SUFFIX
        try:
            self._transport.put_bytes(path, bytes, self._filemode)
        except errors.NoSuchFile:
            self._transport.mkdir(dirname(path))
            self._transport.put_bytes(path, bytes, self._filemode)

    @staticmethod
    def get_suffixes():
        """See VersionedFile.get_suffixes()."""
        return [WeaveFile.WEAVE_SUFFIX]

    def insert_record_stream(self, stream):
        super(WeaveFile, self).insert_record_stream(stream)
        self._save()

    def join(self, other, pb=None, msg=None, version_ids=None,
             ignore_missing=False):
        """Join other into self and save."""
        super(WeaveFile, self).join(other, pb, msg, version_ids, ignore_missing)
        self._save()
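

# Illustrative sketch (hypothetical names): a WeaveFile persists itself after
# each mutating call via _save(), so it only needs a transport to write to.
#
#   from bzrlib.transport import get_transport
#   t = get_transport('/tmp/weave-store')
#   wf = WeaveFile('file-id', t, create=True)
#   wf.add_lines('rev-1', [], ['hello\n'])   # weave rewritten to file-id.weave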


@deprecated_function(zero_eight)
def reweave(wa, wb, pb=None, msg=None):
    """reweaving is deprecated, please just use weave.join()."""
    _reweave(wa, wb, pb, msg)


def _reweave(wa, wb, pb=None, msg=None):
    """Combine two weaves and return the result.

    This works even if a revision R has different parents in
    wa and wb. In the resulting weave all the parents are given.

    This is done by just building up a new weave, maintaining ordering
    of the versions in the two inputs. More efficient approaches
    might be possible but it should only be necessary to do
    this operation rarely, when a new previously ghost version is
    inserted.

    :param pb: An optional progress bar, indicating how far done we are
    :param msg: An optional message for the progress
    """
    # (function body elided)


def _reweave_parent_graphs(wa, wb):
    """Return combined parent ancestry for two weaves.

    Returned as a list of (version_name, set(parent_names)) pairs
    """
    combined = {}
    for weave in [wa, wb]:
        for idx, name in enumerate(weave._names):
            p = combined.setdefault(name, set())
            p.update(map(weave._idx_to_name, weave._parents[idx]))
    return combined
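

# Illustrative sketch (hypothetical names): joining weaves that disagree about
# a revision's parents can fall back to a full reweave, mirroring the handling
# in InterWeave.join() further down.
#
#   try:
#       wa.join(wb)
#   except errors.WeaveParentMismatch:
#       wa._reweave(wb, pb=None, msg=None)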


def weave_toc(w):
    """Show the weave's table-of-contents"""
    print '%6s %50s %10s %10s' % ('ver', 'name', 'sha1', 'parents')
    for i in (6, 50, 10, 10):
        print '-' * i,
    print
    for i in range(w.num_versions()):
        sha1 = w._sha1s[i]
        name = w._names[i]
        parent_str = ' '.join(map(str, w._parents[i]))
        print '%6d %-50.50s %10.10s %s' % (i, name, sha1, parent_str)


def weave_stats(weave_file, pb):
    from bzrlib.weavefile import read_weave

    wf = file(weave_file, 'rb')
    w = read_weave(wf)
    # FIXME: doesn't work on pipes
    weave_size = wf.tell()

    total = 0
    vers = len(w)
    for i in range(vers):
        pb.update('checking sizes', i, vers)
        for origin, lineno, line in w._extract([i]):
            total += len(line)

    print 'versions          %9d' % vers
    print 'weave file        %9d bytes' % weave_size
    print 'total contents    %9d bytes' % total
    print 'compression ratio %9.2fx' % (float(total) / float(weave_size))
    if vers:
        avg = total / vers
        print 'average size      %9d bytes' % avg
        print 'relative size     %9.2fx' % (float(weave_size) / float(avg))


def usage():
    print """bzr weave tool

Experimental tool for the weave algorithm.

usage:
    weave init WEAVEFILE
        Create an empty weave file
    weave get WEAVEFILE VERSION
        Write out specified version.
    weave check WEAVEFILE
        Check consistency of all versions.
    weave toc WEAVEFILE
        Display table of contents.
    weave add WEAVEFILE NAME [BASE...] < NEWTEXT
        Add NEWTEXT, with specified parent versions.
    weave annotate WEAVEFILE VERSION
        Display origin of each line.
    weave merge WEAVEFILE VERSION1 VERSION2 > OUT
        Auto-merge two versions and display conflicts.
    weave diff WEAVEFILE VERSION1 VERSION2
        Show differences between two versions.

example:

    % weave init foo.weave
    % vi foo.txt
    % weave add foo.weave ver0 < foo.txt
    added version 0

    (create updated version)
    % vi foo.txt
    % weave get foo.weave 0 | diff -u - foo.txt
    % weave add foo.weave ver1 0 < foo.txt
    added version 1

    % weave get foo.weave 0 > foo.txt       (create forked version)
    % vi foo.txt
    % weave add foo.weave ver2 0 < foo.txt
    added version 2

    % weave merge foo.weave 1 2 > foo.txt   (merge them)
    % vi foo.txt                            (resolve conflicts)
    % weave add foo.weave merged 1 2 < foo.txt     (commit merged version)
"""


def main(argv):
    import sys
    import os
    # in case we're run directly from the subdirectory
    sys.path.append('..')

    from bzrlib.weavefile import write_weave, read_weave
    from bzrlib.progress import ProgressBar

    if len(argv) < 2:
        usage()
        return 0

    cmd = argv[1]

    def readit():
        return read_weave(file(argv[2], 'rb'))

    if cmd == 'help':
        usage()
    elif cmd == 'add':
        w = readit()
        # at the moment, based on everything in the file
        name = argv[3]
        parents = map(int, argv[4:])
        lines = sys.stdin.readlines()
        ver = w.add(name, parents, lines)
        write_weave(w, file(argv[2], 'wb'))
        print 'added version %r %d' % (name, ver)
    elif cmd == 'init':
        fn = argv[2]
        if os.path.exists(fn):
            raise IOError("file exists")
        w = Weave()
        write_weave(w, file(fn, 'wb'))
    elif cmd == 'get': # get one version
        w = readit()
        sys.stdout.writelines(w.get_iter(int(argv[3])))
    elif cmd == 'diff':
        w = readit()
        fn = argv[2]
        v1, v2 = map(int, argv[3:5])
        lines1 = w.get(v1)
        lines2 = w.get(v2)
        diff_gen = bzrlib.patiencediff.unified_diff(lines1, lines2,
                                '%s version %d' % (fn, v1),
                                '%s version %d' % (fn, v2))
        sys.stdout.writelines(diff_gen)
    elif cmd == 'annotate':
        w = readit()
        # newline is added to all lines regardless; too hard to get
        # reasonable formatting otherwise
        lasto = None
        for origin, text in w.annotate(int(argv[3])):
            text = text.rstrip('\r\n')
            if origin == lasto:
                print '      | %s' % (text)
            else:
                print '%5d | %s' % (origin, text)
                lasto = origin
    elif cmd == 'toc':
        weave_toc(readit())
    elif cmd == 'stats':
        weave_stats(argv[2], ProgressBar())
    elif cmd == 'check':
        w = readit()
        pb = ProgressBar()
        w.check(pb)
        print '%d versions ok' % w.num_versions()
    elif cmd == 'inclusions':
        w = readit()
        print ' '.join(map(str, w.inclusions([int(argv[3])])))
    elif cmd == 'parents':
        w = readit()
        print ' '.join(map(str, w._parents[int(argv[3])]))
    elif cmd == 'plan-merge':
        # replaced by 'bzr weave-plan-merge'
        w = readit()
        for state, line in w.plan_merge(int(argv[3]), int(argv[4])):
            if line:
                print '%14s | %s' % (state, line),
    elif cmd == 'merge':
        # replaced by 'bzr weave-merge-text'
        w = readit()
        p = w.plan_merge(int(argv[3]), int(argv[4]))
        sys.stdout.writelines(w.weave_merge(p))
    else:
        raise ValueError('unknown command %r' % cmd)


if __name__ == '__main__':
    import sys
    sys.exit(main(sys.argv))


class InterWeave(InterVersionedFile):
    """Optimised code paths for weave to weave operations."""

    _matching_file_from_factory = staticmethod(WeaveFile)
    _matching_file_to_factory = staticmethod(WeaveFile)

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with weaves."""
        try:
            return (isinstance(source, Weave) and
                    isinstance(target, Weave))
        except AttributeError:
            return False

    def join(self, pb=None, msg=None, version_ids=None, ignore_missing=False):
        """See InterVersionedFile.join."""
        version_ids = self._get_source_version_ids(version_ids, ignore_missing)
        if self.target.versions() == [] and version_ids is None:
            self.target._copy_weave_content(self.source)
            return
        try:
            self.target._join(self.source, pb, msg, version_ids, ignore_missing)
        except errors.WeaveParentMismatch:
            self.target._reweave(self.source, pb, msg)


InterVersionedFile.register_optimiser(InterWeave)
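
# Illustrative note (not from the original file): registering the optimiser
# means InterVersionedFile.get(source, target) returns an InterWeave when both
# ends are weaves, so callers of the generic join API transparently get the
# weave-to-weave fast path (and the reweave fallback) sketched above.
#
#   inter = InterVersionedFile.get(source_weave, target_weave)
#   inter.join()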