20
20
"""Versioned text file storage api."""
23
from copy import deepcopy
24
from unittest import TestSuite
27
import bzrlib.errors as errors
22
from bzrlib.lazy_import import lazy_import
23
lazy_import(globals(), """
33
from bzrlib.transport.memory import MemoryTransport
36
from cStringIO import StringIO
28
38
from bzrlib.inter import InterObject
29
39
from bzrlib.textmerge import TextMerge
30
from bzrlib.transport.memory import MemoryTransport
31
from bzrlib.tsort import topo_sort
33
40
from bzrlib.symbol_versioning import (deprecated_function,
54
61
self.finished = False
55
62
self._access_mode = access_mode
65
def check_not_reserved_id(version_id):
66
revision.check_not_reserved_id(version_id)
57
68
def copy_to(self, name, transport):
    """Copy this versioned file to name on transport.

    Abstract on this base class: concrete versioned-file
    implementations must override it.
    """
    raise NotImplementedError(self.copy_to)
87
98
:param sha1: The sha1 of the full text.
88
99
:param delta: The delta instructions. See get_delta for details.
101
version_id = osutils.safe_revision_id(version_id)
102
parents = [osutils.safe_revision_id(v) for v in parents]
90
103
self._check_write_ok()
91
104
if self.has_version(version_id):
92
105
raise errors.RevisionAlreadyPresent(version_id, self)
196
213
Must raise RevisionAlreadyPresent if the new version is
197
214
already present in file history."""
215
new_version_id = osutils.safe_revision_id(new_version_id)
216
old_version_id = osutils.safe_revision_id(old_version_id)
198
217
self._check_write_ok()
199
218
return self._clone_text(new_version_id, old_version_id, parents)
212
231
raise NotImplementedError(self.create_empty)
214
def fix_parents(self, version, new_parents):
233
def fix_parents(self, version_id, new_parents):
215
234
"""Fix the parents list for version.
217
236
This is done by appending a new version to the index
219
238
the parents list must be a superset of the current
241
version_id = osutils.safe_revision_id(version_id)
242
new_parents = [osutils.safe_revision_id(p) for p in new_parents]
222
243
self._check_write_ok()
223
return self._fix_parents(version, new_parents)
244
return self._fix_parents(version_id, new_parents)
225
def _fix_parents(self, version, new_parents):
246
def _fix_parents(self, version_id, new_parents):
226
247
"""Helper for fix_parents."""
227
248
raise NotImplementedError(self.fix_parents)
235
256
raise NotImplementedError(self.get_delta)
237
def get_deltas(self, version_ids):
    """Get multiple deltas at once for constructing versions.

    :param version_ids: the versions to fetch deltas for.
    :return: dict(version_id:(delta_parent, sha1, noeol, delta))
        version_id is the version_id created by that delta.
    """
    # The fragment used `result` without initializing or returning it;
    # restore the accumulator so the method actually yields its dict.
    result = {}
    for version_id in version_ids:
        result[version_id] = self.get_delta(version_id)
    return result
270
def make_mpdiffs(self, version_ids):
    """Create multiparent diffs for specified versions.

    :param version_ids: versions to produce MultiParent diffs for.
    :return: list of multiparent.MultiParent, one per requested version,
        in the order of version_ids.
    """
    # Gather every requested version plus its parents so all line lists
    # can be fetched in a single _get_lf_split_line_list call.
    knit_versions = set()
    for version_id in version_ids:
        knit_versions.add(version_id)
        knit_versions.update(self.get_parents(version_id))
    lines = dict(zip(knit_versions,
                     self._get_lf_split_line_list(knit_versions)))
    diffs = []
    for version_id in version_ids:
        target = lines[version_id]
        parents = [lines[p] for p in self.get_parents(version_id)]
        if parents:
            # Subclasses may precompute matching blocks against the
            # left-hand (first) parent; see _extract_blocks.
            # NOTE(review): argument order (source, target) assumed from
            # the _extract_blocks signature — confirm against subclasses.
            left_parent_blocks = self._extract_blocks(version_id,
                                                      parents[0], target)
        else:
            left_parent_blocks = None
        diffs.append(multiparent.MultiParent.from_lines(target, parents,
                     left_parent_blocks))
    return diffs
291
def _extract_blocks(self, version_id, source, target):
294
def add_mpdiffs(self, records):
    """Add mpdiffs to this versionedfile.

    Records should be iterables of version, parents, expected_sha1,
    mpdiff. mpdiff should be a MultiParent instance.
    """
    # Accumulates add_lines results keyed by version id; passed back into
    # add_lines for later records (presumably cached parent texts —
    # TODO(review): confirm against add_lines' fourth parameter).
    vf_parents = {}
    for version, parents, expected_sha1, mpdiff in records:
        mpvf = multiparent.MultiMemoryVersionedFile()
        needed_parents = [p for p in parents if not mpvf.has_version(p)]
        parent_lines = self._get_lf_split_line_list(needed_parents)
        for parent_id, lines in zip(needed_parents, parent_lines):
            mpvf.add_version(lines, parent_id, [])
        mpvf.add_diff(mpdiff, version, parents)
        lines = mpvf.get_line_list([version])[0]
        version_text = self.add_lines(version, parents, lines, vf_parents)
        vf_parents[version] = version_text
        # Verify the reconstructed text round-trips to the expected sha1.
        if expected_sha1 != self.get_sha1(version):
            raise errors.VersionedFileInvalidChecksum(version)
249
314
def get_sha1(self, version_id):
250
315
"""Get the stored sha1 sum for the given revision.
254
319
raise NotImplementedError(self.get_sha1)
321
def get_sha1s(self, version_ids):
    """Get the stored sha1 sums for the given revisions.

    :param version_ids: The names of the versions to lookup
    :return: a list of sha1s in order according to the version_ids
    """
    # Fixed: previously raised NotImplementedError(self.get_sha1),
    # reporting the wrong (singular) method name.
    raise NotImplementedError(self.get_sha1s)
256
329
def get_suffixes(self):
257
330
"""Return the file suffixes associated with this versioned file."""
258
331
raise NotImplementedError(self.get_suffixes)
283
356
raise NotImplementedError(self.get_lines)
285
def get_ancestry(self, version_ids):
358
def _get_lf_split_line_list(self, version_ids):
359
return [StringIO(t).readlines() for t in self.get_texts(version_ids)]
361
def get_ancestry(self, version_ids, topo_sorted=True):
286
362
"""Return a list of all ancestors of given version(s). This
287
363
will not include the null revision.
365
This list will not be topologically sorted if topo_sorted=False is
289
368
Must raise RevisionNotPresent if any of the given versions are
290
369
not present in file history."""
291
370
if isinstance(version_ids, basestring):
311
390
:param version_ids: Versions to select.
312
391
None means retrieve all versions.
393
if version_ids is None:
394
return dict(self.iter_parents(self.versions()))
315
if version_ids is None:
316
for version in self.versions():
317
result[version] = self.get_parents(version)
319
pending = set(version_ids)
321
version = pending.pop()
322
if version in result:
324
parents = self.get_parents(version)
396
pending = set(osutils.safe_revision_id(v) for v in version_ids)
398
this_iteration = pending
400
for version, parents in self.iter_parents(this_iteration):
401
result[version] = parents
325
402
for parent in parents:
326
403
if parent in result:
328
405
pending.add(parent)
329
result[version] = parents
332
408
def get_graph_with_ghosts(self):
406
def iter_lines_added_or_present_in_versions(self, version_ids=None):
482
def iter_lines_added_or_present_in_versions(self, version_ids=None,
407
484
"""Iterate over the lines in the versioned file from version_ids.
409
486
This may return lines from other versions, and does not return the
412
489
thinks is relevant, but given that such hints are just guesses,
413
490
its better not to have it if we don't need it.
492
If a progress bar is supplied, it may be used to indicate progress.
493
The caller is responsible for cleaning up progress bars (because this
415
496
NOTES: Lines are normalised: they will all have \n terminators.
416
497
Lines are returned in arbitrary order.
418
499
raise NotImplementedError(self.iter_lines_added_or_present_in_versions)
501
def iter_parents(self, version_ids):
    """Iterate through the parents for many version ids.

    :param version_ids: An iterable yielding version_ids.
    :return: An iterator that yields (version_id, parents). Requested
        version_ids not present in the versioned file are simply skipped.
        The order is undefined, allowing for different optimisations in
        the underlying implementation.
    """
    for version_id in version_ids:
        # Restored try/pass: the fragment had a dangling except clause.
        try:
            yield version_id, tuple(self.get_parents(version_id))
        except errors.RevisionNotPresent:
            # Per the contract above, absent versions are skipped rather
            # than reported.
            pass
420
516
def transaction_finished(self):
421
517
"""The transaction that this file was opened in has finished.
469
565
raise NotImplementedError(VersionedFile.plan_merge)
471
def weave_merge(self, plan, a_marker=TextMerge.A_MARKER,
567
def weave_merge(self, plan, a_marker=TextMerge.A_MARKER,
472
568
b_marker=TextMerge.B_MARKER):
473
569
return PlanWeaveMerge(plan, a_marker, b_marker).merge_lines()[0]
656
753
new_version_ids.add(version)
657
754
return new_version_ids
660
class InterVersionedFileTestProviderAdapter(object):
    """A tool to generate a suite testing multiple inter versioned-file classes.

    This is done by copying the test once for each InterVersionedFile provider
    and injecting the transport_server, transport_readonly_server,
    versionedfile_factory and versionedfile_factory_to classes into each copy.
    Each copy is also given a new id() to make it easy to identify.
    """

    def __init__(self, transport_server, transport_readonly_server, formats):
        self._transport_server = transport_server
        self._transport_readonly_server = transport_readonly_server
        self._formats = formats

    def adapt(self, test):
        """Return a TestSuite containing one configured copy of test per format.

        Restored: the fragment used `result` without initializing it and
        never returned the suite.
        """
        result = TestSuite()
        for (interversionedfile_class,
             versionedfile_factory,
             versionedfile_factory_to) in self._formats:
            new_test = deepcopy(test)
            new_test.transport_server = self._transport_server
            new_test.transport_readonly_server = self._transport_readonly_server
            new_test.interversionedfile_class = interversionedfile_class
            new_test.versionedfile_factory = versionedfile_factory
            new_test.versionedfile_factory_to = versionedfile_factory_to
            def make_new_test_id():
                # Bind the id string now: a bare lambda over new_test would
                # be late-bound and every copy would report the last id.
                new_id = "%s(%s)" % (new_test.id(), interversionedfile_class.__name__)
                return lambda: new_id
            new_test.id = make_new_test_id()
            result.addTest(new_test)
        return result
693
def default_test_list():
694
"""Generate the default list of interversionedfile permutations to test."""
695
from bzrlib.weave import WeaveFile
696
from bzrlib.knit import KnitVersionedFile
698
# test the fallback InterVersionedFile from annotated knits to weave
699
result.append((InterVersionedFile,
702
for optimiser in InterVersionedFile._optimisers:
703
result.append((optimiser,
704
optimiser._matching_file_from_factory,
705
optimiser._matching_file_to_factory
707
# if there are specific combinations we want to use, we can add them