20
20
"""Versioned text file storage api."""
22
from bzrlib.lazy_import import lazy_import
23
lazy_import(globals(), """
33
from bzrlib.transport.memory import MemoryTransport
36
from cStringIO import StringIO
23
from copy import deepcopy
24
from unittest import TestSuite
27
import bzrlib.errors as errors
38
28
from bzrlib.inter import InterObject
39
29
from bzrlib.textmerge import TextMerge
30
from bzrlib.transport.memory import MemoryTransport
31
from bzrlib.tsort import topo_sort
40
33
from bzrlib.symbol_versioning import (deprecated_function,
61
54
self.finished = False
62
55
self._access_mode = access_mode
65
def check_not_reserved_id(version_id):
    """Delegate reserved-id checking for version_id to the revision module.

    Thin wrapper so VersionedFile call sites do not need to import
    revision directly (it is brought in via lazy_import).
    """
    # Defect fixed: a stray interleaved line-number artifact ("66") sat
    # between the def line and the body; it has been removed.
    revision.check_not_reserved_id(version_id)
68
57
def copy_to(self, name, transport):
    """Copy this versioned file to name on transport.

    :param name: The name of the copy on transport.
    :param transport: The transport to copy to.
    :raises NotImplementedError: always; concrete subclasses must
        override this abstract method.
    """
    # Defect fixed: interleaved line-number artifacts ("69", "58", "70",
    # "59") corrupted the body; they have been removed.
    raise NotImplementedError(self.copy_to)
98
87
:param sha1: The sha1 of the full text.
99
88
:param delta: The delta instructions. See get_delta for details.
101
version_id = osutils.safe_revision_id(version_id)
102
parents = [osutils.safe_revision_id(v) for v in parents]
103
90
self._check_write_ok()
104
91
if self.has_version(version_id):
105
92
raise errors.RevisionAlreadyPresent(version_id, self)
213
196
Must raise RevisionAlreadyPresent if the new version is
214
197
already present in file history."""
215
new_version_id = osutils.safe_revision_id(new_version_id)
216
old_version_id = osutils.safe_revision_id(old_version_id)
217
198
self._check_write_ok()
218
199
return self._clone_text(new_version_id, old_version_id, parents)
231
212
raise NotImplementedError(self.create_empty)
233
def fix_parents(self, version_id, new_parents):
214
def fix_parents(self, version, new_parents):
234
215
"""Fix the parents list for version.
236
217
This is done by appending a new version to the index
238
219
the parents list must be a superset of the current
241
version_id = osutils.safe_revision_id(version_id)
242
new_parents = [osutils.safe_revision_id(p) for p in new_parents]
243
222
self._check_write_ok()
244
return self._fix_parents(version_id, new_parents)
223
return self._fix_parents(version, new_parents)
246
def _fix_parents(self, version_id, new_parents):
225
def _fix_parents(self, version, new_parents):
247
226
"""Helper for fix_parents."""
248
227
raise NotImplementedError(self.fix_parents)
256
235
raise NotImplementedError(self.get_delta)
258
def get_deltas(self, version_ids):
237
def get_deltas(self, versions):
259
238
"""Get multiple deltas at once for constructing versions.
261
240
:return: dict(version_id:(delta_parent, sha1, noeol, delta))
263
242
version_id is the version_id created by that delta.
266
for version_id in version_ids:
267
result[version_id] = self.get_delta(version_id)
245
for version in versions:
246
result[version] = self.get_delta(version)
270
def make_mpdiffs(self, version_ids):
271
"""Create multiparent diffs for specified versions"""
272
knit_versions = set()
273
for version_id in version_ids:
274
knit_versions.add(version_id)
275
knit_versions.update(self.get_parents(version_id))
276
lines = dict(zip(knit_versions,
277
self._get_lf_split_line_list(knit_versions)))
279
for version_id in version_ids:
280
target = lines[version_id]
281
parents = [lines[p] for p in self.get_parents(version_id)]
283
left_parent_blocks = self._extract_blocks(version_id,
286
left_parent_blocks = None
287
diffs.append(multiparent.MultiParent.from_lines(target, parents,
291
def _extract_blocks(self, version_id, source, target):
294
def add_mpdiffs(self, records):
295
"""Add mpdiffs to this versionedfile
297
Records should be iterables of version, parents, expected_sha1,
298
mpdiff. mpdiff should be a MultiParent instance.
301
for version, parents, expected_sha1, mpdiff in records:
302
mpvf = multiparent.MultiMemoryVersionedFile()
303
needed_parents = [p for p in parents if not mpvf.has_version(p)]
304
parent_lines = self._get_lf_split_line_list(needed_parents)
305
for parent_id, lines in zip(needed_parents, parent_lines):
306
mpvf.add_version(lines, parent_id, [])
307
mpvf.add_diff(mpdiff, version, parents)
308
lines = mpvf.get_line_list([version])[0]
309
version_text = self.add_lines(version, parents, lines, vf_parents)
310
vf_parents[version] = version_text
311
if expected_sha1 != self.get_sha1(version):
312
raise errors.VersionedFileInvalidChecksum(version)
314
249
def get_sha1(self, version_id):
315
250
"""Get the stored sha1 sum for the given revision.
319
254
raise NotImplementedError(self.get_sha1)
321
def get_sha1s(self, version_ids):
322
"""Get the stored sha1 sums for the given revisions.
324
:param version_ids: The names of the versions to lookup
325
:return: a list of sha1s in order according to the version_ids
327
raise NotImplementedError(self.get_sha1)
329
256
def get_suffixes(self):
330
257
"""Return the file suffixes associated with this versioned file."""
331
258
raise NotImplementedError(self.get_suffixes)
356
283
raise NotImplementedError(self.get_lines)
358
def _get_lf_split_line_list(self, version_ids):
    """Return the full texts of version_ids split into lines.

    :param version_ids: versions whose texts to fetch via self.get_texts.
    :return: a list (one entry per version id, in order) of lists of
        lines; StringIO.readlines keeps the trailing newline on each line.
    """
    # Defect fixed: an interleaved line-number artifact ("359") corrupted
    # the body; it has been removed.
    return [StringIO(t).readlines() for t in self.get_texts(version_ids)]
361
def get_ancestry(self, version_ids, topo_sorted=True):
285
def get_ancestry(self, version_ids):
362
286
"""Return a list of all ancestors of given version(s). This
363
287
will not include the null revision.
365
This list will not be topologically sorted if topo_sorted=False is
368
289
Must raise RevisionNotPresent if any of the given versions are
369
290
not present in file history."""
370
291
if isinstance(version_ids, basestring):
390
311
:param version_ids: Versions to select.
391
312
None means retrieve all versions.
393
315
if version_ids is None:
394
return dict(self.iter_parents(self.versions()))
396
pending = set(osutils.safe_revision_id(v) for v in version_ids)
398
this_iteration = pending
400
for version, parents in self.iter_parents(this_iteration):
316
for version in self.versions():
317
result[version] = self.get_parents(version)
319
pending = set(version_ids)
321
version = pending.pop()
322
if version in result:
324
parents = self.get_parents(version)
325
for parent in parents:
401
329
result[version] = parents
402
pending.update(parents)
403
pending.difference_update(result)
406
332
def get_graph_with_ghosts(self):
480
def iter_lines_added_or_present_in_versions(self, version_ids=None,
406
def iter_lines_added_or_present_in_versions(self, version_ids=None):
482
407
"""Iterate over the lines in the versioned file from version_ids.
484
409
This may return lines from other versions, and does not return the
487
412
thinks is relevant, but given that such hints are just guesses,
488
413
its better not to have it if we don't need it.
490
If a progress bar is supplied, it may be used to indicate progress.
491
The caller is responsible for cleaning up progress bars (because this
494
415
NOTES: Lines are normalised: they will all have \n terminators.
495
416
Lines are returned in arbitrary order.
497
418
raise NotImplementedError(self.iter_lines_added_or_present_in_versions)
499
def iter_parents(self, version_ids):
500
"""Iterate through the parents for many version ids.
502
:param version_ids: An iterable yielding version_ids.
503
:return: An iterator that yields (version_id, parents). Requested
504
version_ids not present in the versioned file are simply skipped.
505
The order is undefined, allowing for different optimisations in
506
the underlying implementation.
508
for version_id in version_ids:
510
yield version_id, tuple(self.get_parents(version_id))
511
except errors.RevisionNotPresent:
514
420
def transaction_finished(self):
515
421
"""The transaction that this file was opened in has finished.
563
469
raise NotImplementedError(VersionedFile.plan_merge)
565
def weave_merge(self, plan, a_marker=TextMerge.A_MARKER,
471
def weave_merge(self, plan, a_marker=TextMerge.A_MARKER,
566
472
b_marker=TextMerge.B_MARKER):
567
473
return PlanWeaveMerge(plan, a_marker, b_marker).merge_lines()[0]
751
656
new_version_ids.add(version)
752
657
return new_version_ids
660
class InterVersionedFileTestProviderAdapter(object):
661
"""A tool to generate a suite testing multiple inter versioned-file classes.
663
This is done by copying the test once for each InterVersionedFile provider
664
and injecting the transport_server, transport_readonly_server,
665
versionedfile_factory and versionedfile_factory_to classes into each copy.
666
Each copy is also given a new id() to make it easy to identify.
669
def __init__(self, transport_server, transport_readonly_server, formats):
    """Create a test provider adapter.

    :param transport_server: writable transport server injected into each
        generated test copy.
    :param transport_readonly_server: read-only transport server injected
        into each generated test copy.
    :param formats: iterable of (interversionedfile_class,
        versionedfile_factory, versionedfile_factory_to) tuples, as
        unpacked later by adapt().
    """
    # Defect fixed: interleaved line-number artifacts ("670", "671",
    # "672") corrupted the body; they have been removed.
    self._transport_server = transport_server
    self._transport_readonly_server = transport_readonly_server
    self._formats = formats
674
def adapt(self, test):
676
for (interversionedfile_class,
677
versionedfile_factory,
678
versionedfile_factory_to) in self._formats:
679
new_test = deepcopy(test)
680
new_test.transport_server = self._transport_server
681
new_test.transport_readonly_server = self._transport_readonly_server
682
new_test.interversionedfile_class = interversionedfile_class
683
new_test.versionedfile_factory = versionedfile_factory
684
new_test.versionedfile_factory_to = versionedfile_factory_to
685
def make_new_test_id():
686
new_id = "%s(%s)" % (new_test.id(), interversionedfile_class.__name__)
687
return lambda: new_id
688
new_test.id = make_new_test_id()
689
result.addTest(new_test)
693
def default_test_list():
694
"""Generate the default list of interversionedfile permutations to test."""
695
from bzrlib.weave import WeaveFile
696
from bzrlib.knit import KnitVersionedFile
698
# test the fallback InterVersionedFile from annotated knits to weave
699
result.append((InterVersionedFile,
702
for optimiser in InterVersionedFile._optimisers:
703
result.append((optimiser,
704
optimiser._matching_file_from_factory,
705
optimiser._matching_file_to_factory
707
# if there are specific combinations we want to use, we can add them