        """Returns whether version is present."""
        raise NotImplementedError(self.has_version)

    def add_delta(self, version_id, parents, delta_parent, sha1, noeol, delta):
        """Add a text to the versioned file via a pregenerated delta.

        :param version_id: The version id being added.
        :param parents: The parents of the version_id.
        :param delta_parent: The parent this delta was created against.
        :param sha1: The sha1 of the full text.
        :param noeol: True if the text does not end with a newline.
        :param delta: The delta instructions. See get_delta for details.
        """
        version_id = osutils.safe_revision_id(version_id)
        parents = [osutils.safe_revision_id(v) for v in parents]
        self._check_write_ok()
        if self.has_version(version_id):
            raise errors.RevisionAlreadyPresent(version_id, self)
        return self._add_delta(version_id, parents, delta_parent, sha1, noeol, delta)

    def _add_delta(self, version_id, parents, delta_parent, sha1, noeol, delta):
        """Class specific routine to add a delta.

        This generic version simply applies the delta to the delta_parent and
        then inserts it.
        """
        # strip annotation from delta
        new_delta = []
        for start, stop, delta_len, delta_lines in delta:
            new_delta.append((start, stop, delta_len,
                              [text for origin, text in delta_lines]))
        if delta_parent is not None:
            parent_full = self.get_lines(delta_parent)
        else:
            parent_full = []
        new_full = self._apply_delta(parent_full, new_delta)
        # it's impossible to have noeol on an empty file
        if noeol and new_full[-1][-1] == '\n':
            new_full[-1] = new_full[-1][:-1]
        self.add_lines(version_id, parents, new_full)
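
    # Illustrative sketch (not part of the original source): the 'delta'
    # consumed by _add_delta above is a sequence of annotated hunks of the
    # form (start, stop, length, [(origin, text), ...]); for example
    #
    #   delta = [(0, 1, 1, [('parent-rev', 'replacement line\n')])]
    #
    # would replace line 0 of the delta parent with one new line.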

    def add_lines(self, version_id, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a single text on top of the versioned file.

        Must raise RevisionAlreadyPresent if the new version is
        already present in file history.

        Must raise RevisionNotPresent if any of the given parents are
        not present in file history.

        :param lines: A list of lines. Each line must be a bytestring. And all
            of them except the last must be terminated with \n and contain no
            other \n's. The last line may either contain no \n's or a single
            terminating \n. If the lines list does not meet this constraint
            the add routine may error or may succeed - but you will be unable
            to read the data back accurately. (Checking the lines have been
            split correctly is expensive and extremely unlikely to catch bugs
            so it is not done at runtime unless check_content is True.)
        :param parent_texts: An optional dictionary containing the opaque
            representations of some or all of the parents of version_id to
            allow delta optimisations. VERY IMPORTANT: the texts must be those
            returned by add_lines or data corruption can be caused.
        :param left_matching_blocks: a hint about which areas are common
            between the text and its left-hand-parent. The format is
            the SequenceMatcher.get_matching_blocks format.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :param check_content: If True, the lines supplied are verified to be
            bytestrings that are correctly formed lines.
        :return: The text sha1, the number of bytes in the text, and an opaque
                 representation of the inserted version which can be provided
                 back to future add_lines calls in the parent_texts dictionary.
        """
        version_id = osutils.safe_revision_id(version_id)
        parents = [osutils.safe_revision_id(v) for v in parents]
        self._check_write_ok()
        return self._add_lines(version_id, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)

    def _add_lines(self, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """Helper to do the class specific add_lines."""
        raise NotImplementedError(self.add_lines)
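
    # Illustrative usage sketch (assumes 'vf' is a concrete VersionedFile
    # implementation such as a knit; not taken from the original module):
    #
    #   sha1, num_bytes, opaque = vf.add_lines('rev-1', [], ['hello\n'])
    #   # Passing the opaque value back via parent_texts lets the backend
    #   # reuse it when computing the delta for a child version:
    #   vf.add_lines('rev-2', ['rev-1'], ['hello\n', 'world\n'],
    #                parent_texts={'rev-1': opaque})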

    def add_lines_with_ghosts(self, version_id, parents, lines,
        parent_texts=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add lines to the versioned file, allowing ghosts to be present.

        This takes the same parameters as add_lines and returns the same.
        """
        version_id = osutils.safe_revision_id(version_id)
        parents = [osutils.safe_revision_id(v) for v in parents]
        self._check_write_ok()
        return self._add_lines_with_ghosts(version_id, parents, lines,
            parent_texts, nostore_sha, random_id, check_content)

    def _add_lines_with_ghosts(self, version_id, parents, lines, parent_texts,
        nostore_sha, random_id, check_content):
        """Helper to do class specific add_lines_with_ghosts."""
        raise NotImplementedError(self.add_lines_with_ghosts)
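
    # Illustrative sketch (not from the original source): unlike add_lines,
    # add_lines_with_ghosts accepts parent ids that are absent from this
    # versioned file, recording them as ghosts, e.g.
    #
    #   vf.add_lines_with_ghosts('rev-2', ['rev-1', 'ghost-rev'], ['text\n'])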
        raise NotImplementedError(self.create_empty)

    def fix_parents(self, version_id, new_parents):
        """Fix the parents list for version.

        This is done by appending a new version to the index
        with identical data except for the parents list.
        The parents list must be a superset of the current
        list.
        """
        version_id = osutils.safe_revision_id(version_id)
        new_parents = [osutils.safe_revision_id(p) for p in new_parents]
        self._check_write_ok()
        return self._fix_parents(version_id, new_parents)

    def _fix_parents(self, version_id, new_parents):
        """Helper for fix_parents."""
        raise NotImplementedError(self.fix_parents)

    def get_delta(self, version):
        """Get a delta for constructing version from some other version.

        :return: (delta_parent, sha1, noeol, delta)
            Where delta_parent is a version id or None to indicate no parent.
        """
        raise NotImplementedError(self.get_delta)

    def get_deltas(self, version_ids):
        """Get multiple deltas at once for constructing versions.

        :return: dict(version_id:(delta_parent, sha1, noeol, delta))
            Where delta_parent is a version id or None to indicate no parent,
            and version_id is the version_id created by that delta.
        """
        result = {}
        for version_id in version_ids:
            result[version_id] = self.get_delta(version_id)
        return result
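
    # Illustrative sketch (assumption, not original code): consuming the
    # mapping returned by get_deltas:
    #
    #   for version_id, (delta_parent, sha1, noeol, delta) in \
    #           vf.get_deltas(['rev-1', 'rev-2']).iteritems():
    #       ...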

    def get_format_signature(self):
        """Get a text description of the data encoding in this file.

        :since: 0.90
        """
        raise NotImplementedError(self.get_format_signature)

    def make_mpdiffs(self, version_ids):
        """Create multiparent diffs for specified versions."""
        knit_versions = set()
        for version_id in version_ids:
            knit_versions.add(version_id)
            knit_versions.update(self.get_parents(version_id))
        lines = dict(zip(knit_versions,
            self._get_lf_split_line_list(knit_versions)))
        diffs = []
        for version_id in version_ids:
            target = lines[version_id]
            parents = [lines[p] for p in self.get_parents(version_id)]
            if len(parents) > 0:
                left_parent_blocks = self._extract_blocks(version_id,
                    parents[0], target)
            else:
                left_parent_blocks = None
            diffs.append(multiparent.MultiParent.from_lines(target, parents,
                left_parent_blocks))
        return diffs

    def _extract_blocks(self, version_id, source, target):
        return None

    def add_mpdiffs(self, records):
        """Add mpdiffs to this VersionedFile.

        Records should be iterables of version, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.
        """
        # Does this need to call self._check_write_ok()? (IanC 20070919)
        vf_parents = {}
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        for version, parent_ids, expected_sha1, mpdiff in records:
            versions.append(version)
            mpvf.add_diff(mpdiff, version, parent_ids)
        needed_parents = set()
        for version, parent_ids, expected_sha1, mpdiff in records:
            needed_parents.update(p for p in parent_ids
                                  if not mpvf.has_version(p))
        for parent_id, lines in zip(needed_parents,
                                    self._get_lf_split_line_list(needed_parents)):
            mpvf.add_version(lines, parent_id, [])
        for (version, parent_ids, expected_sha1, mpdiff), lines in\
            zip(records, mpvf.get_line_list(versions)):
            if len(parent_ids) == 1:
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                    mpvf.get_diff(parent_ids[0]).num_lines()))
            else:
                left_matching_blocks = None
            _, _, version_text = self.add_lines(version, parent_ids, lines,
                vf_parents, left_matching_blocks=left_matching_blocks)
            vf_parents[version] = version_text
        for (version, parent_ids, expected_sha1, mpdiff), sha1 in\
            zip(records, self.get_sha1s(versions)):
            if expected_sha1 != sha1:
                raise errors.VersionedFileInvalidChecksum(version)
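
    # Illustrative round-trip sketch (assumes 'source' and 'target' are
    # concrete VersionedFile instances and that version_ids is in topological
    # order, parents before children; not part of the original module):
    #
    #   version_ids = source.versions()
    #   diffs = source.make_mpdiffs(version_ids)
    #   records = [(v, source.get_parents(v), source.get_sha1(v), diff)
    #              for v, diff in zip(version_ids, diffs)]
    #   target.add_mpdiffs(records)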

    def get_sha1(self, version_id):
        """Get the stored sha1 sum for the given revision.

        :param version_id: The name of the version to lookup
        """
        raise NotImplementedError(self.get_sha1)

    def get_sha1s(self, version_ids):
        """Get the stored sha1 sums for the given revisions.

        :param version_ids: The names of the versions to lookup
        :return: a list of sha1s in order according to the version_ids
        """
        raise NotImplementedError(self.get_sha1s)

    def get_suffixes(self):
        """Return the file suffixes associated with this versioned file."""
        raise NotImplementedError(self.get_suffixes)

    def iter_lines_added_or_present_in_versions(self, version_ids=None,
                                                pb=None):
        """Iterate over the lines in the versioned file from version_ids.

        This may return lines from other versions. Each item the returned
        iterator yields is a tuple of a line and a text version that that line
        is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is a text file).

        NOTES: Lines are normalised: they will all have \n terminators.
               Lines are returned in arbitrary order.

        :return: An iterator over (line, version_id).
        """
        raise NotImplementedError(self.iter_lines_added_or_present_in_versions)
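
    # Illustrative sketch (assumption): collecting the text of every line
    # present in a set of versions, ignoring which version yielded it:
    #
    #   unique_lines = set(line for line, version_id in
    #       vf.iter_lines_added_or_present_in_versions(['rev-1', 'rev-2']))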

    def iter_parents(self, version_ids):
        """Iterate through the parents for many version ids.

        :param version_ids: An iterable yielding version_ids.
        :return: An iterator that yields (version_id, parents). Requested
            version_ids not present in the versioned file are simply skipped.
            The order is undefined, allowing for different optimisations in
            the underlying implementation.
        """
        for version_id in version_ids:
            try:
                yield version_id, tuple(self.get_parents(version_id))
            except errors.RevisionNotPresent:
                pass
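
    # Illustrative sketch (assumption): building a parents map covering only
    # the versions that are actually present:
    #
    #   parent_map = dict(vf.iter_parents(candidate_version_ids))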

    def transaction_finished(self):
        """The transaction that this file was opened in has finished.

        This records self.finished = True and should cause all mutating
        operations to error.
        """
        self.finished = True

    @deprecated_method(zero_eight)
    def walk(self, version_ids=None):
        """Walk the versioned file as a weave-like structure, for
        versions relative to version_ids. Yields sequence of (lineno,
        insert, deletes, text) for each relevant line.

        Must raise RevisionNotPresent if any of the specified versions
        are not present in the file history.

        :param version_ids: the version_ids to walk with respect to. If not
            supplied the entire weave-like structure is walked.

        walk is deprecated in favour of iter_lines_added_or_present_in_versions
        """
        raise NotImplementedError(self.walk)

    @deprecated_method(zero_eight)
    def iter_names(self):
        """Walk the names list."""
        return iter(self.versions())

    def plan_merge(self, ver_a, ver_b):
        """Return pseudo-annotation indicating how the two versions merge.

        This is computed between versions a and b and their common
        base.
        """
        raise NotImplementedError(VersionedFile.plan_merge)

    def weave_merge(self, plan, a_marker=TextMerge.A_MARKER,
                    b_marker=TextMerge.B_MARKER):
        return PlanWeaveMerge(plan, a_marker, b_marker).merge_lines()[0]


class _PlanMergeVersionedFile(object):
    """A VersionedFile for uncommitted and committed texts.

    It is intended to allow merges to be planned with working tree texts.
    It implements only the small part of the VersionedFile interface used by
    PlanMerge. It falls back to multiple versionedfiles for data not stored in
    _PlanMergeVersionedFile itself.
    """

    def __init__(self, file_id, fallback_versionedfiles=None):
        """Constructor.

        :param file_id: Used when raising exceptions.
        :param fallback_versionedfiles: If supplied, the set of fallbacks to
            use. Otherwise, _PlanMergeVersionedFile.fallback_versionedfiles
            can be appended to later.
        """
        self._file_id = file_id
        if fallback_versionedfiles is None:
            self.fallback_versionedfiles = []
        else:
            self.fallback_versionedfiles = fallback_versionedfiles
        self._parents = {}
        self._lines = {}

    def plan_merge(self, ver_a, ver_b, base=None):
        """See VersionedFile.plan_merge"""
        from bzrlib.merge import _PlanMerge
        if base is None:
            return _PlanMerge(ver_a, ver_b, self).plan_merge()
        old_plan = list(_PlanMerge(ver_a, base, self).plan_merge())
        new_plan = list(_PlanMerge(ver_a, ver_b, self).plan_merge())
        return _PlanMerge._subtract_plans(old_plan, new_plan)

    def plan_lca_merge(self, ver_a, ver_b, base=None):
        from bzrlib.merge import _PlanLCAMerge
        graph = self._get_graph()
        new_plan = _PlanLCAMerge(ver_a, ver_b, self, graph).plan_merge()
        if base is None:
            return new_plan
        old_plan = _PlanLCAMerge(ver_a, base, self, graph).plan_merge()
        return _PlanLCAMerge._subtract_plans(list(old_plan), list(new_plan))

    def add_lines(self, version_id, parents, lines):
        """See VersionedFile.add_lines.

        Lines are added locally, not to fallback versionedfiles. Also, ghosts
        are permitted. Only reserved ids are permitted.
        """
        if not revision.is_reserved_id(version_id):
            raise ValueError('Only reserved ids may be used')
        if parents is None:
            raise ValueError('Parents may not be None')
        if lines is None:
            raise ValueError('Lines may not be None')
        self._parents[version_id] = parents
        self._lines[version_id] = lines
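
    # Illustrative sketch (assumption, not original code): PlanMerge-style
    # callers add the working-tree texts under reserved ids (ids ending in
    # ':') and fall back to the repository's versioned file for committed
    # texts; the names below are hypothetical:
    #
    #   planvf = _PlanMergeVersionedFile('file-id', [repo_versionedfile])
    #   planvf.add_lines('this:', ['rev-1'], this_tree_lines)
    #   planvf.add_lines('other:', ['rev-2'], other_tree_lines)
    #   plan = planvf.plan_merge('this:', 'other:')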

    def get_lines(self, version_id):
        """See VersionedFile.get_lines"""
        lines = self._lines.get(version_id)
        if lines is not None:
            return lines
        for versionedfile in self.fallback_versionedfiles:
            try:
                return versionedfile.get_lines(version_id)
            except errors.RevisionNotPresent:
                continue
        else:
            raise errors.RevisionNotPresent(version_id, self._file_id)

    def get_ancestry(self, version_id, topo_sorted=False):
        """See VersionedFile.get_ancestry.

        Note that this implementation assumes that if a VersionedFile can
        answer get_ancestry at all, it can give an authoritative answer. In
        fact, ghosts can invalidate this assumption. But it's good enough
        99% of the time, and far cheaper/simpler.

        Also note that the results of this version are never topologically
        sorted, and are a set.
        """
        if topo_sorted:
            raise ValueError('This implementation does not provide sorting')
        parents = self._parents.get(version_id)
        if parents is None:
            for vf in self.fallback_versionedfiles:
                try:
                    return vf.get_ancestry(version_id, topo_sorted=False)
                except errors.RevisionNotPresent:
                    continue
            else:
                raise errors.RevisionNotPresent(version_id, self._file_id)
        ancestry = set([version_id])
        for parent in parents:
            ancestry.update(self.get_ancestry(parent, topo_sorted=False))
        return ancestry

    def get_parents(self, version_id):
        """See VersionedFile.get_parents"""
        parents = self._parents.get(version_id)
        if parents is not None:
            return parents
        for versionedfile in self.fallback_versionedfiles:
            try:
                return versionedfile.get_parents(version_id)
            except errors.RevisionNotPresent:
                continue
        else:
            raise errors.RevisionNotPresent(version_id, self._file_id)

    def _get_graph(self):
        from bzrlib.graph import (
            DictParentsProvider,
            Graph,
            _StackedParentsProvider,
            )
        from bzrlib.repofmt.knitrepo import _KnitParentsProvider
        parent_providers = [DictParentsProvider(self._parents)]
        for vf in self.fallback_versionedfiles:
            parent_providers.append(_KnitParentsProvider(vf))
        return Graph(_StackedParentsProvider(parent_providers))


class PlanWeaveMerge(TextMerge):
    """Weave merge that takes a plan as its input.

    This exists so that VersionedFile.plan_merge is implementable.
    Most callers will want to use WeaveMerge instead.
    """
            # TODO: remove parent texts when they are not relevant any more for
            # memory pressure reduction. RBC 20060313
            # pb.update('Converting versioned data', 0, len(order))
            # deltas = self.source.get_deltas(order)
            total = len(order)
            for index, version in enumerate(order):
                pb.update('Converting versioned data', index, total)
                _, _, parent_text = target.add_lines(version,
                                               self.source.get_parents(version),
                                               self.source.get_lines(version),
                                               parent_texts=parent_texts)
                parent_texts[version] = parent_text
                #delta_parent, sha1, noeol, delta = deltas[version]
                #target.add_delta(version,
                #                 self.source.get_parents(version),
                #                 delta_parent,
                #                 sha1,
                #                 noeol,
                #                 delta)
                #target.get_lines(version)

            # this should hit the native code path for target
            if target is not self.target:
                new_version_ids.add(version)
        return new_version_ids


class InterVersionedFileTestProviderAdapter(object):
    """A tool to generate a suite testing multiple inter versioned-file classes.

    This is done by copying the test once for each InterVersionedFile provider
    and injecting the transport_server, transport_readonly_server,
    versionedfile_factory and versionedfile_factory_to classes into each copy.
    Each copy is also given a new id() to make it easy to identify.
    """

    def __init__(self, transport_server, transport_readonly_server, formats):
        self._transport_server = transport_server
        self._transport_readonly_server = transport_readonly_server
        self._formats = formats

    def adapt(self, test):
        result = unittest.TestSuite()
        for (interversionedfile_class,
             versionedfile_factory,
             versionedfile_factory_to) in self._formats:
            new_test = deepcopy(test)
            new_test.transport_server = self._transport_server
            new_test.transport_readonly_server = self._transport_readonly_server
            new_test.interversionedfile_class = interversionedfile_class
            new_test.versionedfile_factory = versionedfile_factory
            new_test.versionedfile_factory_to = versionedfile_factory_to
            def make_new_test_id():
                new_id = "%s(%s)" % (new_test.id(),
                                     interversionedfile_class.__name__)
                return lambda: new_id
            new_test.id = make_new_test_id()
            result.addTest(new_test)
        return result
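
    # Illustrative sketch (assumption; 'server1', 'server2' and 'a_test' are
    # hypothetical): a test loader would typically combine this adapter with
    # default_test_list():
    #
    #   adapter = InterVersionedFileTestProviderAdapter(
    #       server1, server2,
    #       InterVersionedFileTestProviderAdapter.default_test_list())
    #   suite = adapter.adapt(a_test)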

    @staticmethod
    def default_test_list():
        """Generate the default list of interversionedfile permutations to test."""
        from bzrlib.weave import WeaveFile
        from bzrlib.knit import KnitVersionedFile
        result = []
        # test the fallback InterVersionedFile from annotated knits to weave
        result.append((InterVersionedFile,
                       KnitVersionedFile,
                       WeaveFile))
        for optimiser in InterVersionedFile._optimisers:
            result.append((optimiser,
                           optimiser._matching_file_from_factory,
                           optimiser._matching_file_to_factory
                           ))
        # if there are specific combinations we want to use, we can add them