        """Returns whether version is present."""
        raise NotImplementedError(self.has_version)

    def add_delta(self, version_id, parents, delta_parent, sha1, noeol, delta):
        """Add a text to the versioned file via a pregenerated delta.

        :param version_id: The version id being added.
        :param parents: The parents of the version_id.
        :param delta_parent: The parent this delta was created against.
        :param sha1: The sha1 of the full text.
        :param delta: The delta instructions. See get_delta for details.
        """
        version_id = osutils.safe_revision_id(version_id)
        parents = [osutils.safe_revision_id(v) for v in parents]
        self._check_write_ok()
        if self.has_version(version_id):
            raise errors.RevisionAlreadyPresent(version_id, self)
        return self._add_delta(version_id, parents, delta_parent, sha1, noeol,
            delta)

    def _add_delta(self, version_id, parents, delta_parent, sha1, noeol, delta):
        """Class specific routine to add a delta.

        This generic version simply applies the delta to the delta_parent and
        then adds the resulting full text with add_lines.
        """
        # strip annotation from delta
        new_delta = []
        for start, stop, delta_len, delta_lines in delta:
            new_delta.append((start, stop, delta_len,
                [text for origin, text in delta_lines]))
        if delta_parent is not None:
            parent_full = self.get_lines(delta_parent)
        else:
            parent_full = []
        new_full = self._apply_delta(parent_full, new_delta)
        # it's impossible to have noeol on an empty file
        if noeol and new_full[-1][-1] == '\n':
            new_full[-1] = new_full[-1][:-1]
        self.add_lines(version_id, parents, new_full)
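
    # Illustrative sketch (editorial assumption, hypothetical ids): the
    # ``delta`` argument is a sequence of (start, stop, delta_len,
    # delta_lines) tuples whose delta_lines are (origin, text) pairs,
    # matching the annotation-stripping loop above, e.g.:
    #
    #   delta = [(0, 1, 1, [('rev-1', 'new first line\n')])]
    #   vf.add_delta('rev-2', ['rev-1'], 'rev-1', text_sha1, False, delta)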

    def add_lines(self, version_id, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a single text on top of the versioned file.

        Must raise RevisionAlreadyPresent if the new version is
        already present in file history.

        Must raise RevisionNotPresent if any of the given parents are
        not present in file history.

        :param lines: A list of lines. Each line must be a bytestring. And all
            of them except the last must be terminated with \n and contain no
            other \n's. The last line may either contain no \n's or a single
            terminating \n. If the lines list does not meet this constraint
            the add routine may error or may succeed - but you will be unable
            to read the data back accurately. (Checking the lines have been
            split correctly is expensive and extremely unlikely to catch bugs
            so it is not done at runtime unless check_content is True.)
        :param parent_texts: An optional dictionary containing the opaque
            representations of some or all of the parents of version_id to
            allow delta optimisations. VERY IMPORTANT: the texts must be those
            returned by add_lines or data corruption can be caused.
        :param left_matching_blocks: a hint about which areas are common
            between the text and its left-hand-parent. The format is
            the SequenceMatcher.get_matching_blocks format.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :param check_content: If True, the lines supplied are verified to be
            bytestrings that are correctly formed lines.
        :return: The text sha1, the number of bytes in the text, and an opaque
            representation of the inserted version which can be provided
            back to future add_lines calls in the parent_texts dictionary.
        """
        version_id = osutils.safe_revision_id(version_id)
        parents = [osutils.safe_revision_id(v) for v in parents]
        self._check_write_ok()
        return self._add_lines(version_id, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)
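
    # Usage sketch (assumed, with hypothetical ids): every line except
    # possibly the last must end in '\n', and the opaque third return value
    # can be passed back through ``parent_texts`` to help delta generation:
    #
    #   sha1, length, text_repr = vf.add_lines('rev-1', [], ['hello\n'])
    #   parent_texts = {'rev-1': text_repr}
    #   vf.add_lines('rev-2', ['rev-1'], ['hello\n', 'world\n'],
    #                parent_texts=parent_texts)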

    def _add_lines(self, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """Helper to do the class specific add_lines."""
        raise NotImplementedError(self.add_lines)

    def add_lines_with_ghosts(self, version_id, parents, lines,
        parent_texts=None, nostore_sha=None, random_id=False,
        check_content=True, left_matching_blocks=None):
        """Add lines to the versioned file, allowing ghosts to be present.

        This takes the same parameters as add_lines and returns the same.
        """
        version_id = osutils.safe_revision_id(version_id)
        parents = [osutils.safe_revision_id(v) for v in parents]
        self._check_write_ok()
        return self._add_lines_with_ghosts(version_id, parents, lines,
            parent_texts, nostore_sha, random_id, check_content,
            left_matching_blocks)
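
    # Editorial note (grounded in the docstring above, hypothetical ids):
    # unlike add_lines, the parents given here may be ghosts - revisions
    # that are referenced but not themselves stored - so
    # vf.add_lines_with_ghosts('rev-2', ['ghost-rev'], ['text\n']) is
    # expected to succeed even though 'ghost-rev' is absent.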

    def _add_lines_with_ghosts(self, version_id, parents, lines, parent_texts,
        nostore_sha, random_id, check_content, left_matching_blocks):
        """Helper to do class specific add_lines_with_ghosts."""
        raise NotImplementedError(self.add_lines_with_ghosts)

        """Helper function to do the _clone_text work."""
        raise NotImplementedError(self.clone_text)

    def create_empty(self, name, transport, mode=None):
        """Create a new versioned file of this exact type.

        :param name: the file name
        :param transport: the transport
        :param mode: optional file mode.
        """
        raise NotImplementedError(self.create_empty)

    def fix_parents(self, version_id, new_parents):
        """Fix the parents list for version.

        This is done by appending a new version to the index
        with identical data except for the parents list.
        The parents list must be a superset of the current
        list.
        """
        version_id = osutils.safe_revision_id(version_id)
        new_parents = [osutils.safe_revision_id(p) for p in new_parents]
        self._check_write_ok()
        return self._fix_parents(version_id, new_parents)

    def _fix_parents(self, version_id, new_parents):
        """Helper for fix_parents."""
        raise NotImplementedError(self.fix_parents)

    def get_delta(self, version):
        """Get a delta for constructing version from some other version.

        :return: (delta_parent, sha1, noeol, delta)
            Where delta_parent is a version id or None to indicate no parent.
        """
        raise NotImplementedError(self.get_delta)

    def get_deltas(self, version_ids):
        """Get multiple deltas at once for constructing versions.

        :return: dict(version_id:(delta_parent, sha1, noeol, delta))
            Where delta_parent is a version id or None to indicate no parent,
            and version_id is the version_id created by that delta.
        """
        result = {}
        for version_id in version_ids:
            result[version_id] = self.get_delta(version_id)
        return result
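
    # Illustrative sketch (assumed result shape, hypothetical ids):
    #
    #   deltas = vf.get_deltas(['rev-2'])
    #   delta_parent, sha1, noeol, delta = deltas['rev-2']
    #
    # where ``delta`` has the (start, stop, delta_len, delta_lines) form
    # consumed by add_delta above.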

    def get_format_signature(self):
        """Get a text description of the data encoding in this file.
        """
        raise NotImplementedError(self.get_format_signature)

    def make_mpdiffs(self, version_ids):
        """Create multiparent diffs for specified versions."""
        knit_versions = set()
        knit_versions.update(version_ids)
        parent_map = self.get_parent_map(version_ids)
        for version_id in version_ids:
            try:
                knit_versions.update(parent_map[version_id])
            except KeyError:
                raise RevisionNotPresent(version_id, self)
        # We need to filter out ghosts, because we can't diff against them.
        knit_versions = set(self.get_parent_map(knit_versions).keys())
        lines = dict(zip(knit_versions,
            self._get_lf_split_line_list(knit_versions)))
        diffs = []
        for version_id in version_ids:
            target = lines[version_id]
            try:
                parents = [lines[p] for p in parent_map[version_id] if p in
                    knit_versions]
            except KeyError:
                raise RevisionNotPresent(version_id, self)
            if len(parents) > 0:
                left_parent_blocks = self._extract_blocks(version_id,
                    parents[0], target)
            else:
                left_parent_blocks = None
            diffs.append(multiparent.MultiParent.from_lines(target, parents,
                left_parent_blocks))
        return diffs

    def _extract_blocks(self, version_id, source, target):
        return None

    def add_mpdiffs(self, records):
        """Add mpdiffs to this VersionedFile.

        Records should be iterables of version, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.
        """
        # Does this need to call self._check_write_ok()? (IanC 20070919)
        vf_parents = {}
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        for version, parent_ids, expected_sha1, mpdiff in records:
            versions.append(version)
            mpvf.add_diff(mpdiff, version, parent_ids)
        needed_parents = set()
        for version, parent_ids, expected_sha1, mpdiff in records:
            needed_parents.update(p for p in parent_ids
                                  if not mpvf.has_version(p))
        present_parents = set(self.get_parent_map(needed_parents).keys())
        for parent_id, lines in zip(present_parents,
                                    self._get_lf_split_line_list(present_parents)):
            mpvf.add_version(lines, parent_id, [])
        for (version, parent_ids, expected_sha1, mpdiff), lines in\
            zip(records, mpvf.get_line_list(versions)):
            if len(parent_ids) == 1:
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                    mpvf.get_diff(parent_ids[0]).num_lines()))
            else:
                left_matching_blocks = None
            try:
                _, _, version_text = self.add_lines_with_ghosts(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            except NotImplementedError:
                # The vf can't handle ghosts, so add lines normally, which will
                # (reasonably) fail if there are ghosts in the data.
                _, _, version_text = self.add_lines(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            vf_parents[version] = version_text
        for (version, parent_ids, expected_sha1, mpdiff), sha1 in\
            zip(records, self.get_sha1s(versions)):
            if expected_sha1 != sha1:
                raise errors.VersionedFileInvalidChecksum(version)
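
    # Editorial sketch of the intended round trip (assumed usage,
    # hypothetical names): diffs produced by make_mpdiffs can be replayed
    # into another versioned file with add_mpdiffs:
    #
    #   version_ids = ['rev-1', 'rev-2']
    #   mpdiffs = source_vf.make_mpdiffs(version_ids)
    #   parent_map = source_vf.get_parent_map(version_ids)
    #   sha1s = source_vf.get_sha1s(version_ids)
    #   records = [(v, parent_map[v], sha1, diff)
    #              for v, sha1, diff in zip(version_ids, sha1s, mpdiffs)]
    #   target_vf.add_mpdiffs(records)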

    def get_sha1(self, version_id):
        """Get the stored sha1 sum for the given revision.

        :param version_id: The name of the version to lookup
        """
        raise NotImplementedError(self.get_sha1)

    def get_sha1s(self, version_ids):
        """Get the stored sha1 sums for the given revisions.

        :param version_ids: The names of the versions to lookup
        :return: a list of sha1s in order according to the version_ids
        """
        raise NotImplementedError(self.get_sha1s)

    def get_suffixes(self):
        """Return the file suffixes associated with this versioned file."""
        raise NotImplementedError(self.get_suffixes)

        raise NotImplementedError(self.get_graph_with_ghosts)

    @deprecated_method(zero_eight)
    def parent_names(self, version):
        """Return version names for parents of a version.

        See get_parents for the current api.
        """
        return self.get_parents(version)

    def get_parent_map(self, version_ids):
        """Get a map of the parents of version_ids.

        :param version_ids: The version ids to look up parents for.
        :return: A mapping from version id to parents.
        """
        raise NotImplementedError(self.get_parent_map)
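
    # Illustrative sketch (assumed result shape, hypothetical ids):
    #
    #   vf.get_parent_map(['rev-2', 'a-ghost'])
    #   => {'rev-2': ('rev-1',)}
    #
    # Ids that are not present are simply left out of the result, which is
    # how make_mpdiffs and add_mpdiffs above filter out ghosts.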

    @deprecated_method(one_four)
    def get_parents(self, version_id):
        """Return version names for parents of a version.

        Must raise RevisionNotPresent if version is not present in
        file history.
        """
        try:
            all = self.get_parent_map([version_id])[version_id]
        except KeyError:
            raise errors.RevisionNotPresent(version_id, self)
        result = []
        parent_parents = self.get_parent_map(all)
        for version_id in all:
            if version_id in parent_parents:
                result.append(version_id)
        return result

    def get_parents_with_ghosts(self, version_id):
        """Return version names for parents of version_id.

        return PlanWeaveMerge(plan, a_marker, b_marker).merge_lines()[0]


class _PlanMergeVersionedFile(object):
    """A VersionedFile for uncommitted and committed texts.

    It is intended to allow merges to be planned with working tree texts.
    It implements only the small part of the VersionedFile interface used by
    PlanMerge. It falls back to multiple versionedfiles for data not stored in
    _PlanMergeVersionedFile itself.
    """

    def __init__(self, file_id, fallback_versionedfiles=None):
        """Constructor.

        :param file_id: Used when raising exceptions.
        :param fallback_versionedfiles: If supplied, the set of fallbacks to
            use. Otherwise, _PlanMergeVersionedFile.fallback_versionedfiles
            can be appended to later.
        """
        self._file_id = file_id
        if fallback_versionedfiles is None:
            self.fallback_versionedfiles = []
        else:
            self.fallback_versionedfiles = fallback_versionedfiles
        self._parents = {}
        self._lines = {}

    def plan_merge(self, ver_a, ver_b, base=None):
        """See VersionedFile.plan_merge"""
        from bzrlib.merge import _PlanMerge
        if base is None:
            return _PlanMerge(ver_a, ver_b, self).plan_merge()
        old_plan = list(_PlanMerge(ver_a, base, self).plan_merge())
        new_plan = list(_PlanMerge(ver_a, ver_b, self).plan_merge())
        return _PlanMerge._subtract_plans(old_plan, new_plan)

    def plan_lca_merge(self, ver_a, ver_b, base=None):
        from bzrlib.merge import _PlanLCAMerge
        graph = self._get_graph()
        new_plan = _PlanLCAMerge(ver_a, ver_b, self, graph).plan_merge()
        if base is None:
            return new_plan
        old_plan = _PlanLCAMerge(ver_a, base, self, graph).plan_merge()
        return _PlanLCAMerge._subtract_plans(list(old_plan), list(new_plan))
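
    # Editorial note (inferred from the code above, hypothetical ids): when
    # ``base`` is given, the plan computed against the base is subtracted
    # from the plan between the two tips, so only changes not already
    # explained by the base remain, e.g.
    #
    #   plan = planner.plan_merge('this:', 'other:', base='base:')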

    def add_lines(self, version_id, parents, lines):
        """See VersionedFile.add_lines

        Lines are added locally, not to fallback versionedfiles. Also, ghosts
        are permitted. Only reserved ids are permitted.
        """
        if not revision.is_reserved_id(version_id):
            raise ValueError('Only reserved ids may be used')
        if parents is None:
            raise ValueError('Parents may not be None')
        if lines is None:
            raise ValueError('Lines may not be None')
        self._parents[version_id] = tuple(parents)
        self._lines[version_id] = lines
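
    # Usage sketch (assumed, hypothetical ids and variables): working tree
    # texts are registered under reserved ids (assumed here to be ids ending
    # in ':'), while committed texts are found via the fallbacks:
    #
    #   planner = _PlanMergeVersionedFile('file-id', [repository_texts_vf])
    #   planner.add_lines('this:', ['rev-1'], this_tree_lines)
    #   planner.add_lines('other:', ['rev-2'], other_tree_lines)
    #   plan = planner.plan_merge('this:', 'other:')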

    def get_lines(self, version_id):
        """See VersionedFile.get_lines"""
        lines = self._lines.get(version_id)
        if lines is not None:
            return lines
        for versionedfile in self.fallback_versionedfiles:
            try:
                return versionedfile.get_lines(version_id)
            except errors.RevisionNotPresent:
                continue
        else:
            raise errors.RevisionNotPresent(version_id, self._file_id)

    def get_ancestry(self, version_id, topo_sorted=False):
        """See VersionedFile.get_ancestry.

        Note that this implementation assumes that if a VersionedFile can
        answer get_ancestry at all, it can give an authoritative answer. In
        fact, ghosts can invalidate this assumption. But it's good enough
        99% of the time, and far cheaper/simpler.

        Also note that the results of this version are never topologically
        sorted, and are a set.
        """
        if topo_sorted:
            raise ValueError('This implementation does not provide sorting')
        parents = self._parents.get(version_id)
        if parents is None:
            for vf in self.fallback_versionedfiles:
                try:
                    return vf.get_ancestry(version_id, topo_sorted=False)
                except errors.RevisionNotPresent:
                    continue
            else:
                raise errors.RevisionNotPresent(version_id, self._file_id)
        ancestry = set([version_id])
        for parent in parents:
            ancestry.update(self.get_ancestry(parent, topo_sorted=False))
        return ancestry

    def get_parent_map(self, version_ids):
        """See VersionedFile.get_parent_map"""
        result = {}
        pending = set(version_ids)
        for key in version_ids:
            try:
                result[key] = self._parents[key]
            except KeyError:
                pass
        pending = pending - set(result.keys())
        for versionedfile in self.fallback_versionedfiles:
            parents = versionedfile.get_parent_map(pending)
            result.update(parents)
            pending = pending - set(parents.keys())
        return result

    def _get_graph(self):
        from bzrlib.graph import (
            DictParentsProvider,
            Graph,
            _StackedParentsProvider,
            )
        from bzrlib.repofmt.knitrepo import _KnitParentsProvider
        parent_providers = [DictParentsProvider(self._parents)]
        for vf in self.fallback_versionedfiles:
            parent_providers.append(_KnitParentsProvider(vf))
        return Graph(_StackedParentsProvider(parent_providers))


class PlanWeaveMerge(TextMerge):
    """Weave merge that takes a plan as its input.

        incorporated into this versioned file.

        Must raise RevisionNotPresent if any of the specified versions
        are not present in the other file's history unless ignore_missing is
        supplied in which case they are silently skipped.
        """
        target = self.target
        version_ids = self._get_source_version_ids(version_ids, ignore_missing)
        graph = Graph(self.source)
        search = graph._make_breadth_first_searcher(version_ids)
        transitive_ids = set()
        map(transitive_ids.update, list(search))
        parent_map = self.source.get_parent_map(transitive_ids)
        order = tsort.topo_sort(parent_map.items())
        pb = ui.ui_factory.nested_progress_bar()
        parent_texts = {}
        # TODO: remove parent texts when they are not relevant any more for
        # memory pressure reduction. RBC 20060313
        # pb.update('Converting versioned data', 0, len(order))
        total = len(order)
        for index, version in enumerate(order):
            pb.update('Converting versioned data', index, total)
            if version in target:
                continue
            _, _, parent_text = target.add_lines(version,
                parent_map[version],
                self.source.get_lines(version),
                parent_texts=parent_texts)
            parent_texts[version] = parent_text

            new_version_ids.add(version)
        return new_version_ids


class InterVersionedFileTestProviderAdapter(object):
    """A tool to generate a suite testing multiple inter versioned-file classes.

    This is done by copying the test once for each InterVersionedFile provider
    and injecting the transport_server, transport_readonly_server,
    versionedfile_factory and versionedfile_factory_to classes into each copy.
    Each copy is also given a new id() to make it easy to identify.
    """

    def __init__(self, transport_server, transport_readonly_server, formats):
        self._transport_server = transport_server
        self._transport_readonly_server = transport_readonly_server
        self._formats = formats

    def adapt(self, test):
        result = unittest.TestSuite()
        for (interversionedfile_class,
             versionedfile_factory,
             versionedfile_factory_to) in self._formats:
            new_test = deepcopy(test)
            new_test.transport_server = self._transport_server
            new_test.transport_readonly_server = self._transport_readonly_server
            new_test.interversionedfile_class = interversionedfile_class
            new_test.versionedfile_factory = versionedfile_factory
            new_test.versionedfile_factory_to = versionedfile_factory_to
            def make_new_test_id():
                new_id = "%s(%s)" % (new_test.id(), interversionedfile_class.__name__)
                return lambda: new_id
            new_test.id = make_new_test_id()
            result.addTest(new_test)
        return result
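
    # Editorial sketch (assumed usage, hypothetical names): ``formats`` is a
    # list of (interversionedfile_class, versionedfile_factory,
    # versionedfile_factory_to) tuples, and adapt() emits one copy of the
    # test per tuple:
    #
    #   adapter = InterVersionedFileTestProviderAdapter(
    #       a_transport_server, a_readonly_server,
    #       InterVersionedFileTestProviderAdapter.default_test_list())
    #   suite = adapter.adapt(base_test)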

    @staticmethod
    def default_test_list():
        """Generate the default list of interversionedfile permutations to test."""
        from bzrlib.weave import WeaveFile
        from bzrlib.knit import KnitVersionedFile
        result = []
        # test the fallback InterVersionedFile from annotated knits to weave
        result.append((InterVersionedFile,
                       KnitVersionedFile,
                       WeaveFile))
        for optimiser in InterVersionedFile._optimisers:
            result.append((optimiser,
                           optimiser._matching_file_from_factory,
                           optimiser._matching_file_to_factory
                           ))
        # if there are specific combinations we want to use, we can add them