# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# Remaining to do is to figure out if get_graph should return a simple
# map, or a graph object of some kind.


"""Versioned text file storage api."""

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from copy import deepcopy
from unittest import TestSuite

from bzrlib import (
    errors,
    osutils,
    tsort,
    ui,
    )
from bzrlib.transport.memory import MemoryTransport
""")

from bzrlib.inter import InterObject
from bzrlib.symbol_versioning import (deprecated_function,
        deprecated_method,
        zero_eight,
        )
from bzrlib.textmerge import TextMerge
from bzrlib.tsort import topo_sort


class VersionedFile(object):

    def versions(self):
        """Return an unsorted list of versions."""
        raise NotImplementedError(self.versions)

    def has_ghost(self, version_id):
        """Returns whether version is present as a ghost."""
        raise NotImplementedError(self.has_ghost)

    def has_version(self, version_id):
        """Returns whether version is present."""
        raise NotImplementedError(self.has_version)

    def add_delta(self, version_id, parents, delta_parent, sha1, noeol, delta):
        """Add a text to the versioned file via a pregenerated delta.

        :param version_id: The version id being added.
        :param parents: The parents of the version_id.
        :param delta_parent: The parent this delta was created against.
        :param sha1: The sha1 of the full text.
        :param delta: The delta instructions. See get_delta for details.
        """
        version_id = osutils.safe_revision_id(version_id)
        parents = [osutils.safe_revision_id(v) for v in parents]
        self._check_write_ok()
        if self.has_version(version_id):
            raise errors.RevisionAlreadyPresent(version_id, self)
        return self._add_delta(version_id, parents, delta_parent, sha1, noeol, delta)

    def _add_delta(self, version_id, parents, delta_parent, sha1, noeol, delta):
        """Class specific routine to add a delta.

        This generic version simply applies the delta to the delta_parent and
        then inserts the resulting text as the new version.
        """
        # strip annotation from delta
        new_delta = []
        for start, stop, delta_len, delta_lines in delta:
            new_delta.append((start, stop, delta_len, [text for origin, text in delta_lines]))
        if delta_parent is not None:
            parent_full = self.get_lines(delta_parent)
        else:
            parent_full = []
        new_full = self._apply_delta(parent_full, new_delta)
        # it's impossible to have noeol on an empty file
        if noeol and new_full[-1][-1] == '\n':
            new_full[-1] = new_full[-1][:-1]
        self.add_lines(version_id, parents, new_full)
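
    # Illustrative sketch of the delta format consumed above (the hunk
    # interpretation and version ids here are hypothetical, not taken from a
    # real store): a delta is a sequence of (start, stop, length, lines)
    # hunks, with each line annotated by its originating version as produced
    # by get_delta().
    #
    #   delta = [(0, 1, 1, [('rev-1', 'new first line\n')])]
    #   vf.add_delta('rev-2', ['rev-1'], 'rev-1', sha1_of_new_text, False,
    #                delta)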

    def add_lines(self, version_id, parents, lines, parent_texts=None):
        """Add a single text on top of the versioned file.

        Must raise RevisionAlreadyPresent if the new version is
        already present in file history.

        Must raise RevisionNotPresent if any of the given parents are
        not present in file history.

        :param parent_texts: An optional dictionary containing the opaque
            representations of some or all of the parents of
            version_id to allow delta optimisations.
            VERY IMPORTANT: the texts must be those returned
            by add_lines or data corruption can be caused.
        :return: An opaque representation of the inserted version which can be
            provided back to future add_lines calls in the parent_texts
            dictionary.
        """
        version_id = osutils.safe_revision_id(version_id)
        parents = [osutils.safe_revision_id(v) for v in parents]
        self._check_write_ok()
        return self._add_lines(version_id, parents, lines, parent_texts)

    def _add_lines(self, version_id, parents, lines, parent_texts):
        """Helper to do the class specific add_lines."""
        raise NotImplementedError(self.add_lines)
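
    # Illustrative sketch of the add_lines()/parent_texts contract ('vf' is
    # any concrete VersionedFile implementation; this module only defines the
    # abstract API, and the version ids are hypothetical):
    #
    #   memento = vf.add_lines('rev-1', [], ['hello\n'])
    #   vf.add_lines('rev-2', ['rev-1'], ['hello\n', 'world\n'],
    #                parent_texts={'rev-1': memento})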

    def add_lines_with_ghosts(self, version_id, parents, lines,
                              parent_texts=None):
        """Add lines to the versioned file, allowing ghosts to be present.

        This takes the same parameters as add_lines.
        """
        version_id = osutils.safe_revision_id(version_id)
        parents = [osutils.safe_revision_id(v) for v in parents]
        self._check_write_ok()
        return self._add_lines_with_ghosts(version_id, parents, lines,
                                           parent_texts)

    def _add_lines_with_ghosts(self, version_id, parents, lines, parent_texts):
        """Helper to do class specific add_lines_with_ghosts."""
        raise NotImplementedError(self.add_lines_with_ghosts)

    def check(self, progress_bar=None):
        """Check the versioned file for integrity."""
        raise NotImplementedError(self.check)

    def _check_lines_not_unicode(self, lines):
        """Check that lines being added to a versioned file are not unicode."""
        for line in lines:
            if line.__class__ is not str:
                raise errors.BzrBadParameterUnicode("lines")

    def _check_lines_are_lines(self, lines):
        """Check that the lines really are full lines without inline EOL."""
        for line in lines:
            if '\n' in line[:-1]:
                raise errors.BzrBadParameterContainsNewline("lines")

    def _check_write_ok(self):
        """Is the versioned file marked as 'finished'? Raise if it is."""
        if self.finished:
            raise errors.OutSideTransaction()
        if self._access_mode != 'w':
            raise errors.ReadOnlyObjectDirtiedError(self)

    def enable_cache(self):
        """Tell this versioned file that it should cache any data it reads.

        This is advisory, implementations do not have to support caching.
        """
        pass

    def clear_cache(self):
        """Remove any data cached in the versioned file object.

        This only needs to be supported if caches are supported.
        """
        pass

    def clone_text(self, new_version_id, old_version_id, parents):
        """Add an identical text to old_version_id as new_version_id."""
        raise NotImplementedError(self.clone_text)

    def create_empty(self, name, transport, mode=None):
        """Create a new, empty versioned file of this exact type."""
        raise NotImplementedError(self.create_empty)

    def fix_parents(self, version_id, new_parents):
        """Fix the parents list for version.

        This is done by appending a new version to the index
        with identical data except for the parents list.
        The parents list must be a superset of the current
        list.
        """
        version_id = osutils.safe_revision_id(version_id)
        new_parents = [osutils.safe_revision_id(p) for p in new_parents]
        self._check_write_ok()
        return self._fix_parents(version_id, new_parents)

    def _fix_parents(self, version_id, new_parents):
        """Helper for fix_parents."""
        raise NotImplementedError(self.fix_parents)

    def get_delta(self, version):
        """Get a delta for constructing version from some other version.

        :return: (delta_parent, sha1, noeol, delta)
            Where delta_parent is a version id or None to indicate no parent.
        """
        raise NotImplementedError(self.get_delta)

    def get_deltas(self, version_ids):
        """Get multiple deltas at once for constructing versions.

        :return: dict(version_id:(delta_parent, sha1, noeol, delta))
            Where delta_parent is a version id or None to indicate no parent,
            and version_id is the version_id created by that delta.
        """
        result = {}
        for version_id in version_ids:
            result[version_id] = self.get_delta(version_id)
        return result
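
    # Illustrative sketch of the shape returned by get_deltas() (version ids
    # are hypothetical):
    #
    #   deltas = vf.get_deltas(['rev-2', 'rev-3'])
    #   delta_parent, sha1, noeol, delta = deltas['rev-2']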

    def get_sha1(self, version_id):
        """Get the stored sha1 sum for the given revision.

        :param version_id: The name of the version to lookup
        """
        raise NotImplementedError(self.get_sha1)

    def get_suffixes(self):
        """Return the file suffixes associated with this versioned file."""
        raise NotImplementedError(self.get_suffixes)

    def get_lines(self, version_id):
        """Return version contents as a sequence of lines.

        Must raise RevisionNotPresent if version is not present in
        file history.
        """
        raise NotImplementedError(self.get_lines)

    def get_ancestry(self, version_ids, topo_sorted=True):
        """Return a list of all ancestors of given version(s). This
        will not include the null revision.

        This list will not be topologically sorted if topo_sorted=False is
        passed.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history."""
        if isinstance(version_ids, basestring):
            version_ids = [version_ids]
        raise NotImplementedError(self.get_ancestry)

    def get_ancestry_with_ghosts(self, version_ids):
        """Return a list of all ancestors of given version(s). This
        will not include the null revision.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history.

        Ghosts that are known about will be included in ancestry list,
        but are not explicitly marked.
        """
        raise NotImplementedError(self.get_ancestry_with_ghosts)

    def get_graph(self, version_ids=None):
        """Return a graph from the versioned file.

        Ghosts are not listed or referenced in the graph.
        :param version_ids: Versions to select.
                            None means retrieve all versions.
        """
        result = {}
        if version_ids is None:
            for version in self.versions():
                result[version] = self.get_parents(version)
        else:
            pending = set(osutils.safe_revision_id(v) for v in version_ids)
            while pending:
                version = pending.pop()
                if version in result:
                    continue
                parents = self.get_parents(version)
                for parent in parents:
                    if parent in result:
                        continue
                    pending.add(parent)
                result[version] = parents
        return result
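
    # Illustrative sketch of the mapping returned by get_graph() (version ids
    # and the exact parent container type are hypothetical):
    #
    #   vf.get_graph(['rev-3'])
    #   => {'rev-1': [], 'rev-2': ['rev-1'], 'rev-3': ['rev-2']}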

    def get_graph_with_ghosts(self):
        """Return a graph for the entire versioned file.

        Ghosts are referenced in parents list but are not
        explicitly listed.
        """
        raise NotImplementedError(self.get_graph_with_ghosts)

    @deprecated_method(zero_eight)
    def parent_names(self, version):
        """Return version names for parents of a version."""
        return self.get_parents(version)

    def join(self, other, pb=None, msg=None, version_ids=None,
             ignore_missing=False):
        """Integrate versions from other into this versioned file.

        If version_ids is None all versions from other should be
        incorporated into this versioned file.

        Must raise RevisionNotPresent if any of the specified versions
        are not present in the other file's history unless ignore_missing
        is supplied when they are silently skipped.
        """
        self._check_write_ok()
        return InterVersionedFile.get(other, self).join(
            pb,
            msg,
            version_ids,
            ignore_missing)
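
    # Illustrative sketch ('source_vf' and 'vf' are two concrete
    # VersionedFile instances, possibly of different formats; version ids
    # are hypothetical):
    #
    #   vf.join(source_vf, version_ids=['rev-2'])
    #   assert vf.has_version('rev-2')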

    def iter_lines_added_or_present_in_versions(self, version_ids=None,
                                                pb=None):
        """Iterate over the lines in the versioned file from version_ids.

        This may return lines from other versions, and does not return the
        specific version marker at this point. The api may be changed
        during development to include the version that the versioned file
        thinks is relevant, but given that such hints are just guesses,
        it's better not to have it if we don't need it.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES: Lines are normalised: they will all have \n terminators.
               Lines are returned in arbitrary order.
        """
        raise NotImplementedError(self.iter_lines_added_or_present_in_versions)

    def transaction_finished(self):
        """The transaction that this file was opened in has finished.

        This records self.finished = True and should cause all mutating
        operations to error.
        """
        self.finished = True

    @deprecated_method(zero_eight)
    def walk(self, version_ids=None):
        """Walk the versioned file as a weave-like structure, for
        versions relative to version_ids.  Yields sequence of (lineno,
        insert, deletes, text) for each relevant line.
        """
        raise NotImplementedError(self.walk)

    def plan_merge(self, ver_a, ver_b):
        """Return pseudo-annotation indicating how the two versions merge.

        Weave lines present in none of them are skipped entirely.

        killed-base Dead in base revision
        killed-both Killed in each revision
        killed-a    Killed in a
        killed-b    Killed in b
        unchanged   Alive in both a and b (possibly created in both)
        new-a       Created in a
        new-b       Created in b
        ghost-a     Killed in a, unborn in b
        ghost-b     Killed in b, unborn in a
        irrelevant  Not in either revision
        """
        raise NotImplementedError(VersionedFile.plan_merge)
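
    # A plan is simply an iterable of (state, line) pairs using the states
    # listed above.  Illustrative sketch (the lines are hypothetical):
    #
    #   plan = [('unchanged', 'a\n'),
    #           ('new-a', 'b\n'),
    #           ('new-b', 'c\n')]
    #
    # Feeding this plan to weave_merge() below yields 'a\n' followed by a
    # conflict region offering 'b\n' (from a) against 'c\n' (from b).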

    def weave_merge(self, plan, a_marker=TextMerge.A_MARKER,
                    b_marker=TextMerge.B_MARKER):
        return PlanWeaveMerge(plan, a_marker, b_marker).merge_lines()[0]


class PlanWeaveMerge(TextMerge):
    """Weave merge that takes a plan as its input.

    This exists so that VersionedFile.plan_merge is implementable.
    Most callers will want to use WeaveMerge instead.
    """

    def __init__(self, plan, a_marker=TextMerge.A_MARKER,
                 b_marker=TextMerge.B_MARKER):
        TextMerge.__init__(self, a_marker, b_marker)
        self.plan = plan

    def _merge_struct(self):
        lines_a = []
        lines_b = []
        ch_a = ch_b = False

        def outstanding_struct():
            if not lines_a and not lines_b:
                return
            elif ch_a and not ch_b:
                # one-sided change from a
                yield (lines_a,)
            elif ch_b and not ch_a:
                # one-sided change from b
                yield (lines_b,)
            elif lines_a == lines_b:
                yield (lines_a,)
            else:
                # conflict
                yield (lines_a, lines_b)

        # We previously considered either 'unchanged' or 'killed-both' lines
        # to be possible places to resynchronize.  However, assuming agreement
        # on killed-both lines may be too aggressive. -- mbp 20060324
        for state, line in self.plan:
            if state == 'unchanged':
                # resync and flush queued conflicts changes if any
                for struct in outstanding_struct():
                    yield struct
                lines_a = []
                lines_b = []
                ch_a = ch_b = False

            if state == 'unchanged':
                if line:
                    yield ([line],)
            elif state == 'killed-a':
                ch_a = True
                lines_b.append(line)
            elif state == 'killed-b':
                ch_b = True
                lines_a.append(line)
            elif state == 'new-a':
                ch_a = True
                lines_a.append(line)
            elif state == 'new-b':
                ch_b = True
                lines_b.append(line)
            else:
                assert state in ('irrelevant', 'ghost-a', 'ghost-b',
                                 'killed-base', 'killed-both'), state
        for struct in outstanding_struct():
            yield struct
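

# Illustrative sketch of how the merge pieces fit together ('vf' is a
# concrete VersionedFile whose plan_merge is implemented; version ids are
# hypothetical):
#
#   plan = vf.plan_merge('rev-a', 'rev-b')
#   merged_lines = PlanWeaveMerge(plan).merge_lines()[0]
#
# which is what VersionedFile.weave_merge() does on the caller's behalf.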


class InterVersionedFile(InterObject):
    """Operations taking place between two versioned files.

    Its instances have methods like join, and contain references to the
    source and target versioned files these operations can be carried out on.
    """

    _optimisers = []
    """The available optimised InterVersionedFile types."""

    def join(self, pb=None, msg=None, version_ids=None, ignore_missing=False):
        """Integrate versions from self.source into self.target.

        If version_ids is None all versions from source should be
        incorporated into this versioned file.

        Must raise RevisionNotPresent if any of the specified versions
        are not present in the other file's history unless ignore_missing is
        supplied when they are silently skipped.
        """
        # the default join:
        # - if the target is empty, just add all the versions from
        #   source to target, otherwise:
        # - make a temporary versioned file of type target
        # - insert the source content into it one at a time
        #   (this imports each version and converts it to the target format)
        # - join the temporary versioned file with the target
        if not self.target.versions():
            target = self.target
        else:
            # Make a new target-format versioned file.
            temp_source = self.target.create_empty("temp", MemoryTransport())
            target = temp_source
        version_ids = self._get_source_version_ids(version_ids, ignore_missing)
        graph = self.source.get_graph(version_ids)
        order = tsort.topo_sort(graph.items())
        pb = ui.ui_factory.nested_progress_bar()
        parent_texts = {}
        try:
            # TODO for incremental cross-format work:
            # make a versioned file with the following content:
            # all revisions we have been asked to join
            # all their ancestors that are *not* in target already.
            # the immediate parents of the above two sets, with
            # empty parent lists - these versions are in target already
            # and the incorrect version data will be ignored.
            # TODO: for all ancestors that are present in target already,
            #       check them for consistent data, this requires moving sha1
            #       from texts to the graph texts.
            # TODO: remove parent texts when they are not relevant any more for
            #       memory pressure reduction. RBC 20060313
            # pb.update('Converting versioned data', 0, len(order))
            # deltas = self.source.get_deltas(order)
            for index, version in enumerate(order):
                pb.update('Converting versioned data', index, len(order))
                parent_text = target.add_lines(version,
                                               self.source.get_parents(version),
                                               self.source.get_lines(version),
                                               parent_texts=parent_texts)
                parent_texts[version] = parent_text
                #delta_parent, sha1, noeol, delta = deltas[version]
                #target.add_delta(version,
                #                 self.source.get_parents(version),
                #                 delta_parent,
                #                 sha1,
                #                 noeol,
                #                 delta)
                #target.get_lines(version)

            # this should hit the native code path for target
            if target is not self.target:
                return self.target.join(temp_source,
                                        pb,
                                        msg,
                                        version_ids,
                                        ignore_missing)
        finally:
            pb.finished()
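
    # Illustrative sketch: callers normally reach the join above through
    # VersionedFile.join(), which boils down to
    #
    #   InterVersionedFile.get(source_vf, target_vf).join()
    #
    # where get() selects a registered optimiser compatible with the two
    # formats and otherwise falls back to this generic implementation.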

    def _get_source_version_ids(self, version_ids, ignore_missing):
        """Determine the version ids to be used from self.source.

        :param version_ids: The caller-supplied version ids to check. (None
                            for all). If None is in version_ids, it is stripped.
        :param ignore_missing: if True, remove missing ids from the version
                               list. If False, raise RevisionNotPresent on
                               a missing version id.
        :return: A set of version ids.
        """
        if version_ids is None:
            # None cannot be in source.versions
            return set(self.source.versions())
        else:
            version_ids = [osutils.safe_revision_id(v) for v in version_ids]
            if ignore_missing:
                return set(self.source.versions()).intersection(set(version_ids))
            else:
                new_version_ids = set()
                for version in version_ids:
                    if version is None:
                        continue
                    if not self.source.has_version(version):
                        raise errors.RevisionNotPresent(version, str(self.source))
                    new_version_ids.add(version)
                return new_version_ids


class InterVersionedFileTestProviderAdapter(object):
    """A tool to generate a suite testing multiple inter versioned-file classes.

    This is done by copying the test once for each interversionedfile provider
    and injecting the transport_server, transport_readonly_server,
    versionedfile_factory and versionedfile_factory_to classes into each copy.
    Each copy is also given a new id() to make it easy to identify.
    """

    def __init__(self, transport_server, transport_readonly_server, formats):
        self._transport_server = transport_server
        self._transport_readonly_server = transport_readonly_server
        self._formats = formats

    def adapt(self, test):
        result = TestSuite()
        for (interversionedfile_class,
             versionedfile_factory,
             versionedfile_factory_to) in self._formats:
            new_test = deepcopy(test)
            new_test.transport_server = self._transport_server
            new_test.transport_readonly_server = self._transport_readonly_server
            new_test.interversionedfile_class = interversionedfile_class
            new_test.versionedfile_factory = versionedfile_factory
            new_test.versionedfile_factory_to = versionedfile_factory_to
            def make_new_test_id():
                new_id = "%s(%s)" % (new_test.id(), interversionedfile_class.__name__)
                return lambda: new_id
            new_test.id = make_new_test_id()
            result.addTest(new_test)
        return result

    @staticmethod
    def default_test_list():
        """Generate the default list of interversionedfile permutations to test."""
        from bzrlib.weave import WeaveFile
        from bzrlib.knit import KnitVersionedFile
        result = []
        # test the fallback InterVersionedFile from weave to annotated knits
        result.append((InterVersionedFile,
                       WeaveFile,
                       KnitVersionedFile))
        for optimiser in InterVersionedFile._optimisers:
            result.append((optimiser,
                           optimiser._matching_file_factory,
                           optimiser._matching_file_factory
                           ))
        # if there are specific combinations we want to use, we can add them
        # here.
        return result