            result[version_id] = self.get_delta(version_id)
        return result

    def make_mpdiffs(self, version_ids):
        """Create multiparent diffs for specified versions."""
        knit_versions = set()
        for version_id in version_ids:
            knit_versions.add(version_id)
            knit_versions.update(self.get_parents(version_id))
        lines = dict(zip(knit_versions,
                         self._get_lf_split_line_list(knit_versions)))
        diffs = []
        for version_id in version_ids:
            target = lines[version_id]
            parents = [lines[p] for p in self.get_parents(version_id)]
            if len(parents) > 0:
                left_parent_blocks = self._extract_blocks(version_id,
                                                          parents[0], target)
            else:
                left_parent_blocks = None
            diffs.append(multiparent.MultiParent.from_lines(target, parents,
                         left_parent_blocks))
        return diffs

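    # Illustrative sketch (not part of the original file): each MultiParent
    # returned above stores the target text as hunks that either reference
    # line ranges in a parent or carry new lines verbatim, so a version is
    # cheap to represent when it mostly matches its parents.  Assuming a
    # concrete VersionedFile `vf` that already contains 'rev-b':
    #
    #   diff = vf.make_mpdiffs(['rev-b'])[0]
    #   # diff is a multiparent.MultiParent; its hunks mix copied-from-parent
    #   # ranges and literal new-text lines.
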
    def _extract_blocks(self, version_id, source, target):
        """Return matching blocks for the left-hand parent, or None."""
        return None

    def add_mpdiffs(self, records):
        """Add mpdiffs to this versionedfile.

        Records should be iterables of version, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.
        """
        vf_parents = {}
        for version, parents, expected_sha1, mpdiff in records:
            mpvf = multiparent.MultiMemoryVersionedFile()
            needed_parents = [p for p in parents if not mpvf.has_version(p)]
            parent_lines = self._get_lf_split_line_list(needed_parents)
            for parent_id, lines in zip(needed_parents, parent_lines):
                mpvf.add_version(lines, parent_id, [])
            mpvf.add_diff(mpdiff, version, parents)
            lines = mpvf.get_line_list([version])[0]
            version_text = self.add_lines(version, parents, lines, vf_parents)
            vf_parents[version] = version_text
            if expected_sha1 != self.get_sha1(version):
                raise errors.VersionedFileInvalidChecksum(version)

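    # Illustrative sketch (assumed variable names, not part of the original
    # file): make_mpdiffs() and add_mpdiffs() are intended to round-trip, so
    # texts can be shipped compactly from one versioned file to another:
    #
    #   ids = ['rev-1', 'rev-2']
    #   records = [(v, source_vf.get_parents(v), source_vf.get_sha1(v), d)
    #              for v, d in zip(ids, source_vf.make_mpdiffs(ids))]
    #   target_vf.add_mpdiffs(records)
    #   # raises errors.VersionedFileInvalidChecksum if a reconstructed text
    #   # does not hash to the expected sha1.
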
    def get_sha1(self, version_id):
        """Get the stored sha1 sum for the given revision.

        :param version_id: The name of the version to lookup
        """
        raise NotImplementedError(self.get_sha1)

    def get_sha1s(self, version_ids):
        """Get the stored sha1 sums for the given revisions.

        :param version_ids: The names of the versions to lookup
        :return: a list of sha1s in order according to the version_ids
        """
        raise NotImplementedError(self.get_sha1s)

    def get_suffixes(self):
        """Return the file suffixes associated with this versioned file."""
        raise NotImplementedError(self.get_suffixes)

    def get_lines(self, version_id):
        """Return version contents as a sequence of lines."""
        raise NotImplementedError(self.get_lines)

    def _get_lf_split_line_list(self, version_ids):
        # Split each requested text into a list of lines, retaining the
        # terminating newlines.
        return [StringIO(t).readlines() for t in self.get_texts(version_ids)]

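    # For reference (sketch): splitting with readlines() keeps the trailing
    # '\n' on every line, e.g. StringIO('a\nb\n').readlines() == ['a\n', 'b\n'],
    # which is the line form that multiparent and add_lines() work with.
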
    def get_ancestry(self, version_ids, topo_sorted=True):
        """Return a list of all ancestors of given version(s). This
        will not include the null revision.
        """
        raise NotImplementedError(self.get_ancestry)

    def get_graph(self, version_ids=None):
        """Return a graph from the versioned file.

        Ghosts are not listed or referenced in the graph.
        :param version_ids: Versions to select.
                            None means retrieve all versions.
        """
        if version_ids is None:
            return dict(self.iter_parents(self.versions()))
        result = {}
        pending = set(osutils.safe_revision_id(v) for v in version_ids)
        while pending:
            this_iteration = pending
            pending = set()
            for version, parents in self.iter_parents(this_iteration):
                result[version] = parents
                pending.update(parents)
            pending.difference_update(result)
        return result

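    # Illustrative sketch (assumed revision ids): for a simple history
    # A <- B <- C, get_graph(['C']) walks back through iter_parents() and
    # returns a plain parents dict such as
    #
    #   {'C': ('B',), 'B': ('A',), 'A': ()}
    #
    # which callers can feed directly into topological sorting.
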
    def get_graph_with_ghosts(self):
        """Return a graph for the entire versioned file.

        Ghosts are referenced in parents list but are not
        explicitly listed.
        """
        raise NotImplementedError(self.get_graph_with_ghosts)

    def iter_lines_added_or_present_in_versions(self, version_ids=None,
                                                pb=None):
        """Iterate over the lines in the versioned file from version_ids."""
        raise NotImplementedError(self.iter_lines_added_or_present_in_versions)

    def iter_parents(self, version_ids):
        """Iterate through the parents for many version ids.

        :param version_ids: An iterable yielding version_ids.
        :return: An iterator that yields (version_id, parents). Requested
            version_ids not present in the versioned file are simply skipped.
            The order is undefined, allowing for different optimisations in
            the underlying implementation.
        """
        for version_id in version_ids:
            try:
                yield version_id, tuple(self.get_parents(version_id))
            except errors.RevisionNotPresent:
                pass

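    # Illustrative sketch (assumed ids): because missing versions are skipped
    # rather than raising, a caller can probe presence and parents in one
    # pass:
    #
    #   parent_map = dict(vf.iter_parents(['present-rev', 'absent-rev']))
    #   # 'absent-rev' simply does not appear as a key in parent_map.
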
    def transaction_finished(self):
        """The transaction that this file was opened in has finished."""
        self.finished = True

                    new_version_ids.add(version)
            return new_version_ids


class InterVersionedFileTestProviderAdapter(object):
    """A tool to generate a suite testing multiple inter versioned-file classes.

    This is done by copying the test once for each InterVersionedFile provider
    and injecting the transport_server, transport_readonly_server,
    versionedfile_factory and versionedfile_factory_to classes into each copy.
    Each copy is also given a new id() to make it easy to identify.
    """

    def __init__(self, transport_server, transport_readonly_server, formats):
        self._transport_server = transport_server
        self._transport_readonly_server = transport_readonly_server
        self._formats = formats

    def adapt(self, test):
        result = unittest.TestSuite()
        for (interversionedfile_class,
             versionedfile_factory,
             versionedfile_factory_to) in self._formats:
            new_test = deepcopy(test)
            new_test.transport_server = self._transport_server
            new_test.transport_readonly_server = self._transport_readonly_server
            new_test.interversionedfile_class = interversionedfile_class
            new_test.versionedfile_factory = versionedfile_factory
            new_test.versionedfile_factory_to = versionedfile_factory_to
            def make_new_test_id():
                new_id = "%s(%s)" % (new_test.id(),
                                     interversionedfile_class.__name__)
                return lambda: new_id
            new_test.id = make_new_test_id()
            result.addTest(new_test)
        return result

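    # Illustrative sketch (assumed server objects, not part of the original
    # file): a test loader would typically combine the adapter with the
    # default permutations roughly like this:
    #
    #   adapter = InterVersionedFileTestProviderAdapter(
    #       transport_server, transport_readonly_server,
    #       InterVersionedFileTestProviderAdapter.default_test_list())
    #   suite = adapter.adapt(some_interversionedfile_test)
    #
    # giving one copy of the test per (interversionedfile_class, factory,
    # factory_to) tuple, each with a distinctive id().
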
    @staticmethod
    def default_test_list():
        """Generate the default list of interversionedfile permutations to test."""
        from bzrlib.weave import WeaveFile
        from bzrlib.knit import KnitVersionedFile
        result = []
        # test the fallback InterVersionedFile from annotated knits to weave
        result.append((InterVersionedFile,
                       KnitVersionedFile,
                       WeaveFile))
        for optimiser in InterVersionedFile._optimisers:
            result.append((optimiser,
                           optimiser._matching_file_from_factory,
                           optimiser._matching_file_to_factory
                           ))
        # if there are specific combinations we want to use, we can add them
        # here.
        return result