# Copyright (C) 2005, 2006 Canonical Ltd
# Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Versioned text file storage api."""
22
from bzrlib.lazy_import import lazy_import
23
lazy_import(globals(), """
33
from bzrlib.graph import Graph
34
from bzrlib.transport.memory import MemoryTransport
37
from cStringIO import StringIO
39
from bzrlib.inter import InterObject
40
from bzrlib.registry import Registry
41
from bzrlib.symbol_versioning import *
42
from bzrlib.textmerge import TextMerge

adapter_registry = Registry()
adapter_registry.register_lazy(('knit-delta-gz', 'fulltext'), 'bzrlib.knit',
    'DeltaPlainToFullText')
adapter_registry.register_lazy(('knit-ft-gz', 'fulltext'), 'bzrlib.knit',
    'FTPlainToFullText')
adapter_registry.register_lazy(('knit-annotated-delta-gz', 'knit-delta-gz'),
    'bzrlib.knit', 'DeltaAnnotatedToUnannotated')
adapter_registry.register_lazy(('knit-annotated-delta-gz', 'fulltext'),
    'bzrlib.knit', 'DeltaAnnotatedToFullText')
adapter_registry.register_lazy(('knit-annotated-ft-gz', 'knit-ft-gz'),
    'bzrlib.knit', 'FTAnnotatedToUnannotated')
adapter_registry.register_lazy(('knit-annotated-ft-gz', 'fulltext'),
    'bzrlib.knit', 'FTAnnotatedToFullText')
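
# Illustrative note (not part of the original module): the registry maps a
# (source storage kind, target storage kind) pair to an adapter class in
# bzrlib.knit, resolved lazily on first use.  A consumer holding, say,
# annotated gzipped fulltext records but needing plain fulltexts could look
# the adapter up with a standard Registry.get() lookup; the adapter's call
# signature is defined in bzrlib.knit:
#
#   adapter_cls = adapter_registry.get(('knit-annotated-ft-gz', 'fulltext'))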


class ContentFactory(object):
    """Abstract interface for insertion and retrieval from a VersionedFile.

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. One of
        'mpdiff', 'knit-annotated-ft', 'knit-annotated-delta', 'knit-ft',
        'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
        'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string element.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self):
        """Create a ContentFactory."""
        self.sha1 = None
        self.storage_kind = None
        self.key = None
        self.parents = None


class AbsentContentFactory(object):
    """A placeholder content factory for unavailable texts.

    :ivar storage_kind: 'absent'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string element.
    """

    def __init__(self, key):
        """Create an AbsentContentFactory."""
        self.sha1 = None
        self.storage_kind = 'absent'
        self.key = key
        self.parents = None


def filter_absent(record_stream):
    """Adapt a record stream to remove absent records."""
    for record in record_stream:
        if record.storage_kind != 'absent':
            yield record
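
# Illustrative sketch (assumed names): filter_absent() is typically wrapped
# around a record stream before copying texts, so that placeholder
# AbsentContentFactory records for missing texts are silently dropped:
#
#   stream = source_vf.get_record_stream(versions, 'unordered', False)
#   for record in filter_absent(stream):
#       ...  # every record here has storage_kind != 'absent'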


class VersionedFile(object):
    """Versioned text file storage.

    A versioned file manages versions of line-based text files,
    keeping track of the originating version for each line.

    To clients the "lines" of the file are represented as a list of
    strings. These strings will typically have terminal newline
    characters, but this is not required. In particular, files commonly
    do not have a newline at the end of the file.

    Texts are identified by a version-id string.
    """

    @staticmethod
    def check_not_reserved_id(version_id):
        revision.check_not_reserved_id(version_id)

    def copy_to(self, name, transport):
        """Copy this versioned file to name on transport."""
        raise NotImplementedError(self.copy_to)

    def get_record_stream(self, versions, ordering, include_delta_closure):
        """Get a stream of records for versions.

        :param versions: The versions to include. Each version is a tuple
            (version,).
        :param ordering: Either 'unordered' or 'topological'. A topologically
            sorted stream has compression parents strictly before their
            children.
        :param include_delta_closure: If True then the closure across any
            compression parents will be included (in the data content of the
            stream, not in the emitted records). This guarantees that
            'fulltext' can be used successfully on every record.
        :return: An iterator of ContentFactory objects, each of which is only
            valid until the iterator is advanced.
        """
        raise NotImplementedError(self.get_record_stream)
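
    # Illustrative sketch (hypothetical consumer): walk the stream, failing on
    # unavailable texts and otherwise converting each record's native storage
    # kind with an adapter from adapter_registry above:
    #
    #   for record in vf.get_record_stream(versions, 'topological', False):
    #       if record.storage_kind == 'absent':
    #           raise errors.RevisionNotPresent(record.key, vf)
    #       # else look up adapter_registry.get((record.storage_kind, 'fulltext'))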

    def has_version(self, version_id):
        """Returns whether version is present."""
        raise NotImplementedError(self.has_version)

    def insert_record_stream(self, stream):
        """Insert a record stream into this versioned file.

        :param stream: A stream of records to insert.
        :seealso VersionedFile.get_record_stream:
        """
        raise NotImplementedError

    def add_lines(self, version_id, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a single text on top of the versioned file.

        Must raise RevisionAlreadyPresent if the new version is
        already present in file history.

        Must raise RevisionNotPresent if any of the given parents are
        not present in file history.

        :param lines: A list of lines. Each line must be a bytestring. And all
            of them except the last must be terminated with \n and contain no
            other \n's. The last line may either contain no \n's or a single
            terminating \n. If the lines list does not meet this constraint the
            add routine may error or may succeed - but you will be unable to
            read the data back accurately. (Checking the lines have been split
            correctly is expensive and extremely unlikely to catch bugs so it
            is not done at runtime unless check_content is True.)
        :param parent_texts: An optional dictionary containing the opaque
            representations of some or all of the parents of version_id to
            allow delta optimisations. VERY IMPORTANT: the texts must be those
            returned by add_lines or data corruption can be caused.
        :param left_matching_blocks: a hint about which areas are common
            between the text and its left-hand-parent. The format is
            the SequenceMatcher.get_matching_blocks format.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :param check_content: If True, the lines supplied are verified to be
            bytestrings that are correctly formed lines.
        :return: The text sha1, the number of bytes in the text, and an opaque
            representation of the inserted version which can be provided
            back to future add_lines calls in the parent_texts dictionary.
        """
        self._check_write_ok()
        return self._add_lines(version_id, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)
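
    # Illustrative sketch (assumed version ids): a caller adds a text on top of
    # an existing one, feeding the opaque return value back through
    # parent_texts so the store can compute the delta cheaply:
    #
    #   sha1, length, opaque = vf.add_lines('rev-1', [], ['hello\n'])
    #   vf.add_lines('rev-2', ['rev-1'], ['hello\n', 'world\n'],
    #                parent_texts={'rev-1': opaque})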

    def _add_lines(self, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """Helper to do the class specific add_lines."""
        raise NotImplementedError(self.add_lines)

    def add_lines_with_ghosts(self, version_id, parents, lines,
        parent_texts=None, nostore_sha=None, random_id=False,
        check_content=True, left_matching_blocks=None):
        """Add lines to the versioned file, allowing ghosts to be present.

        This takes the same parameters as add_lines and returns the same.
        """
        self._check_write_ok()
        return self._add_lines_with_ghosts(version_id, parents, lines,
            parent_texts, nostore_sha, random_id, check_content,
            left_matching_blocks)

    def _add_lines_with_ghosts(self, version_id, parents, lines, parent_texts,
        nostore_sha, random_id, check_content, left_matching_blocks):
        """Helper to do class specific add_lines_with_ghosts."""
        raise NotImplementedError(self.add_lines_with_ghosts)

    def check(self, progress_bar=None):
        """Check the versioned file for integrity."""
        raise NotImplementedError(self.check)

    def _check_lines_not_unicode(self, lines):
        """Check that lines being added to a versioned file are not unicode."""
        for line in lines:
            if line.__class__ is not str:
                raise errors.BzrBadParameterUnicode("lines")

    def _check_lines_are_lines(self, lines):
        """Check that the lines really are full lines without inline EOL."""
        for line in lines:
            if '\n' in line[:-1]:
                raise errors.BzrBadParameterContainsNewline("lines")

    def get_format_signature(self):
        """Get a text description of the data encoding in this file."""
        raise NotImplementedError(self.get_format_signature)

    def make_mpdiffs(self, version_ids):
        """Create multiparent diffs for specified versions."""
        knit_versions = set()
        knit_versions.update(version_ids)
        parent_map = self.get_parent_map(version_ids)
        for version_id in version_ids:
            try:
                knit_versions.update(parent_map[version_id])
            except KeyError:
                raise errors.RevisionNotPresent(version_id, self)
        # We need to filter out ghosts, because we can't diff against them.
        knit_versions = set(self.get_parent_map(knit_versions).keys())
        lines = dict(zip(knit_versions,
            self._get_lf_split_line_list(knit_versions)))
        diffs = []
        for version_id in version_ids:
            target = lines[version_id]
            try:
                parents = [lines[p] for p in parent_map[version_id] if p in
                    knit_versions]
            except KeyError:
                # I don't know how this could ever trigger.
                # parent_map[version_id] was already triggered in the previous
                # for loop, and lines[p] has the 'if p in knit_versions' check,
                # so we again won't have a KeyError.
                raise errors.RevisionNotPresent(version_id, self)
            if len(parents) > 0:
                left_parent_blocks = self._extract_blocks(version_id,
                    parents[0], target)
            else:
                left_parent_blocks = None
            diffs.append(multiparent.MultiParent.from_lines(target, parents,
                left_parent_blocks))
        return diffs

    def _extract_blocks(self, version_id, source, target):
        return None

    def add_mpdiffs(self, records):
        """Add mpdiffs to this VersionedFile.

        Records should be iterables of version, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.
        """
        # Does this need to call self._check_write_ok()? (IanC 20070919)
        vf_parents = {}
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        for version, parent_ids, expected_sha1, mpdiff in records:
            versions.append(version)
            mpvf.add_diff(mpdiff, version, parent_ids)
        needed_parents = set()
        for version, parent_ids, expected_sha1, mpdiff in records:
            needed_parents.update(p for p in parent_ids
                if not mpvf.has_version(p))
        present_parents = set(self.get_parent_map(needed_parents).keys())
        for parent_id, lines in zip(present_parents,
                self._get_lf_split_line_list(present_parents)):
            mpvf.add_version(lines, parent_id, [])
        for (version, parent_ids, expected_sha1, mpdiff), lines in\
            zip(records, mpvf.get_line_list(versions)):
            if len(parent_ids) == 1:
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                    mpvf.get_diff(parent_ids[0]).num_lines()))
            else:
                left_matching_blocks = None
            try:
                _, _, version_text = self.add_lines_with_ghosts(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            except NotImplementedError:
                # The vf can't handle ghosts, so add lines normally, which will
                # (reasonably) fail if there are ghosts in the data.
                _, _, version_text = self.add_lines(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            vf_parents[version] = version_text
        for (version, parent_ids, expected_sha1, mpdiff), sha1 in\
            zip(records, self.get_sha1s(versions)):
            if expected_sha1 != sha1:
                raise errors.VersionedFileInvalidChecksum(version)
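
    # Illustrative sketch (assumed names): make_mpdiffs() and add_mpdiffs()
    # round-trip, e.g. when shipping texts between repositories as
    # multi-parent diffs:
    #
    #   versions = ['rev-1', 'rev-2']
    #   parent_map = source_vf.get_parent_map(versions)
    #   records = zip(versions,
    #                 (parent_map[v] for v in versions),
    #                 source_vf.get_sha1s(versions),
    #                 source_vf.make_mpdiffs(versions))
    #   target_vf.add_mpdiffs(records)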

    def get_sha1s(self, version_ids):
        """Get the stored sha1 sums for the given revisions.

        :param version_ids: The names of the versions to look up.
        :return: a list of sha1s in order according to the version_ids.
        """
        raise NotImplementedError(self.get_sha1s)

    def get_text(self, version_id):
        """Return version contents as a text string.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        return ''.join(self.get_lines(version_id))
    get_string = get_text

    def get_texts(self, version_ids):
        """Return the texts of listed versions as a list of strings.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        return [''.join(self.get_lines(v)) for v in version_ids]

    def get_lines(self, version_id):
        """Return version contents as a sequence of lines.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        raise NotImplementedError(self.get_lines)

    def _get_lf_split_line_list(self, version_ids):
        return [StringIO(t).readlines() for t in self.get_texts(version_ids)]

    def get_ancestry(self, version_ids, topo_sorted=True):
        """Return a list of all ancestors of given version(s). This
        will not include the null revision.

        This list will not be topologically sorted if topo_sorted=False is
        passed.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history."""
        if isinstance(version_ids, basestring):
            version_ids = [version_ids]
        raise NotImplementedError(self.get_ancestry)

    def get_ancestry_with_ghosts(self, version_ids):
        """Return a list of all ancestors of given version(s). This
        will not include the null revision.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history.

        Ghosts that are known about will be included in the ancestry list,
        but are not explicitly marked.
        """
        raise NotImplementedError(self.get_ancestry_with_ghosts)

    def get_parent_map(self, version_ids):
        """Get a map of the parents of version_ids.

        :param version_ids: The version ids to look up parents for.
        :return: A mapping from version id to parents.
        """
        raise NotImplementedError(self.get_parent_map)

    def get_parents_with_ghosts(self, version_id):
        """Return version names for parents of version_id.

        Will raise RevisionNotPresent if version_id is not present
        in the history.

        Ghosts that are known about will be included in the parent list,
        but are not explicitly marked.
        """
        try:
            return list(self.get_parent_map([version_id])[version_id])
        except KeyError:
            raise errors.RevisionNotPresent(version_id, self)

    def annotate(self, version_id):
        """Return a list of (version-id, line) tuples for version_id.

        :raise RevisionNotPresent: If the given version is
            not present in file history.
        """
        raise NotImplementedError(self.annotate)

    @deprecated_method(one_five)
    def join(self, other, pb=None, msg=None, version_ids=None,
             ignore_missing=False):
        """Integrate versions from other into this versioned file.

        If version_ids is None all versions from other should be
        incorporated into this versioned file.

        Must raise RevisionNotPresent if any of the specified versions
        are not present in the other file's history unless ignore_missing
        is supplied, in which case they are silently skipped.
        """
        self._check_write_ok()
        return InterVersionedFile.get(other, self).join(
            pb, msg, version_ids, ignore_missing)

    def iter_lines_added_or_present_in_versions(self, version_ids=None,
                                                pb=None):
        """Iterate over the lines in the versioned file from version_ids.

        This may return lines from other versions. Each item the returned
        iterator yields is a tuple of a line and the id of a text version
        that the line is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is a performance-sensitive API).

        NOTES: Lines are normalised: they will all have \n terminators.
               Lines are returned in arbitrary order.

        :return: An iterator over (line, version_id).
        """
        raise NotImplementedError(self.iter_lines_added_or_present_in_versions)

    def plan_merge(self, ver_a, ver_b):
        """Return pseudo-annotation indicating how the two versions merge.

        This is computed between versions a and b and their common
        base.

        Weave lines present in none of them are skipped entirely.

        Legend:
        killed-base Dead in base revision
        killed-both Killed in each revision
        killed-a    Killed in a revision
        killed-b    Killed in b revision
        unchanged   Alive in both a and b (possibly created in both)
        new-a       Created in a revision
        new-b       Created in b revision
        ghost-a     Killed in a, unborn in b
        ghost-b     Killed in b, unborn in a
        irrelevant  Not in either revision
        """
        raise NotImplementedError(VersionedFile.plan_merge)

    def weave_merge(self, plan, a_marker=TextMerge.A_MARKER,
                    b_marker=TextMerge.B_MARKER):
        return PlanWeaveMerge(plan, a_marker, b_marker).merge_lines()[0]
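
    # Illustrative sketch (hypothetical plan data): a plan is a sequence of
    # (state, line) tuples using the states listed in plan_merge() above;
    # weave_merge() renders it into merged lines:
    #
    #   plan = [('unchanged', 'a\n'), ('new-a', 'b\n'), ('new-b', 'c\n')]
    #   merged_lines = vf.weave_merge(plan)
    #   # 'unchanged' lines pass straight through; the diverging new-a/new-b
    #   # runs come out wrapped in the A_MARKER/B_MARKER conflict markers.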


class RecordingVersionedFileDecorator(object):
    """A minimal versioned file that records calls made on it.

    Only enough methods have been added to support tests using it to date.

    :ivar calls: A list of the calls made; can be reset at any time by
        assigning [] to its value.
    """

    def __init__(self, backing_vf):
        """Create a RecordingVersionedFileDecorator decorating backing_vf.

        :param backing_vf: The versioned file to answer all methods.
        """
        self._backing_vf = backing_vf
        self.calls = []

    def get_lines(self, version_ids):
        self.calls.append(("get_lines", version_ids))
        return self._backing_vf.get_lines(version_ids)


class _PlanMergeVersionedFile(object):
    """A VersionedFile for uncommitted and committed texts.

    It is intended to allow merges to be planned with working tree texts.
    It implements only the small part of the VersionedFile interface used by
    PlanMerge. It falls back to multiple versionedfiles for data not stored in
    _PlanMergeVersionedFile itself.
    """

    def __init__(self, file_id, fallback_versionedfiles=None):
        """Create a _PlanMergeVersionedFile.

        :param file_id: Used when raising exceptions.
        :param fallback_versionedfiles: If supplied, the set of fallbacks to
            use. Otherwise, _PlanMergeVersionedFile.fallback_versionedfiles
            can be appended to later.
        """
        self._file_id = file_id
        if fallback_versionedfiles is None:
            self.fallback_versionedfiles = []
        else:
            self.fallback_versionedfiles = fallback_versionedfiles
        self._parents = {}
        self._lines = {}

    def plan_merge(self, ver_a, ver_b, base=None):
        """See VersionedFile.plan_merge"""
        from bzrlib.merge import _PlanMerge
        if base is None:
            return _PlanMerge(ver_a, ver_b, self).plan_merge()
        old_plan = list(_PlanMerge(ver_a, base, self).plan_merge())
        new_plan = list(_PlanMerge(ver_a, ver_b, self).plan_merge())
        return _PlanMerge._subtract_plans(old_plan, new_plan)

    def plan_lca_merge(self, ver_a, ver_b, base=None):
        from bzrlib.merge import _PlanLCAMerge
        graph = self._get_graph()
        new_plan = _PlanLCAMerge(ver_a, ver_b, self, graph).plan_merge()
        if base is None:
            return new_plan
        old_plan = _PlanLCAMerge(ver_a, base, self, graph).plan_merge()
        return _PlanLCAMerge._subtract_plans(list(old_plan), list(new_plan))

    def add_lines(self, version_id, parents, lines):
        """See VersionedFile.add_lines

        Lines are added locally, not to the fallback versionedfiles. Also,
        ghosts are permitted. Only reserved ids are permitted.
        """
        if not revision.is_reserved_id(version_id):
            raise ValueError('Only reserved ids may be used')
        if parents is None:
            raise ValueError('Parents may not be None')
        if lines is None:
            raise ValueError('Lines may not be None')
        self._parents[version_id] = tuple(parents)
        self._lines[version_id] = lines
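
    # Illustrative sketch (assumed reserved id and helper names): merge code
    # stores the uncommitted working-tree text under a reserved version id and
    # lets committed texts come from the fallbacks:
    #
    #   planning_vf = _PlanMergeVersionedFile(file_id)
    #   planning_vf.fallback_versionedfiles.append(repository_texts_vf)
    #   planning_vf.add_lines('current:', [last_committed_id], wt_lines)
    #   plan = planning_vf.plan_merge('current:', other_version_id)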

    def get_lines(self, version_id):
        """See VersionedFile.get_lines"""
        lines = self._lines.get(version_id)
        if lines is not None:
            return lines
        for versionedfile in self.fallback_versionedfiles:
            try:
                return versionedfile.get_lines(version_id)
            except errors.RevisionNotPresent:
                continue
        raise errors.RevisionNotPresent(version_id, self._file_id)

    def get_ancestry(self, version_id, topo_sorted=False):
        """See VersionedFile.get_ancestry.

        Note that this implementation assumes that if a VersionedFile can
        answer get_ancestry at all, it can give an authoritative answer. In
        fact, ghosts can invalidate this assumption. But it's good enough
        99% of the time, and far cheaper/simpler.

        Also note that the results of this version are never topologically
        sorted, and are a set.
        """
        if topo_sorted:
            raise ValueError('This implementation does not provide sorting')
        parents = self._parents.get(version_id)
        if parents is None:
            for vf in self.fallback_versionedfiles:
                try:
                    return vf.get_ancestry(version_id, topo_sorted=False)
                except errors.RevisionNotPresent:
                    continue
            raise errors.RevisionNotPresent(version_id, self._file_id)
        ancestry = set([version_id])
        for parent in parents:
            ancestry.update(self.get_ancestry(parent, topo_sorted=False))
        return ancestry

    def get_parent_map(self, version_ids):
        """See VersionedFile.get_parent_map"""
        result = {}
        pending = set(version_ids)
        for key in version_ids:
            try:
                result[key] = self._parents[key]
            except KeyError:
                pass
        pending = pending - set(result.keys())
        for versionedfile in self.fallback_versionedfiles:
            parents = versionedfile.get_parent_map(pending)
            result.update(parents)
            pending = pending - set(parents.keys())
        return result

    def _get_graph(self):
        from bzrlib.graph import (
            DictParentsProvider,
            _StackedParentsProvider,
            )
        from bzrlib.repofmt.knitrepo import _KnitParentsProvider
        parent_providers = [DictParentsProvider(self._parents)]
        for vf in self.fallback_versionedfiles:
            parent_providers.append(_KnitParentsProvider(vf))
        return Graph(_StackedParentsProvider(parent_providers))


class PlanWeaveMerge(TextMerge):
    """Weave merge that takes a plan as its input.

    This exists so that VersionedFile.plan_merge is implementable.
    Most callers will want to use WeaveMerge instead.
    """

    def __init__(self, plan, a_marker=TextMerge.A_MARKER,
                 b_marker=TextMerge.B_MARKER):
        TextMerge.__init__(self, a_marker, b_marker)
        self.plan = plan

    def _merge_struct(self):
        lines_a = []
        lines_b = []
        ch_a = ch_b = False

        def outstanding_struct():
            if not lines_a and not lines_b:
                return
            elif ch_a and not ch_b:
                yield (lines_a,)
            elif ch_b and not ch_a:
                yield (lines_b,)
            elif lines_a == lines_b:
                yield (lines_a,)
            else:
                yield (lines_a, lines_b)

        # We previously considered either 'unchanged' or 'killed-both' lines
        # to be possible places to resynchronize. However, assuming agreement
        # on killed-both lines may be too aggressive. -- mbp 20060324
        for state, line in self.plan:
            if state == 'unchanged':
                # resync and flush queued conflicts changes if any
                for struct in outstanding_struct():
                    yield struct
                lines_a = []
                lines_b = []
                ch_a = ch_b = False

            if state == 'unchanged':
                if line:
                    yield ([line],)
            elif state == 'killed-a':
                ch_a = True
                lines_b.append(line)
            elif state == 'killed-b':
                ch_b = True
                lines_a.append(line)
            elif state == 'new-a':
                ch_a = True
                lines_a.append(line)
            elif state == 'new-b':
                ch_b = True
                lines_b.append(line)
            elif state == 'conflicted-a':
                ch_b = ch_a = True
                lines_a.append(line)
            elif state == 'conflicted-b':
                ch_b = ch_a = True
                lines_b.append(line)
            else:
                if state not in ('irrelevant', 'ghost-a', 'ghost-b',
                                 'killed-base', 'killed-both'):
                    raise AssertionError(state)
        for struct in outstanding_struct():
            yield struct


class WeaveMerge(PlanWeaveMerge):
    """Weave merge that takes a VersionedFile and two versions as its input."""

    def __init__(self, versionedfile, ver_a, ver_b,
                 a_marker=PlanWeaveMerge.A_MARKER,
                 b_marker=PlanWeaveMerge.B_MARKER):
        plan = versionedfile.plan_merge(ver_a, ver_b)
        PlanWeaveMerge.__init__(self, plan, a_marker, b_marker)
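
# Illustrative sketch (assumed version ids): WeaveMerge plans and merges in
# one step; merge_lines() yields the merged line iterable as its first
# element, the same element weave_merge() above extracts with [0]:
#
#   merged_lines = WeaveMerge(vf, 'rev-a', 'rev-b').merge_lines()[0]
#   open('merged.txt', 'wb').writelines(merged_lines)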


class InterVersionedFile(InterObject):
    """This class represents operations taking place between two VersionedFiles.

    Its instances have methods like join, and contain
    references to the source and target versionedfiles these operations can be
    carried out on.

    Often we will provide convenience methods on 'versionedfile' which carry out
    operations with another versionedfile - they will always forward to
    InterVersionedFile.get(other).method_name(parameters).
    """

    _optimisers = []
    """The available optimised InterVersionedFile types."""

    def join(self, pb=None, msg=None, version_ids=None, ignore_missing=False):
        """Integrate versions from self.source into self.target.

        If version_ids is None all versions from source should be
        incorporated into this versioned file.

        Must raise RevisionNotPresent if any of the specified versions
        are not present in the other file's history unless ignore_missing is
        supplied, in which case they are silently skipped.
        """
        target = self.target
        version_ids = self._get_source_version_ids(version_ids, ignore_missing)
        graph = Graph(self.source)
        search = graph._make_breadth_first_searcher(version_ids)
        transitive_ids = set()
        map(transitive_ids.update, list(search))
        parent_map = self.source.get_parent_map(transitive_ids)
        order = tsort.topo_sort(parent_map.items())
        pb = ui.ui_factory.nested_progress_bar()
        parent_texts = {}
        try:
            # TODO for incremental cross-format work:
            # make a versioned file with the following content:
            # all revisions we have been asked to join
            # all their ancestors that are *not* in target already.
            # the immediate parents of the above two sets, with
            # empty parent lists - these versions are in target already
            # and the incorrect version data will be ignored.
            # TODO: for all ancestors that are present in target already,
            # check them for consistent data, this requires moving sha1 from
            # TODO: remove parent texts when they are not relevant any more for
            # memory pressure reduction. RBC 20060313
            # pb.update('Converting versioned data', 0, len(order))
            total = len(order)
            for index, version in enumerate(order):
                pb.update('Converting versioned data', index, total)
                if version in target:
                    continue
                _, _, parent_text = target.add_lines(version,
                    parent_map[version],
                    self.source.get_lines(version),
                    parent_texts=parent_texts)
                parent_texts[version] = parent_text
            return total
        finally:
            pb.finished()

    def _get_source_version_ids(self, version_ids, ignore_missing):
        """Determine the version ids to be used from self.source.

        :param version_ids: The caller-supplied version ids to check. (None
            for all). If None is in version_ids, it is stripped.
        :param ignore_missing: if True, remove missing ids from the version
            list. If False, raise RevisionNotPresent on a missing version id.
        :return: A set of version ids.
        """
        if version_ids is None:
            # None cannot be in source.versions
            return set(self.source.versions())
        if ignore_missing:
            return set(self.source.versions()).intersection(set(version_ids))
        new_version_ids = set()
        for version in version_ids:
            if version is None:
                continue
            if not self.source.has_version(version):
                raise errors.RevisionNotPresent(version, str(self.source))
            new_version_ids.add(version)
        return new_version_ids