~bzr-pqm/bzr/bzr.dev


Viewing changes to bzrlib/tests/test_versionedfile.py

  • Committer: Andrew Bennetts
  • Date: 2009-02-17 00:54:19 UTC
  • mto: (4002.1.9 suspend-write-group)
  • mto: This revision was merged to the branch mainline in revision 4025.
  • Revision ID: andrew.bennetts@canonical.com-20090217005419-i9qdpanq2cwm3j59
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
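
For context, the workflow the new test exercises looks roughly like this. A minimal sketch, assuming only the VersionedFiles API names that appear in the diff below (insert_record_stream, get_missing_compression_parent_keys, get_record_stream); the helper name and its arguments are illustrative, not part of bzrlib:

    def insert_then_report_missing_bases(files, entries):
        """Insert a record stream that may contain knit deltas whose
        compression parents (bases) are absent from both the stream and
        `files`, then report which bases are still needed.

        On a store that supports partial insertion the stream is accepted
        and the absent bases are reported; on one that does not, the
        insert is expected to raise errors.RevisionNotPresent instead.
        """
        files.insert_record_stream(entries)
        # Keys of compression parents that must still be supplied before
        # the delta records become fully usable.
        return list(files.get_missing_compression_parent_keys())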

@@ -47,6 +47,7 @@
 from bzrlib.tests import (
     TestCase,
     TestCaseWithMemoryTransport,
+    TestNotApplicable,
     TestScenarioApplier,
     TestSkipped,
     condition_isinstance,
@@ -94,30 +95,35 @@
                 ConstantMapper('inventory')),
             'graph':True,
             'key_length':1,
+            'support_partial_insertion': False,
             }),
         ('named-knit', {
             'cleanup':None,
             'factory':make_file_factory(False, ConstantMapper('revisions')),
             'graph':True,
             'key_length':1,
+            'support_partial_insertion': True,
             }),
-        ('named-nograph-knit-pack', {
+        ('named-nograph-nodelta-knit-pack', {
             'cleanup':cleanup_pack_knit,
             'factory':make_pack_factory(False, False, 1),
             'graph':False,
             'key_length':1,
+            'support_partial_insertion': False,
             }),
         ('named-graph-knit-pack', {
             'cleanup':cleanup_pack_knit,
             'factory':make_pack_factory(True, True, 1),
             'graph':True,
             'key_length':1,
+            'support_partial_insertion': True,
             }),
         ('named-graph-nodelta-knit-pack', {
             'cleanup':cleanup_pack_knit,
             'factory':make_pack_factory(True, False, 1),
             'graph':True,
             'key_length':1,
+            'support_partial_insertion': False,
             }),
         ]
     len_two_adapter.scenarios = [
@@ -127,18 +133,21 @@
                 PrefixMapper()),
             'graph':True,
             'key_length':2,
+            'support_partial_insertion': False,
             }),
         ('annotated-knit-escape', {
             'cleanup':None,
             'factory':make_file_factory(True, HashEscapedPrefixMapper()),
             'graph':True,
             'key_length':2,
+            'support_partial_insertion': False,
             }),
         ('plain-knit-pack', {
             'cleanup':cleanup_pack_knit,
             'factory':make_pack_factory(True, True, 2),
             'graph':True,
             'key_length':2,
+            'support_partial_insertion': True,
             }),
         ]
     for test in iter_suite_tests(to_adapt):
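
The new 'support_partial_insertion' key is surfaced to the tests in the next hunk as self.support_partial_insertion, alongside the existing scenario attributes such as self.key_length. As a rough, simplified stand-in for what the scenario machinery does (this is not bzrlib's actual TestScenarioApplier implementation), each parameter dictionary becomes attributes on a copy of the test:

    import copy

    def apply_scenario(test, scenario_name, scenario_params):
        """Return a copy of `test` with every scenario parameter set as an
        attribute, e.g. {'support_partial_insertion': True} yields a test
        where self.support_partial_insertion is True."""
        new_test = copy.copy(test)
        for attr, value in scenario_params.items():
            setattr(new_test, attr, value)
        return new_test
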
@@ -1969,24 +1978,69 @@
         else:
             self.assertIdenticalVersionedFile(source, files)
 
-    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
-        """Insertion where a needed basis is not included aborts safely."""
-        # We use a knit source with a graph always here to be sure we are
-        # getting a binary delta.
+    def get_knit_delta_source(self):
+        """Get a source that can produce a stream with knit delta records,
+        regardless of this test's scenario.
+        """
         mapper = self.get_mapper()
         source_transport = self.get_transport('source')
         source_transport.mkdir('.')
         source = make_file_factory(False, mapper)(source_transport)
         get_diamond_files(source, self.key_length, trailing_eol=True,
             nograph=False, left_only=False)
+        return source
+
+    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
+        """Insertion where a needed basis is not included notifies the caller
+        of the missing basis.  In the meantime a record missing its basis is
+        not added.
+        """
+        source = self.get_knit_delta_source()
         entries = source.get_record_stream([self.get_simple_key('origin'),
             self.get_simple_key('merged')], 'unordered', False)
         files = self.get_versionedfiles()
-        self.assertRaises(RevisionNotPresent, files.insert_record_stream,
-            entries)
+        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
+        if self.support_partial_insertion:
+            files.insert_record_stream(entries)
+            missing_bases = files.get_missing_compression_parent_keys()
+            self.assertEqual(set([self.get_simple_key('left')]),
+                set(missing_bases))
+        else:
+            self.assertRaises(
+                errors.RevisionNotPresent, files.insert_record_stream, entries)
         files.check()
         self.assertEqual({}, files.get_parent_map([]))
 
+    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
+        """Insertion where a needed basis is not included notifies the caller
+        of the missing basis.  That basis can be added in a second
+        insert_record_stream call that does not need to repeat records present
+        in the previous stream.
+        """
+        if not self.support_partial_insertion:
+            raise TestNotApplicable(
+                'versioned file scenario does not support partial insertion')
+        source = self.get_knit_delta_source()
+        entries = source.get_record_stream([self.get_simple_key('origin'),
+            self.get_simple_key('merged')], 'unordered', False)
+        files = self.get_versionedfiles()
+        files.insert_record_stream(entries)
+        missing_bases = files.get_missing_compression_parent_keys()
+        self.assertEqual(set([self.get_simple_key('left')]),
+            set(missing_bases))
+        # 'merged' is not yet inserted
+        files.check()
+        merged_key = self.get_simple_key('merged')
+        self.assertEqual([], files.get_parent_map([merged_key]).keys())
+        missing_entries = source.get_record_stream(
+            [self.get_simple_key('left')], 'unordered', True)
+        files.insert_record_stream(missing_entries)
+        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
+        # Now 'merged' is fully inserted
+        files.check()
+        self.assertEqual(
+            [merged_key], files.get_parent_map([merged_key]).keys())
+
     def test_iter_lines_added_or_present_in_keys(self):
         # test that we get at least an equalset of the lines added by
         # versions in the store.
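
Mirroring test_insert_record_stream_delta_missing_basis_can_be_added_later, a caller that is told about missing compression parents can complete the insertion with a second stream. A minimal sketch, assuming the same VersionedFiles API as above; repair_missing_bases is a hypothetical helper, not a bzrlib function:

    def repair_missing_bases(files, source):
        """Fetch any compression parents `files` is still waiting for from
        `source` and insert them, completing earlier partial insertions."""
        missing = list(files.get_missing_compression_parent_keys())
        if missing:
            # As in the test above, request the delta closure for the second
            # stream so the missing bases can be expanded to full texts.
            files.insert_record_stream(
                source.get_record_stream(missing, 'unordered', True))
        return missing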