~bzr-pqm/bzr/bzr.dev

Viewing changes to tests/test_groupcompress.py

  • Committer: John Arbash Meinel
  • Date: 2009-03-05 03:29:49 UTC
  • mto: (0.17.34 trunk)
  • mto: This revision was merged to the branch mainline in revision 4280.
  • Revision ID: john@arbash-meinel.com-20090305032949-ffww56phklv1vhbj
Play around with detecting compression breaks.
Trying to get tricky with whether the last insert was a fulltext or a delta
did not pay off well (yet).
However, using similar logic actually shows some of the best results yet.
The main difference is probably that we detect overflow and roll back.
So when a big fulltext pushes us over the line, in the past we would leave
it alone (poorly compressed at the end of the last group) and start a new
group, which would then start off with a new fulltext.
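
The sketch below illustrates the overflow-and-rollback idea in plain Python. It is not bzrlib's actual GroupCompressor API: Group, add_text, pack_texts and MAX_GROUP_SIZE are all hypothetical names, and zlib over the whole group stands in for the real delta compressor. Each insert records the compressed end point; if an insert pushes the group past the size limit, it is pulled back out and used to start the next group instead of being left, poorly compressed, at the end of the current one.

import zlib

# Assumed size threshold for one compressed group; the real limit differs.
MAX_GROUP_SIZE = 4 * 1024 * 1024


class Group(object):
    """Accumulates texts and tracks the compressed size after each insert."""

    def __init__(self):
        self.chunks = []
        self.end_points = []  # compressed size after each insert, for rollback

    def add_text(self, text):
        """Insert one text and return the new compressed end point."""
        self.chunks.append(text)
        end_point = len(zlib.compress(b''.join(self.chunks)))
        self.end_points.append(end_point)
        return end_point

    def pop_last(self):
        """Roll back the most recent insert and return its text."""
        self.end_points.pop()
        return self.chunks.pop()


def pack_texts(texts):
    """Greedily pack byte strings into groups, rolling back on overflow."""
    groups = [Group()]
    for text in texts:
        group = groups[-1]
        end_point = group.add_text(text)
        if end_point > MAX_GROUP_SIZE and len(group.chunks) > 1:
            # Overflow detected: undo the insert and start a new group with it.
            overflow_text = group.pop_last()
            group = Group()
            group.add_text(overflow_text)
            groups.append(group)
    return groups

The rollback only fires when the group already holds other texts, so a single oversized fulltext still gets a group of its own rather than looping forever; recompressing the whole group on every insert keeps the sketch short at the cost of being far cruder than the real compressor.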

--- tests/test_groupcompress.py
+++ tests/test_groupcompress.py
@@ -59,7 +59,7 @@
     def test_one_nosha_delta(self):
         # diff against NUKK
         compressor = groupcompress.GroupCompressor(True)
-        sha1, end_point = compressor.compress(('label',),
+        sha1, end_point, _, _ = compressor.compress(('label',),
             'strange\ncommon\n', None)
         self.assertEqual(sha_string('strange\ncommon\n'), sha1)
         expected_lines = [
@@ -85,10 +85,10 @@
 
     def test_two_nosha_delta(self):
         compressor = groupcompress.GroupCompressor(True)
-        sha1_1, _ = compressor.compress(('label',),
+        sha1_1, _, _, _ = compressor.compress(('label',),
             'strange\ncommon long line\nthat needs a 16 byte match\n', None)
         expected_lines = list(compressor.lines)
-        sha1_2, end_point = compressor.compress(('newlabel',),
+        sha1_2, end_point, _, _ = compressor.compress(('newlabel',),
             'common long line\nthat needs a 16 byte match\ndifferent\n', None)
         self.assertEqual(sha_string('common long line\n'
                                     'that needs a 16 byte match\n'
@@ -108,12 +108,12 @@
         # The first interesting test: make a change that should use lines from
         # both parents.
         compressor = groupcompress.GroupCompressor(True)
-        sha1_1, end_point = compressor.compress(('label',),
+        sha1_1, end_point, _, _ = compressor.compress(('label',),
             'strange\ncommon very very long line\nwith some extra text\n', None)
-        sha1_2, _ = compressor.compress(('newlabel',),
+        sha1_2, _, _, _ = compressor.compress(('newlabel',),
             'different\nmoredifferent\nand then some more\n', None)
         expected_lines = list(compressor.lines)
-        sha1_3, end_point = compressor.compress(('label3',),
+        sha1_3, end_point, _, _ = compressor.compress(('label3',),
             'new\ncommon very very long line\nwith some extra text\n'
             'different\nmoredifferent\nand then some more\n',
             None)
@@ -149,10 +149,10 @@
         # Knit fetching will try to reconstruct texts locally which results in
         # reading something that is in the compressor stream already.
         compressor = groupcompress.GroupCompressor(True)
-        sha1_1, _ = compressor.compress(('label',),
+        sha1_1, _, _, _ = compressor.compress(('label',),
             'strange\ncommon long line\nthat needs a 16 byte match\n', None)
         expected_lines = list(compressor.lines)
-        sha1_2, end_point = compressor.compress(('newlabel',),
+        sha1_2, end_point, _, _ = compressor.compress(('newlabel',),
             'common long line\nthat needs a 16 byte match\ndifferent\n', None)
         # get the first out
         self.assertEqual(('strange\ncommon long line\n'