# Copyright (C) 2006 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA


"""Tests for encoding performance."""

from bzrlib import (
    cache_utf8,
    osutils,
    )

from bzrlib.benchmarks import Benchmark


_normal_revision_id = (u'john@arbash-meinel.com-20060801200018'
                       u'-cafa6272d9b8cac4')
_unicode_revision_id = (u'\u062c\u0648\u062c\u0648@\xe5rbash-meinel.com-'
                        u'\xb5\xb5\xb5-20060801200018-cafa6272d9b8cac4')

_normal_revision_id_utf8 = _normal_revision_id.encode('utf-8')
_unicode_revision_id_utf8 = _unicode_revision_id.encode('utf-8')


class EncodingBenchmark(Benchmark):
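    """Benchmark encoding revision ids to UTF-8 with and without caching."""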

    def setUp(self):
        super(EncodingBenchmark, self).setUp()
        # Make sure we start and end with a clean cache
        cache_utf8.clear_encoding_cache()
        self.addCleanup(cache_utf8.clear_encoding_cache)

    def encode_1M(self, revision_id):
        """Encode the given revision id 1 million times"""
        # In a real kernel tree there are 7.7M lines of code
        # so the initial import actually has to encode a revision
        # id to store annotated lines one time for every line.
        for i in xrange(1000000):
            revision_id.encode('utf8')

    def encode_cached_1M(self, revision_id):
        """Encode the given revision id 1 million times using the cache"""
        encode = cache_utf8.encode
        for i in xrange(1000000):
            encode(revision_id)

    def encode_multi(self, revision_list, count):
        """Encode each entry in the list count times"""
        for i in xrange(count):
            for revision_id in revision_list:
                revision_id.encode('utf-8')

    def encode_cached_multi(self, revision_list, count):
        """Encode each entry in the list count times"""
        encode = cache_utf8.encode
        for i in xrange(count):
            for revision_id in revision_list:
                encode(revision_id)

    def test_encode_1_by_1M_ascii(self):
        """Test encoding a single revision id 1 million times."""
        self.time(self.encode_1M, _normal_revision_id)

    def test_encode_1_by_1M_ascii_cached(self):
        """Test encoding a single revision id 1 million times."""
        self.time(self.encode_cached_1M, _normal_revision_id)

    def test_encode_1_by_1M_ascii_str(self):
        # Some places think they have a unicode revision id when they
        # actually have a plain str, so .encode('utf8') first has to
        # decode from ascii and then encode to utf8.
        self.time(self.encode_1M, str(_normal_revision_id))

    def test_encode_1_by_1M_ascii_str_cached(self):
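        """Test encoding a str revision id 1 million times using the cache."""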
        self.time(self.encode_cached_1M, str(_normal_revision_id))

    def test_encode_1_by_1M_unicode(self):
        """Test encoding a single revision id 1 million times."""
        self.time(self.encode_1M, _unicode_revision_id)

    def test_encode_1_by_1M_unicode_cached(self):
        """Test encoding a single revision id 1 million times."""
        self.time(self.encode_cached_1M, _unicode_revision_id)

    def test_encode_1k_by_1k_ascii(self):
        """Test encoding 1000 ascii revision ids 1000 times each"""
        revisions = [unicode(osutils.rand_chars(60)) for x in xrange(1000)]
        self.time(self.encode_multi, revisions, 1000)

    def test_encode_1k_by_1k_ascii_cached(self):
        """Test encoding 1000 ascii revision ids 1000 times each"""
        revisions = [unicode(osutils.rand_chars(60)) for x in xrange(1000)]
        self.time(self.encode_cached_multi, revisions, 1000)

    def test_encode_1k_by_1k_unicode(self):
        """Test encoding 1000 unicode revision ids 1000 times each"""
        revisions = [u'\u062c\u0648\u062c\u0648' +
                     unicode(osutils.rand_chars(60)) for x in xrange(1000)]
        self.time(self.encode_multi, revisions, 1000)

    def test_encode_1k_by_1k_unicode_cached(self):
        """Test encoding 1000 unicode revision ids 1000 times each"""
        revisions = [u'\u062c\u0648\u062c\u0648' +
                     unicode(osutils.rand_chars(60)) for x in xrange(1000)]
        self.time(self.encode_cached_multi, revisions, 1000)


class DecodingBenchmarks(Benchmark):
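    """Benchmark decoding UTF-8 revision ids with and without caching."""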

    def setUp(self):
        super(DecodingBenchmarks, self).setUp()
        # Make sure we start and end with a clean cache
        cache_utf8.clear_encoding_cache()
        self.addCleanup(cache_utf8.clear_encoding_cache)

    def decode_1M(self, revision_id):
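        """Decode the given revision id 1 million times"""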
        for i in xrange(1000000):
            revision_id.decode('utf8')

    def decode_cached_1M(self, revision_id):
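        """Decode the given revision id 1 million times using the cache"""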
        decode = cache_utf8.decode
        for i in xrange(1000000):
            decode(revision_id)

    def decode_multi(self, revision_list, count):
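        """Decode each entry in the list count times"""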
        for i in xrange(count):
            for revision_id in revision_list:
                revision_id.decode('utf-8')

    def decode_cached_multi(self, revision_list, count):
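        """Decode each entry in the list count times using the cache"""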
        decode = cache_utf8.decode
        for i in xrange(count):
            for revision_id in revision_list:
                decode(revision_id)

    def test_decode_1_by_1M_ascii(self):
        """Test decoding a single revision id 1 million times."""
        self.time(self.decode_1M, _normal_revision_id_utf8)

    def test_decode_1_by_1M_ascii_cached(self):
        """Test decoding a single revision id 1 million times."""
        self.time(self.decode_cached_1M, _normal_revision_id_utf8)

    def test_decode_1_by_1M_unicode(self):
        """Test decoding a single revision id 1 million times."""
        self.time(self.decode_1M, _unicode_revision_id_utf8)

    def test_decode_1_by_1M_unicode_cached(self):
        """Test decoding a single revision id 1 million times."""
        self.time(self.decode_cached_1M, _unicode_revision_id_utf8)

    def test_decode_1k_by_1k_ascii(self):
        """Test decoding 1000 ascii revision ids 1000 times each"""
        revisions = [osutils.rand_chars(60) for x in xrange(1000)]
        self.time(self.decode_multi, revisions, 1000)

    def test_decode_1k_by_1k_ascii_cached(self):
        """Test decoding 1000 ascii revision ids 1000 times each"""
        revisions = [osutils.rand_chars(60) for x in xrange(1000)]
        self.time(self.decode_cached_multi, revisions, 1000)

    def test_decode_1k_by_1k_unicode(self):
        """Test decoding 1000 unicode revision ids 1000 times each"""
        revisions = [(u'\u062c\u0648\u062c\u0648' +
                      unicode(osutils.rand_chars(60))).encode('utf8')
                     for x in xrange(1000)]
        self.time(self.decode_multi, revisions, 1000)

    def test_decode_1k_by_1k_unicode_cached(self):
        """Test decoding 1000 unicode revision ids 1000 times each"""
        revisions = [(u'\u062c\u0648\u062c\u0648' +
                      unicode(osutils.rand_chars(60))).encode('utf8')
                     for x in xrange(1000)]
        self.time(self.decode_cached_multi, revisions, 1000)