# Copyright (C) 2005, 2006 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
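
"""Tests for the hashcache.

The hashcache stores SHA-1 digests of working files keyed on a stat-style
fingerprint, and deliberately declines to cache files that were modified so
recently that their timestamps cannot be trusted.
"""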

import os
import stat
import time

from bzrlib import osutils
from bzrlib.errors import BzrError
from bzrlib.hashcache import HashCache
from bzrlib.tests import OsFifoFeature, TestCaseInTempDir, TestCase


def sha1(t):
    return osutils.sha(t).hexdigest()
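

# Assumed helper: a few tests below need enough real time to pass that a
# freshly written file is no longer "too new" for the hashcache to cache.
# The exact delay is an assumption; it only needs to exceed the cache's
# freshness cutoff.
def pause():
    time.sleep(5.0)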


class TestHashCache(TestCaseInTempDir):
    """Test the hashcache against a real directory"""

    def make_hashcache(self):
        # make a dummy bzr directory just to hold the cache
        os.mkdir('.bzr')
        hc = HashCache('.', '.bzr/stat-cache')
        return hc

    def reopen_hashcache(self):
        hc = HashCache('.', '.bzr/stat-cache')
        hc.read()
        return hc

    def test_hashcache_initial_miss(self):
        """Get correct hash from an empty hashcache"""
        hc = self.make_hashcache()
        self.build_tree_contents([('foo', 'hello')])
        self.assertEquals(hc.get_sha1('foo'),
                          'aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d')
        self.assertEquals(hc.miss_count, 1)
        self.assertEquals(hc.hit_count, 0)

    def test_hashcache_new_file(self):
        hc = self.make_hashcache()
        self.build_tree_contents([('foo', 'goodbye')])
        # now read without pausing; it may not be possible to cache it as it's
        # still very new
        self.assertEquals(hc.get_sha1('foo'), sha1('goodbye'))

    def test_hashcache_nonexistent_file(self):
        hc = self.make_hashcache()
        self.assertEquals(hc.get_sha1('no-name-yet'), None)

    def test_hashcache_replaced_file(self):
        hc = self.make_hashcache()
        self.build_tree_contents([('foo', 'goodbye')])
        self.assertEquals(hc.get_sha1('foo'), sha1('goodbye'))
        os.remove('foo')
        self.assertEquals(hc.get_sha1('foo'), None)
        self.build_tree_contents([('foo', 'new content')])
        self.assertEquals(hc.get_sha1('foo'), sha1('new content'))

    def test_hashcache_not_file(self):
        hc = self.make_hashcache()
        self.build_tree(['subdir/'])
        self.assertEquals(hc.get_sha1('subdir'), None)

    def test_hashcache_load(self):
        hc = self.make_hashcache()
        self.build_tree_contents([('foo', 'contents')])
        # let the file age past the freshness cutoff so its hash can be cached
        pause()
        self.assertEquals(hc.get_sha1('foo'), sha1('contents'))
        # write the cache out, reopen it, and check the entry survives a reload
        hc.write()
        hc = self.reopen_hashcache()
        self.assertEquals(hc.get_sha1('foo'), sha1('contents'))
        self.assertEquals(hc.hit_count, 1)

    def test_hammer_hashcache(self):
        hc = self.make_hashcache()
        for i in xrange(10000):
            self.log('start writing at %s', time.time())
            f = file('foo', 'w')
            try:
                last_content = '%08x' % i
                f.write(last_content)
            finally:
                f.close()
            last_sha1 = sha1(last_content)
            self.log("iteration %d: %r -> %r",
                     i, last_content, last_sha1)
            got_sha1 = hc.get_sha1('foo')
            self.assertEquals(got_sha1, last_sha1)
            hc.write()
            hc = self.reopen_hashcache()

    def test_hashcache_raise(self):
        """check that hashcache can raise BzrError"""
        self.requireFeature(OsFifoFeature)
        hc = self.make_hashcache()
        os.mkfifo('a')
        # It's possible that the system supports fifos but the filesystem
        # can't.  In that case we should skip at this point.  But in fact
        # such combinations don't usually occur for the filesystem where
        # people test bzr.
        self.assertRaises(BzrError, hc.get_sha1, 'a')


class FakeHashCache(HashCache):
    """Hashcache that consults a fake clock rather than the real one.

    This lets us examine how old or new files would be handled, without
    actually having to wait for time to pass.
    """

    def __init__(self):
        # a dummy root and cache file name; this cache never touches the
        # real filesystem
        HashCache.__init__(self, '.', 'hashcache')
        self._files = {}
        # simulated clock running forward as operations happen
        self._clock = 0

    def put_file(self, filename, file_contents):
        abspath = './' + filename
        self._files[abspath] = (file_contents, self._clock)

    def _fingerprint(self, abspath, fs=None):
        entry = self._files[abspath]
        # fake stat fingerprint: (size, mtime, ctime, ...) plus dummy inode,
        # device and regular-file mode fields (the exact dummy values are an
        # assumption)
        return (len(entry[0]),
                entry[1], entry[1],
                10, 20,
                stat.S_IFREG | 0600)

    def _really_sha1_file(self, abspath):
        if abspath in self._files:
            return sha1(self._files[abspath][0])
        else:
            return None

    def _cutoff_time(self):
        # anything with a (fake) mtime within two ticks of the current fake
        # clock is considered too new to cache
        return self._clock - 2

    def pretend_to_sleep(self, secs):
        self._clock += secs
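

# Illustrative sketch of the fake-clock behaviour described in the
# FakeHashCache docstring; the helper name _demo_fake_clock and the 'demo'
# filename are invented for this example rather than taken from the suite.
def _demo_fake_clock():
    hc = FakeHashCache()
    hc.put_file('demo', 'payload')
    hc.get_sha1('demo')        # too new to cache: computed, counted as a miss
    hc.pretend_to_sleep(20)    # move the fake clock well past the 2-tick cutoff
    hc.get_sha1('demo')        # still a miss, but the result is now cached
    hc.get_sha1('demo')        # served from the cache: a hit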


class TestHashCacheFakeFilesystem(TestCaseInTempDir):
    """Tests the hashcache using a simulated OS.
    """

    def make_hashcache(self):
        return FakeHashCache()

    def test_hashcache_miss_new_file(self):
        """A new file gives the right sha1 but misses"""
        hc = self.make_hashcache()
        hc.put_file('foo', 'hello')
        self.assertEquals(hc.get_sha1('foo'), sha1('hello'))
        self.assertEquals(hc.miss_count, 1)
        self.assertEquals(hc.hit_count, 0)
        # if we try again it's still too new to have been cached, so it misses
        self.assertEquals(hc.get_sha1('foo'), sha1('hello'))
        self.assertEquals(hc.miss_count, 2)
        self.assertEquals(hc.hit_count, 0)

    def test_hashcache_old_file(self):
        """An old file gives the right sha1 and hits"""
        hc = self.make_hashcache()
        hc.put_file('foo', 'hello')
        hc.pretend_to_sleep(20)
        # the file is now old enough to cache; the first read is still a miss
        # because nothing has been cached yet, but it gets the correct hash
        self.assertEquals(hc.get_sha1('foo'), sha1('hello'))
        self.assertEquals(hc.miss_count, 1)
        self.assertEquals(hc.hit_count, 0)
        # a second read is served from the cache
        self.assertEquals(hc.get_sha1('foo'), sha1('hello'))
        self.assertEquals(hc.miss_count, 1)
        self.assertEquals(hc.hit_count, 1)
        hc.pretend_to_sleep(3)
        # and it still hits after more simulated time has passed
        self.assertEquals(hc.get_sha1('foo'), sha1('hello'))
        self.assertEquals(hc.miss_count, 1)
        self.assertEquals(hc.hit_count, 2)

    def test_hashcache_invalidates(self):
        hc = self.make_hashcache()
        hc.put_file('foo', 'hello')
        hc.pretend_to_sleep(20)
        # prime the cache with the old contents, which are old enough to cache
        hc.get_sha1('foo')
        # rewriting the file must invalidate the cached entry
        hc.put_file('foo', 'h1llo')
        self.assertEquals(hc.get_sha1('foo'), sha1('h1llo'))
        self.assertEquals(hc.miss_count, 2)
        self.assertEquals(hc.hit_count, 0)