# Copyright (C) 2005, 2006 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

import os
import stat
import time

from bzrlib import osutils
from bzrlib.errors import BzrError
from bzrlib.hashcache import HashCache
from bzrlib.tests import OsFifoFeature, TestCaseInTempDir, TestCase


def sha1(t):
    return osutils.sha(t).hexdigest()


def pause():
    # allow it to stabilize
    start = int(time.time())
    while int(time.time()) == start:
        time.sleep(0.2)


class TestHashCache(TestCaseInTempDir):
    """Test the hashcache against a real directory"""

    def make_hashcache(self):
        # make a dummy bzr directory just to hold the cache
        os.mkdir('.bzr')
        hc = HashCache('.', '.bzr/stat-cache')
        return hc

    def reopen_hashcache(self):
        hc = HashCache('.', '.bzr/stat-cache')
        hc.read()
        return hc

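    # The tests below run against the real filesystem.  A file whose
    # timestamps are very close to the present is deliberately not cached
    # (see the "too new" comments here and _cutoff_time in the fake cache
    # below), so pause() is used where a file must look old enough to cache.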
    def test_hashcache_initial_miss(self):
        """Get correct hash from an empty hashcache"""
        hc = self.make_hashcache()
        self.build_tree_contents([('foo', 'hello')])
        self.assertEquals(hc.get_sha1('foo'),
                          'aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d')
        self.assertEquals(hc.miss_count, 1)
        self.assertEquals(hc.hit_count, 0)

    def test_hashcache_new_file(self):
        hc = self.make_hashcache()
        self.build_tree_contents([('foo', 'goodbye')])
        # now read without pausing; it may not be possible to cache it as it's
        # still very new
        self.assertEquals(hc.get_sha1('foo'), sha1('goodbye'))

    def test_hashcache_nonexistent_file(self):
        hc = self.make_hashcache()
        self.assertEquals(hc.get_sha1('no-name-yet'), None)

    def test_hashcache_replaced_file(self):
        hc = self.make_hashcache()
        self.build_tree_contents([('foo', 'goodbye')])
        self.assertEquals(hc.get_sha1('foo'), sha1('goodbye'))
        os.remove('foo')
        self.assertEquals(hc.get_sha1('foo'), None)
        self.build_tree_contents([('foo', 'new content')])
        self.assertEquals(hc.get_sha1('foo'), sha1('new content'))

    def test_hashcache_not_file(self):
        hc = self.make_hashcache()
        self.build_tree(['subdir/'])
        self.assertEquals(hc.get_sha1('subdir'), None)

    def test_hashcache_load(self):
        hc = self.make_hashcache()
        self.build_tree_contents([('foo', 'contents')])
        pause()
        self.assertEquals(hc.get_sha1('foo'), sha1('contents'))
        # write out, read back in and check that we don't need to
        # re-read the file
        hc.write()
        hc = self.reopen_hashcache()
        self.assertEquals(hc.get_sha1('foo'), sha1('contents'))
        self.assertEquals(hc.hit_count, 1)

    def test_hammer_hashcache(self):
        hc = self.make_hashcache()
        for i in xrange(10000):
            self.log('start writing at %s', time.time())
            f = file('foo', 'w')
            try:
                last_content = '%08x' % i
                f.write(last_content)
            finally:
                f.close()
            last_sha1 = sha1(last_content)
            self.log("iteration %d: %r -> %r",
                     i, last_content, last_sha1)
            got_sha1 = hc.get_sha1('foo')
            self.assertEquals(got_sha1, last_sha1)
            hc.write()
            hc = self.reopen_hashcache()

    def test_hashcache_raise(self):
        """check that hashcache can raise BzrError"""
        self.requireFeature(OsFifoFeature)
        hc = self.make_hashcache()
        os.mkfifo('a')
        # It's possible that the system supports fifos but the filesystem
        # can't. In that case we should skip at this point. But in fact
        # such combinations don't usually occur for the filesystem where
        # people test bzr.
        self.assertRaises(BzrError, hc.get_sha1, 'a')


class FakeHashCache(HashCache):
    """Hashcache that consults a fake clock rather than the real one.

    This lets us examine how old or new files would be handled, without
    actually having to wait for time to pass.
    """

    def __init__(self):
        # use a dummy root and cache file name so we won't touch the
        # real filesystem
        HashCache.__init__(self, '.', 'hashcache')
        self._files = {}
        # simulated clock running forward as operations happen
        self._clock = 0

    def put_file(self, filename, file_contents):
        abspath = './' + filename
        self._files[abspath] = (file_contents, self._clock)

    def _fingerprint(self, abspath, fs=None):
        entry = self._files[abspath]
        # fake stat fingerprint: (size, mtime, ctime, ino, dev, mode), with
        # the timestamps taken from the simulated clock
        return (len(entry[0]),
                entry[1], entry[1],
                10, 20,
                stat.S_IFREG | 0600)

    def _really_sha1_file(self, abspath):
        if abspath in self._files:
            return sha1(self._files[abspath][0])
        else:
            return None

    def _cutoff_time(self):
        return self._clock - 2

    def pretend_to_sleep(self, secs):
        self._clock += secs


class TestHashCacheFakeFilesystem(TestCaseInTempDir):
    """Tests the hashcache using a simulated OS.
    """

    def make_hashcache(self):
        return FakeHashCache()

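    # FakeHashCache._cutoff_time() is only two simulated seconds in the
    # past, so a file is still "too new" to cache right after put_file();
    # pretend_to_sleep() must advance the fake clock before a hit is possible.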
    def test_hashcache_miss_new_file(self):
        """A new file gives the right sha1 but misses"""
        hc = self.make_hashcache()
        hc.put_file('foo', 'hello')
        self.assertEquals(hc.get_sha1('foo'), sha1('hello'))
        self.assertEquals(hc.miss_count, 1)
        self.assertEquals(hc.hit_count, 0)
        # if we try again it's still too new; another miss, not a hit
        self.assertEquals(hc.get_sha1('foo'), sha1('hello'))
        self.assertEquals(hc.miss_count, 2)
        self.assertEquals(hc.hit_count, 0)

    def test_hashcache_old_file(self):
        """An old file gives the right sha1 and hits"""
        hc = self.make_hashcache()
        hc.put_file('foo', 'hello')
        hc.pretend_to_sleep(20)
        # file is not yet in the cache; should get the correct hash but miss
        self.assertEquals(hc.get_sha1('foo'), sha1('hello'))
        self.assertEquals(hc.miss_count, 1)
        self.assertEquals(hc.hit_count, 0)
        # the second read can be served from the cache
        self.assertEquals(hc.get_sha1('foo'), sha1('hello'))
        self.assertEquals(hc.miss_count, 1)
        self.assertEquals(hc.hit_count, 1)
        hc.pretend_to_sleep(3)
        # and it still hits after more simulated time has passed
        self.assertEquals(hc.get_sha1('foo'), sha1('hello'))
        self.assertEquals(hc.miss_count, 1)
        self.assertEquals(hc.hit_count, 2)

    def test_hashcache_invalidates(self):
        hc = self.make_hashcache()
        hc.put_file('foo', 'hello')
        hc.pretend_to_sleep(20)
        self.assertEquals(hc.get_sha1('foo'), sha1('hello'))
        # overwriting the file changes its fingerprint, so the cached hash
        # is not reused
        hc.put_file('foo', 'h1llo')
        self.assertEquals(hc.get_sha1('foo'), sha1('h1llo'))
        self.assertEquals(hc.miss_count, 2)
        self.assertEquals(hc.hit_count, 0)