# Copyright (C) 2005, 2006 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
import sha
import stat
import time

from bzrlib.errors import BzrError
from bzrlib.hashcache import HashCache
from bzrlib.tests import TestCaseInTempDir, TestSkipped, TestCase


def sha1(t):
    return sha.new(t).hexdigest()
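

# Note on timing (a summary drawn from the comments in this module; see
# bzrlib.hashcache for the authoritative behaviour): the hashcache will not
# remember the sha1 of a file that looks like it changed very recently,
# because such a file may still be changing.  The real-directory tests
# therefore pause until the clock moves on before expecting cache hits,
# while the fake-filesystem tests advance a simulated clock instead.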


def pause():
    # allow it to stabilize: wait until the integer clock ticks over so a
    # just-written file no longer looks brand new
    start = int(time.time())
    while int(time.time()) == start:
        time.sleep(0.2)


class TestHashCache(TestCaseInTempDir):
    """Test the hashcache against a real directory"""

    def make_hashcache(self):
        # make a dummy bzr directory just to hold the cache
        os.mkdir('.bzr')
        hc = HashCache('.', '.bzr/stat-cache')
        return hc

    def reopen_hashcache(self):
        hc = HashCache('.', '.bzr/stat-cache')
        hc.read()
        return hc

    def test_hashcache_initial_miss(self):
        """Get correct hash from an empty hashcache"""
        hc = self.make_hashcache()
        self.build_tree_contents([('foo', 'hello')])
        self.assertEquals(hc.get_sha1('foo'),
                          'aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d')
        self.assertEquals(hc.miss_count, 1)
        self.assertEquals(hc.hit_count, 0)

    def test_hashcache_new_file(self):
        hc = self.make_hashcache()
        self.build_tree_contents([('foo', 'goodbye')])
        # now read without pausing; it may not be possible to cache it as it's
        # still too new
        self.assertEquals(hc.get_sha1('foo'), sha1('goodbye'))

    def test_hashcache_nonexistent_file(self):
        hc = self.make_hashcache()
        self.assertEquals(hc.get_sha1('no-name-yet'), None)

    def test_hashcache_replaced_file(self):
        hc = self.make_hashcache()
        self.build_tree_contents([('foo', 'goodbye')])
        self.assertEquals(hc.get_sha1('foo'), sha1('goodbye'))
        os.remove('foo')
        self.assertEquals(hc.get_sha1('foo'), None)
        self.build_tree_contents([('foo', 'new content')])
        self.assertEquals(hc.get_sha1('foo'), sha1('new content'))

    def test_hashcache_not_file(self):
        hc = self.make_hashcache()
        self.build_tree(['subdir/'])
        self.assertEquals(hc.get_sha1('subdir'), None)

    def test_hashcache_load(self):
        hc = self.make_hashcache()
        self.build_tree_contents([('foo', 'contents')])
        pause()
        self.assertEquals(hc.get_sha1('foo'), sha1('contents'))
        # write out, read back in and check that we don't need to
        # re-read the file contents
        hc.write()
        hc = self.reopen_hashcache()
        self.assertEquals(hc.get_sha1('foo'), sha1('contents'))
        self.assertEquals(hc.hit_count, 1)

    def test_hammer_hashcache(self):
        hc = self.make_hashcache()
        for i in xrange(10000):
            self.log('start writing at %s', time.time())
            f = file('foo', 'w')
            try:
                last_content = '%08x' % i
                f.write(last_content)
            finally:
                f.close()
            last_sha1 = sha1(last_content)
            self.log("iteration %d: %r -> %r",
                     i, last_content, last_sha1)
            got_sha1 = hc.get_sha1('foo')
            self.assertEquals(got_sha1, last_sha1)
            hc.write()
            hc = self.reopen_hashcache()

    def test_hashcache_raise(self):
        """check that hashcache can raise BzrError"""
        hc = self.make_hashcache()
        if getattr(os, 'mkfifo', None) is None:
            raise TestSkipped('filesystem fifos not supported on this system')
        os.mkfifo('a')
        # It's possible that the system supports fifos but the filesystem
        # can't.  In that case we should skip at this point.  But in fact
        # such combinations don't usually occur for the filesystems where
        # the test suite is normally run.
        self.assertRaises(BzrError, hc.get_sha1, 'a')
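

# The remaining tests avoid real sleeps entirely: FakeHashCache below
# replaces the filesystem and clock access of HashCache with in-memory
# fakes, so "waiting" is just pretend_to_sleep() bumping a counter.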


class FakeHashCache(HashCache):
    """Hashcache that consults a fake clock rather than the real one.

    This lets us examine how old or new files would be handled, without
    actually having to wait for time to pass.
    """

    def __init__(self):
        # use a dummy root and cache file name; nothing here should touch
        # the real filesystem
        HashCache.__init__(self, '.', 'hashcache')
        # maps abspath -> (file contents, mtime taken from the fake clock)
        self._files = {}
        # simulated clock running forward as operations happen
        self._clock = 0

    def put_file(self, filename, file_contents):
        abspath = './' + filename
        self._files[abspath] = (file_contents, self._clock)

    def _fingerprint(self, abspath, fs=None):
        entry = self._files[abspath]
        # fingerprint is (size, mtime, ctime, ino, dev, mode); use the fake
        # clock for both times and fixed dummy values for the rest
        return (len(entry[0]),
                entry[1], entry[1],
                10, 20,
                stat.S_IFREG | 0600)

    def _really_sha1_file(self, abspath):
        if abspath in self._files:
            return sha1(self._files[abspath][0])
        else:
            return None

    def _cutoff_time(self):
        return self._clock - 2

    def pretend_to_sleep(self, secs):
        self._clock += secs


class TestHashCacheFakeFilesystem(TestCaseInTempDir):
    """Tests the hashcache using a simulated OS.
    """

    def make_hashcache(self):
        return FakeHashCache()

    def test_hashcache_miss_new_file(self):
        """A new file gives the right sha1 but misses"""
        hc = self.make_hashcache()
        hc.put_file('foo', 'hello')
        self.assertEquals(hc.get_sha1('foo'), sha1('hello'))
        self.assertEquals(hc.miss_count, 1)
        self.assertEquals(hc.hit_count, 0)
        # if we try again it's still too new
        self.assertEquals(hc.get_sha1('foo'), sha1('hello'))
        self.assertEquals(hc.miss_count, 2)
        self.assertEquals(hc.hit_count, 0)

    def test_hashcache_old_file(self):
        """An old file gives the right sha1 and hits"""
        hc = self.make_hashcache()
        hc.put_file('foo', 'hello')
        hc.pretend_to_sleep(20)
        # the file is now old enough to cache; the first read still misses
        self.assertEquals(hc.get_sha1('foo'), sha1('hello'))
        self.assertEquals(hc.miss_count, 1)
        self.assertEquals(hc.hit_count, 0)
        self.assertEquals(hc.get_sha1('foo'), sha1('hello'))
        self.assertEquals(hc.miss_count, 1)
        self.assertEquals(hc.hit_count, 1)
        hc.pretend_to_sleep(3)
        self.assertEquals(hc.get_sha1('foo'), sha1('hello'))
        self.assertEquals(hc.miss_count, 1)
        self.assertEquals(hc.hit_count, 2)

    def test_hashcache_invalidates(self):
        hc = self.make_hashcache()
        hc.put_file('foo', 'hello')
        hc.pretend_to_sleep(20)
        # first lookup is a miss but lets the old contents be cached
        hc.get_sha1('foo')
        hc.put_file('foo', 'h1llo')
        self.assertEquals(hc.get_sha1('foo'), sha1('h1llo'))
        self.assertEquals(hc.miss_count, 2)
        self.assertEquals(hc.hit_count, 0)