def refresh_all(self):
    """Scan all files and remove entries where the cache entry is obsolete.

    Obsolete entries are those where the file has been modified or deleted
    since the entry was inserted.
    """
    # Each cache entry maps path -> (sha1, fingerprint); fingerprint[3]
    # is the inode number.  Collect (inode, path, entry) so we can stat
    # the files in inode order, which is cheaper on most filesystems.
    prep = [(ce[1][3], path, ce) for (path, ce) in self._cache.iteritems()]
    prep.sort()

    for inum, path, cache_entry in prep:
        abspath = os.sep.join([self.basedir, path])
        # We don't really need to re-hash the files; we just need to
        # check if they're up to date, and the stat fingerprint is
        # enough for that.
        fp = _fingerprint(abspath)

        cache_fp = cache_entry[1]

        if (not fp) or (cache_fp != fp):
            # not here or not a regular file anymore
            self.removed_count += 1
            self.needs_write = True
            del self._cache[path]
def get_sha1(self, path):
    """Return the sha1 of a file.

    Returns the cached digest when the file's stat fingerprint is
    unchanged, otherwise re-reads and re-hashes the file.  Returns
    None if the path is missing or not a regular file.
    """
    abspath = os.sep.join([self.basedir, path])
    file_fp = _fingerprint(abspath)

    if not file_fp:
        # not a regular file or not existing; any cached entry for it
        # is now stale and must be dropped.
        if path in self._cache:
            self.removed_count += 1
            self.needs_write = True
            del self._cache[path]
        return None

    if path in self._cache:
        cache_sha1, cache_fp = self._cache[path]
    else:
        cache_sha1, cache_fp = None, None

    if cache_fp == file_fp:
        # fingerprint unchanged, so the cached digest is still valid
        self.hit_count += 1
        return cache_sha1

    digest = sha_file(file(abspath, 'rb', buffering=65000))

    now = int(time.time())
    if file_fp[1] >= now or file_fp[2] >= now:
        # changed too recently; can't be cached.  we can
        # return the result and it could possibly be cached
        # next time.
        self.danger_count += 1
        if path in self._cache:
            # drop the stale entry rather than keep one we can't trust
            self.removed_count += 1
            self.needs_write = True
            del self._cache[path]
    else:
        self.update_count += 1
        self.needs_write = True
        self._cache[path] = (digest, file_fp)

    return digest