249
249
# NOTE(review): presumably the Windows system error code 267
# ("The directory name is invalid") — confirm against the callers
# that compare against this value; not derivable from this chunk.
ERROR_DIRECTORY = 267
252
if not getattr(struct, '_compile', None):
    # Cannot pre-compile the dirstate pack_stat: fall back to passing the
    # format string to struct.pack on every call.
    def pack_stat(st, _encode=binascii.b2a_base64, _pack=struct.pack):
        """Convert stat values into a packed representation.

        :param st: An os.stat_result (or compatible) object.
        :return: A base64-encoded packing of (size, mtime, ctime, dev,
            ino & 0xFFFFFFFF, mode), with the trailing newline that
            b2a_base64 always appends stripped off.
        """
        # The inode is masked to 32 bits because '>L' cannot hold a full
        # 64-bit inode number.
        return _encode(_pack('>LLLLLL', st.st_size, int(st.st_mtime),
            int(st.st_ctime), st.st_dev, st.st_ino & 0xFFFFFFFF,
            st.st_mode))[:-1]
260
# compile the struct compiler we need, so as to only do it once
from _struct import Struct
_compiled_pack = Struct('>LLLLLL').pack
def pack_stat(st, _encode=binascii.b2a_base64, _pack=_compiled_pack):
    """Convert stat values into a packed representation.

    :param st: An os.stat_result (or compatible) object.
    :return: A base64-encoded packing of (size, mtime, ctime, dev,
        ino & 0xFFFFFFFF, mode) with the trailing newline stripped.
    """
    # jam 20060614 it isn't really worth removing more entries if we
    # are going to leave it in packed form.
    # With only st_mtime and st_mode filesize is 5.5M and read time is 275ms
    # With all entries, filesize is 5.9M and read time is maybe 280ms
    # well within the noise margin
    # base64 encoding always adds a final newline, so strip it off
    # The current version
    return _encode(_pack(st.st_size, int(st.st_mtime), int(st.st_ctime),
        st.st_dev, st.st_ino & 0xFFFFFFFF, st.st_mode))[:-1]
    # This is 0.060s / 1.520s faster by not encoding as much information
    # return _encode(_pack('>LL', int(st.st_mtime), st.st_mode))[:-1]
    # This is not strictly faster than _encode(_pack())[:-1]
    # return '%X.%X.%X.%X.%X.%X' % (
    #      st.st_size, int(st.st_mtime), int(st.st_ctime),
    #      st.st_dev, st.st_ino, st.st_mode)
    # Similar to the _encode(_pack('>LL'))
    # return '%X.%X' % (int(st.st_mtime), st.st_mode)
275
# This is 0.060s / 1.520s faster by not encoding as much information
276
# return _encode(_pack('>LL', int(st.st_mtime), st.st_mode))[:-1]
277
# This is not strictly faster than _encode(_pack())[:-1]
278
# return '%X.%X.%X.%X.%X.%X' % (
279
# st.st_size, int(st.st_mtime), int(st.st_ctime),
280
# st.st_dev, st.st_ino, st.st_mode)
281
# Similar to the _encode(_pack('>LL'))
282
# return '%X.%X' % (int(st.st_mtime), st.st_mode)
285
def _unpack_stat(packed_stat):
286
"""Turn a packed_stat back into the stat fields.
288
This is meant as a debugging tool, should not be used in real code.
290
(st_size, st_mtime, st_ctime, st_dev, st_ino,
291
st_mode) = struct.unpack('>LLLLLL', binascii.a2b_base64(packed_stat))
292
return dict(st_size=st_size, st_mtime=st_mtime, st_ctime=st_ctime,
293
st_dev=st_dev, st_ino=st_ino, st_mode=st_mode)
252
296
class SHA1Provider(object):
253
297
"""An interface for getting sha1s of a file."""
1850
1892
file_id, "This parent is not a directory.")
1852
1894
def _observed_sha1(self, entry, sha1, stat_value,
1853
_stat_to_minikind=_stat_to_minikind):
1895
_stat_to_minikind=_stat_to_minikind, _pack_stat=pack_stat):
1854
1896
"""Note the sha1 of a file.
1856
1898
:param entry: The entry the sha1 is for.
1862
1904
except KeyError:
1863
1905
# Unhandled kind
1907
packed_stat = _pack_stat(stat_value)
1865
1908
if minikind == 'f':
1866
1909
if self._cutoff_time is None:
1867
1910
self._sha_cutoff_time()
1868
1911
if (stat_value.st_mtime < self._cutoff_time
1869
1912
and stat_value.st_ctime < self._cutoff_time):
1870
1913
entry[1][0] = ('f', sha1, stat_value.st_size, entry[1][0][3],
1871
pack_stat(stat_value))
1872
1915
self._mark_modified([entry])
1874
1917
def _sha_cutoff_time(self):
2427
2470
raise errors.BzrError('missing num_entries line')
2428
2471
self._num_entries = int(num_entries_line[len('num_entries: '):-1])
2430
def sha1_from_stat(self, path, stat_result, _pack_stat=pack_stat):
    """Find a sha1 given a stat lookup.

    :param path: The path the stat was taken for (unused in the lookup
        itself; kept for interface compatibility).
    :param stat_result: An os.lstat-style result for the file.
    :return: The cached sha1 for a matching packed stat, or None.
    """
    # _pack_stat is bound as a default argument to make it a fast local
    # lookup on each call; existing callers pass only (path, stat_result).
    return self._get_packed_stat_index().get(_pack_stat(stat_result), None)
2434
2477
def _get_packed_stat_index(self):
2435
2478
"""Get a packed_stat index of self._dirblocks."""
2465
2508
# IN_MEMORY_HASH_MODIFIED, we should only fail quietly if we fail
2466
2509
# to save an IN_MEMORY_HASH_MODIFIED, and fail *noisily* if we
2467
2510
# fail to save IN_MEMORY_MODIFIED
2468
if not self._worth_saving():
2471
grabbed_write_lock = False
2472
if self._lock_state != 'w':
2473
grabbed_write_lock, new_lock = self._lock_token.temporary_write_lock()
2474
# Switch over to the new lock, as the old one may be closed.
2475
# TODO: jam 20070315 We should validate the disk file has
2476
# not changed contents, since temporary_write_lock may
2477
# not be an atomic operation.
2478
self._lock_token = new_lock
2479
self._state_file = new_lock.f
2480
if not grabbed_write_lock:
2481
# We couldn't grab a write lock, so we switch back to a read one
2484
lines = self.get_lines()
2485
self._state_file.seek(0)
2486
self._state_file.writelines(lines)
2487
self._state_file.truncate()
2488
self._state_file.flush()
2489
self._maybe_fdatasync()
2490
self._mark_unmodified()
2492
if grabbed_write_lock:
2493
self._lock_token = self._lock_token.restore_read_lock()
2494
self._state_file = self._lock_token.f
2511
if self._worth_saving():
2512
grabbed_write_lock = False
2513
if self._lock_state != 'w':
2514
grabbed_write_lock, new_lock = self._lock_token.temporary_write_lock()
2515
# Switch over to the new lock, as the old one may be closed.
2495
2516
# TODO: jam 20070315 We should validate the disk file has
2496
# not changed contents. Since restore_read_lock may
2497
# not be an atomic operation.
2499
def _maybe_fdatasync(self):
    """Flush to disk if possible and if not configured off.

    Consults the 'dirstate.fdatasync' config option; when it is truthy,
    fdatasyncs the underlying state file's descriptor.
    """
    if self._config_stack.get('dirstate.fdatasync'):
        osutils.fdatasync(self._state_file.fileno())
2517
# not changed contents. Since temporary_write_lock may
2518
# not be an atomic operation.
2519
self._lock_token = new_lock
2520
self._state_file = new_lock.f
2521
if not grabbed_write_lock:
2522
# We couldn't grab a write lock, so we switch back to a read one
2525
lines = self.get_lines()
2526
self._state_file.seek(0)
2527
self._state_file.writelines(lines)
2528
self._state_file.truncate()
2529
self._state_file.flush()
2530
self._mark_unmodified()
2532
if grabbed_write_lock:
2533
self._lock_token = self._lock_token.restore_read_lock()
2534
self._state_file = self._lock_token.f
2535
# TODO: jam 20070315 We should validate the disk file has
2536
# not changed contents. Since restore_read_lock may
2537
# not be an atomic operation.
2504
2539
def _worth_saving(self):
2505
2540
"""Is it worth saving the dirstate or not?"""