~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/dirstate.py

  • Committer: Patch Queue Manager
  • Date: 2015-10-05 13:45:00 UTC
  • mfrom: (6603.3.1 bts794146)
  • Revision ID: pqm@pqm.ubuntu.com-20151005134500-v244rho557tv0ukd
(vila) Resolve Bug #1480015: Test failure: hexify removed from paramiko
 (Andrew Starr-Bochicchio)

Show diffs side-by-side

added added

removed removed

Lines of Context:
218
218
 
219
219
"""
220
220
 
 
221
from __future__ import absolute_import
 
222
 
221
223
import bisect
222
 
import binascii
223
224
import errno
224
225
import operator
225
226
import os
226
227
from stat import S_IEXEC
227
228
import stat
228
 
import struct
229
229
import sys
230
230
import time
231
231
import zlib
232
232
 
233
233
from bzrlib import (
234
234
    cache_utf8,
 
235
    config,
235
236
    debug,
236
237
    errors,
237
238
    inventory,
239
240
    osutils,
240
241
    static_tuple,
241
242
    trace,
 
243
    urlutils,
242
244
    )
243
245
 
244
246
 
249
251
ERROR_DIRECTORY = 267
250
252
 
251
253
 
252
 
if not getattr(struct, '_compile', None):
    # This interpreter's struct module cannot pre-compile the pack format,
    # so fall back to handing the format string to struct.pack every call.
    def pack_stat(st, _encode=binascii.b2a_base64, _pack=struct.pack):
        """Convert stat values into a packed representation."""
        packed = _pack('>LLLLLL', st.st_size, int(st.st_mtime),
                       int(st.st_ctime), st.st_dev, st.st_ino & 0xFFFFFFFF,
                       st.st_mode)
        # b2a_base64 always appends a trailing newline; slice it off.
        return _encode(packed)[:-1]
else:
    # Compile the struct packer once at import time so every call reuses
    # the bound pack method instead of re-parsing the format string.
    from _struct import Struct
    _compiled_pack = Struct('>LLLLLL').pack
    def pack_stat(st, _encode=binascii.b2a_base64, _pack=_compiled_pack):
        """Convert stat values into a packed representation."""
        # jam 20060614: it isn't really worth removing entries once the
        # value stays in packed form.  With only st_mtime and st_mode the
        # dirstate file is 5.5M and reads in ~275ms; with all six fields it
        # is 5.9M and ~280ms -- well within the noise margin.
        packed = _pack(st.st_size, int(st.st_mtime), int(st.st_ctime),
                       st.st_dev, st.st_ino & 0xFFFFFFFF, st.st_mode)
        # base64 encoding always adds a final newline, so strip it off.
        return _encode(packed)[:-1]
283
 
 
284
 
 
285
 
def _unpack_stat(packed_stat):
286
 
    """Turn a packed_stat back into the stat fields.
287
 
 
288
 
    This is meant as a debugging tool, should not be used in real code.
289
 
    """
290
 
    (st_size, st_mtime, st_ctime, st_dev, st_ino,
291
 
     st_mode) = struct.unpack('>LLLLLL', binascii.a2b_base64(packed_stat))
292
 
    return dict(st_size=st_size, st_mtime=st_mtime, st_ctime=st_ctime,
293
 
                st_dev=st_dev, st_ino=st_ino, st_mode=st_mode)
294
 
 
295
 
 
296
254
class SHA1Provider(object):
297
255
    """An interface for getting sha1s of a file."""
298
256
 
448
406
        self._known_hash_changes = set()
449
407
        # How many hash changed entries can we have without saving
450
408
        self._worth_saving_limit = worth_saving_limit
 
409
        self._config_stack = config.LocationStack(urlutils.local_path_to_url(
 
410
            path))
451
411
 
452
412
    def __repr__(self):
453
413
        return "%s(%r)" % \
1332
1292
                    parent_trees.append((parent_id, parent_tree))
1333
1293
                    parent_tree.lock_read()
1334
1294
                result.set_parent_trees(parent_trees, [])
1335
 
                result.set_state_from_inventory(tree.inventory)
 
1295
                result.set_state_from_inventory(tree.root_inventory)
1336
1296
            finally:
1337
1297
                for revid, parent_tree in parent_trees:
1338
1298
                    parent_tree.unlock()
1601
1561
                    else:
1602
1562
                        source_path = child_basename
1603
1563
                    if new_path_utf8:
1604
 
                        target_path = new_path_utf8 + source_path[len(old_path):]
 
1564
                        target_path = \
 
1565
                            new_path_utf8 + source_path[len(old_path_utf8):]
1605
1566
                    else:
1606
 
                        if old_path == '':
 
1567
                        if old_path_utf8 == '':
1607
1568
                            raise AssertionError("cannot rename directory to"
1608
1569
                                                 " itself")
1609
 
                        target_path = source_path[len(old_path) + 1:]
 
1570
                        target_path = source_path[len(old_path_utf8) + 1:]
1610
1571
                    adds.append((None, target_path, entry[0][2], entry[1][1], False))
1611
1572
                    deletes.append(
1612
1573
                        (source_path, target_path, entry[0][2], None, False))
1613
 
                deletes.append((old_path_utf8, new_path, file_id, None, False))
 
1574
                deletes.append(
 
1575
                    (old_path_utf8, new_path_utf8, file_id, None, False))
 
1576
 
1614
1577
        self._check_delta_ids_absent(new_ids, delta, 1)
1615
1578
        try:
1616
1579
            # Finish expunging deletes/first half of renames.
1685
1648
            entry_key = st(dirname, basename, file_id)
1686
1649
            block_index, present = self._find_block_index_from_key(entry_key)
1687
1650
            if not present:
1688
 
                self._raise_invalid(new_path, file_id,
1689
 
                    "Unable to find block for this record."
1690
 
                    " Was the parent added?")
 
1651
                # The block where we want to put the file is not present.
 
1652
                # However, it might have just been an empty directory. Look for
 
1653
                # the parent in the basis-so-far before throwing an error.
 
1654
                parent_dir, parent_base = osutils.split(dirname)
 
1655
                parent_block_idx, parent_entry_idx, _, parent_present = \
 
1656
                    self._get_block_entry_index(parent_dir, parent_base, 1)
 
1657
                if not parent_present:
 
1658
                    self._raise_invalid(new_path, file_id,
 
1659
                        "Unable to find block for this record."
 
1660
                        " Was the parent added?")
 
1661
                self._ensure_block(parent_block_idx, parent_entry_idx, dirname)
 
1662
 
1691
1663
            block = self._dirblocks[block_index][1]
1692
1664
            entry_index, present = self._find_entry_index(entry_key, block)
1693
1665
            if real_add:
1892
1864
                    file_id, "This parent is not a directory.")
1893
1865
 
1894
1866
    def _observed_sha1(self, entry, sha1, stat_value,
1895
 
        _stat_to_minikind=_stat_to_minikind, _pack_stat=pack_stat):
 
1867
        _stat_to_minikind=_stat_to_minikind):
1896
1868
        """Note the sha1 of a file.
1897
1869
 
1898
1870
        :param entry: The entry the sha1 is for.
1904
1876
        except KeyError:
1905
1877
            # Unhandled kind
1906
1878
            return None
1907
 
        packed_stat = _pack_stat(stat_value)
1908
1879
        if minikind == 'f':
1909
1880
            if self._cutoff_time is None:
1910
1881
                self._sha_cutoff_time()
1911
1882
            if (stat_value.st_mtime < self._cutoff_time
1912
1883
                and stat_value.st_ctime < self._cutoff_time):
1913
1884
                entry[1][0] = ('f', sha1, stat_value.st_size, entry[1][0][3],
1914
 
                               packed_stat)
 
1885
                               pack_stat(stat_value))
1915
1886
                self._mark_modified([entry])
1916
1887
 
1917
1888
    def _sha_cutoff_time(self):
1962
1933
            # paths are produced by UnicodeDirReader on purpose.
1963
1934
            abspath = abspath.encode(fs_encoding)
1964
1935
        target = os.readlink(abspath)
1965
 
        if fs_encoding not in ('UTF-8', 'US-ASCII', 'ANSI_X3.4-1968'):
 
1936
        if fs_encoding not in ('utf-8', 'ascii'):
1966
1937
            # Change encoding if needed
1967
1938
            target = target.decode(fs_encoding).encode('UTF-8')
1968
1939
        return target
2470
2441
            raise errors.BzrError('missing num_entries line')
2471
2442
        self._num_entries = int(num_entries_line[len('num_entries: '):-1])
2472
2443
 
2473
 
    def sha1_from_stat(self, path, stat_result, _pack_stat=pack_stat):
 
2444
    def sha1_from_stat(self, path, stat_result):
2474
2445
        """Find a sha1 given a stat lookup."""
2475
 
        return self._get_packed_stat_index().get(_pack_stat(stat_result), None)
 
2446
        return self._get_packed_stat_index().get(pack_stat(stat_result), None)
2476
2447
 
2477
2448
    def _get_packed_stat_index(self):
2478
2449
        """Get a packed_stat index of self._dirblocks."""
2508
2479
        #       IN_MEMORY_HASH_MODIFIED, we should only fail quietly if we fail
2509
2480
        #       to save an IN_MEMORY_HASH_MODIFIED, and fail *noisily* if we
2510
2481
        #       fail to save IN_MEMORY_MODIFIED
2511
 
        if self._worth_saving():
2512
 
            grabbed_write_lock = False
2513
 
            if self._lock_state != 'w':
2514
 
                grabbed_write_lock, new_lock = self._lock_token.temporary_write_lock()
2515
 
                # Switch over to the new lock, as the old one may be closed.
 
2482
        if not self._worth_saving():
 
2483
            return
 
2484
 
 
2485
        grabbed_write_lock = False
 
2486
        if self._lock_state != 'w':
 
2487
            grabbed_write_lock, new_lock = self._lock_token.temporary_write_lock()
 
2488
            # Switch over to the new lock, as the old one may be closed.
 
2489
            # TODO: jam 20070315 We should validate the disk file has
 
2490
            #       not changed contents, since temporary_write_lock may
 
2491
            #       not be an atomic operation.
 
2492
            self._lock_token = new_lock
 
2493
            self._state_file = new_lock.f
 
2494
            if not grabbed_write_lock:
 
2495
                # We couldn't grab a write lock, so we switch back to a read one
 
2496
                return
 
2497
        try:
 
2498
            lines = self.get_lines()
 
2499
            self._state_file.seek(0)
 
2500
            self._state_file.writelines(lines)
 
2501
            self._state_file.truncate()
 
2502
            self._state_file.flush()
 
2503
            self._maybe_fdatasync()
 
2504
            self._mark_unmodified()
 
2505
        finally:
 
2506
            if grabbed_write_lock:
 
2507
                self._lock_token = self._lock_token.restore_read_lock()
 
2508
                self._state_file = self._lock_token.f
2516
2509
                # TODO: jam 20070315 We should validate the disk file has
2517
 
                #       not changed contents. Since temporary_write_lock may
2518
 
                #       not be an atomic operation.
2519
 
                self._lock_token = new_lock
2520
 
                self._state_file = new_lock.f
2521
 
                if not grabbed_write_lock:
2522
 
                    # We couldn't grab a write lock, so we switch back to a read one
2523
 
                    return
2524
 
            try:
2525
 
                lines = self.get_lines()
2526
 
                self._state_file.seek(0)
2527
 
                self._state_file.writelines(lines)
2528
 
                self._state_file.truncate()
2529
 
                self._state_file.flush()
2530
 
                self._mark_unmodified()
2531
 
            finally:
2532
 
                if grabbed_write_lock:
2533
 
                    self._lock_token = self._lock_token.restore_read_lock()
2534
 
                    self._state_file = self._lock_token.f
2535
 
                    # TODO: jam 20070315 We should validate the disk file has
2536
 
                    #       not changed contents. Since restore_read_lock may
2537
 
                    #       not be an atomic operation.
 
2510
                #       not changed contents. Since restore_read_lock may
 
2511
                #       not be an atomic operation.                
 
2512
 
 
2513
    def _maybe_fdatasync(self):
 
2514
        """Flush to disk if possible and if not configured off."""
 
2515
        if self._config_stack.get('dirstate.fdatasync'):
 
2516
            osutils.fdatasync(self._state_file.fileno())
2538
2517
 
2539
2518
    def _worth_saving(self):
2540
2519
        """Is it worth saving the dirstate or not?"""
2596
2575
        self.update_minimal(('', '', new_id), 'd',
2597
2576
            path_utf8='', packed_stat=entry[1][0][4])
2598
2577
        self._mark_modified()
2599
 
        # XXX: This was added by Ian, we need to make sure there
2600
 
        #      are tests for it, because it isn't in bzr.dev TRUNK
2601
 
        #      It looks like the only place it is called is in setting the root
2602
 
        #      id of the tree. So probably we never had an _id_index when we
2603
 
        #      don't even have a root yet.
2604
 
        if self._id_index is not None:
2605
 
            self._add_to_id_index(self._id_index, entry[0])
2606
2578
 
2607
2579
    def set_parent_trees(self, trees, ghosts):
2608
2580
        """Set the parent trees for the dirstate.
3315
3287
        if self._id_index is not None:
3316
3288
            for file_id, entry_keys in self._id_index.iteritems():
3317
3289
                for entry_key in entry_keys:
 
3290
                    # Check that the entry in the map is pointing to the same
 
3291
                    # file_id
3318
3292
                    if entry_key[2] != file_id:
3319
3293
                        raise AssertionError(
3320
3294
                            'file_id %r did not match entry key %s'
3321
3295
                            % (file_id, entry_key))
 
3296
                    # And that from this entry key, we can look up the original
 
3297
                    # record
 
3298
                    block_index, present = self._find_block_index_from_key(entry_key)
 
3299
                    if not present:
 
3300
                        raise AssertionError('missing block for entry key: %r', entry_key)
 
3301
                    entry_index, present = self._find_entry_index(entry_key, self._dirblocks[block_index][1])
 
3302
                    if not present:
 
3303
                        raise AssertionError('missing entry for key: %r', entry_key)
3322
3304
                if len(entry_keys) != len(set(entry_keys)):
3323
3305
                    raise AssertionError(
3324
3306
                        'id_index contained non-unique data for %s'
3385
3367
 
3386
3368
 
3387
3369
def py_update_entry(state, entry, abspath, stat_value,
3388
 
                 _stat_to_minikind=DirState._stat_to_minikind,
3389
 
                 _pack_stat=pack_stat):
 
3370
                 _stat_to_minikind=DirState._stat_to_minikind):
3390
3371
    """Update the entry based on what is actually on disk.
3391
3372
 
3392
3373
    This function only calculates the sha if it needs to - if the entry is
3405
3386
    except KeyError:
3406
3387
        # Unhandled kind
3407
3388
        return None
3408
 
    packed_stat = _pack_stat(stat_value)
 
3389
    packed_stat = pack_stat(stat_value)
3409
3390
    (saved_minikind, saved_link_or_sha1, saved_file_size,
3410
3391
     saved_executable, saved_packed_stat) = entry[1][0]
3411
3392
 
4284
4265
        _bisect_path_left,
4285
4266
        _bisect_path_right,
4286
4267
        cmp_by_dirs,
 
4268
        pack_stat,
4287
4269
        ProcessEntryC as _process_entry,
4288
4270
        update_entry as update_entry,
4289
4271
        )
4295
4277
        _bisect_path_left,
4296
4278
        _bisect_path_right,
4297
4279
        cmp_by_dirs,
 
4280
        pack_stat,
4298
4281
        )
4299
4282
    # FIXME: It would be nice to be able to track moved lines so that the
4300
4283
    # corresponding python code can be moved to the _dirstate_helpers_py