~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/workingtree_4.py

Unbreak status --short.

# Copyright (C) 2005, 2006 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

"""WorkingTree4 format and implementation.

WorkingTree4 provides the dirstate based working tree logic.

To get a WorkingTree, call bzrdir.open_workingtree() or
WorkingTree.open(dir).
"""
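# A minimal usage sketch for the docstring above (an editor's illustration,
# not part of the original module).  'path/to/branch' is a placeholder for an
# existing branch that has a working tree on local disk:
#
#   from bzrlib import bzrdir
#   from bzrlib.workingtree import WorkingTree
#
#   tree = bzrdir.BzrDir.open('path/to/branch').open_workingtree()
#   # or, equivalently:
#   tree = WorkingTree.open('path/to/branch')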

from cStringIO import StringIO
import os
import sys

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from bisect import bisect_left
import collections
from copy import deepcopy
import errno
import itertools
import operator
import stat
from time import time
import warnings

import bzrlib
from bzrlib import (
    bzrdir,
    cache_utf8,
    conflicts as _mod_conflicts,
    delta,
    dirstate,
    errors,
    generate_ids,
    globbing,
    hashcache,
    ignores,
    merge,
    osutils,
    revisiontree,
    textui,
    transform,
    urlutils,
    xml5,
    xml6,
    )
import bzrlib.branch
from bzrlib.transport import get_transport
import bzrlib.ui
""")

from bzrlib import symbol_versioning
from bzrlib.decorators import needs_read_lock, needs_write_lock
from bzrlib.inventory import InventoryEntry, Inventory, ROOT_ID, entry_factory
from bzrlib.lockable_files import LockableFiles, TransportLock
from bzrlib.lockdir import LockDir
import bzrlib.mutabletree
from bzrlib.mutabletree import needs_tree_write_lock
from bzrlib.osutils import (
    compact_date,
    file_kind,
    isdir,
    normpath,
    pathjoin,
    rand_chars,
    realpath,
    safe_unicode,
    splitpath,
    supports_executable,
    )
from bzrlib.trace import mutter, note
from bzrlib.transport.local import LocalTransport
from bzrlib.tree import InterTree
from bzrlib.progress import DummyProgress, ProgressPhase
from bzrlib.revision import NULL_REVISION, CURRENT_REVISION
from bzrlib.rio import RioReader, rio_file, Stanza
from bzrlib.symbol_versioning import (deprecated_passed,
        deprecated_method,
        deprecated_function,
        DEPRECATED_PARAMETER,
        zero_eight,
        zero_eleven,
        zero_thirteen,
        )
from bzrlib.tree import Tree
from bzrlib.workingtree import WorkingTree, WorkingTree3, WorkingTreeFormat3


class WorkingTree4(WorkingTree3):
    """This is the Format 4 working tree.

    This differs from WorkingTree3 by:
     - having a consolidated internal dirstate.
     - not having a regular inventory attribute.

    This is new in bzr TODO FIXME SETMEBEFORE MERGE.
    """

    def __init__(self, basedir,
                 branch,
                 _control_files=None,
                 _format=None,
                 _bzrdir=None):
        """Construct a WorkingTree for basedir.

        If the branch is not supplied, it is opened automatically.
        If the branch is supplied, it must be the branch for this basedir.
        (branch.base is not cross checked, because for remote branches that
        would be meaningless).
        """
        self._format = _format
        self.bzrdir = _bzrdir
        from bzrlib.hashcache import HashCache
        from bzrlib.trace import note, mutter
        assert isinstance(basedir, basestring), \
            "base directory %r is not a string" % basedir
        basedir = safe_unicode(basedir)
        mutter("opening working tree %r", basedir)
        self._branch = branch
        assert isinstance(self.branch, bzrlib.branch.Branch), \
            "branch %r is not a Branch" % self.branch
        self.basedir = realpath(basedir)
        # if branch is at our basedir and is a format 6 or less
        # assume all other formats have their own control files.
        assert isinstance(_control_files, LockableFiles), \
            "_control_files must be a LockableFiles, not %r" % _control_files
        self._control_files = _control_files
        # update the whole cache up front and write to disk if anything changed;
        # in the future we might want to do this more selectively
        # two possible ways offer themselves : in self._unlock, write the cache
        # if needed, or, when the cache sees a change, append it to the hash
        # cache file, and have the parser take the most recent entry for a
        # given path only.
        cache_filename = self.bzrdir.get_workingtree_transport(None).local_abspath('stat-cache')
        hc = self._hashcache = HashCache(basedir, cache_filename, self._control_files._file_mode)
        hc.read()
        # is this scan needed ? it makes things kinda slow.
        #hc.scan()

        if hc.needs_write:
            mutter("write hc")
            hc.write()

        self._dirty = None
        #-------------
        # during a read or write lock these objects are set, and are
        # None the rest of the time.
        self._dirstate = None
        self._inventory = None
        #-------------

    @needs_tree_write_lock
    def _add(self, files, ids, kinds):
        """See MutableTree._add."""
        state = self.current_dirstate()
        for f, file_id, kind in zip(files, ids, kinds):
            f = f.strip('/')
            assert '//' not in f
            assert '..' not in f
            if file_id is None:
                file_id = generate_ids.gen_file_id(f)
            # deliberately add the file with no cached stat or sha1
            # - on the first access it will be gathered, and we can
            # always change this once tests are all passing.
            state.add(f, file_id, kind, None, '')
        self._dirty = True

    def break_lock(self):
        """Break a lock if one is present from another instance.

        Uses the ui factory to ask for confirmation if the lock may be from
        an active process.

        This will probe the repository for its lock as well.
        """
        # if the dirstate is locked by an active process, reject the break lock
        # call.
        try:
            if self._dirstate is None:
                clear = True
            else:
                clear = False
            state = self._current_dirstate()
            if state._lock_token is not None:
                # we already have it locked. sheesh, we can't break our own lock.
                raise errors.LockActive(self.basedir)
            else:
                try:
                    # try for a write lock - need permission to get one anyhow
                    # to break locks.
                    state.lock_write()
                except errors.LockContention:
                    # oslocks fail when a process is still live: fail.
                    # TODO: get the locked lockdir info and give to the user to
                    # assist in debugging.
                    raise errors.LockActive(self.basedir)
                else:
                    state.unlock()
        finally:
            if clear:
                self._dirstate = None
        self._control_files.break_lock()
        self.branch.break_lock()

    def current_dirstate(self):
        """Return the current dirstate object.

        This is not part of the tree interface and only exposed for ease of
        testing.

        :raises errors.ObjectNotLocked: when not in a lock.
        """
        if not self._control_files._lock_count:
            raise errors.ObjectNotLocked(self)
        return self._current_dirstate()

    def _current_dirstate(self):
        """Internal function that does not check lock status.

        This is needed for break_lock which also needs the dirstate.
        """
        if self._dirstate is not None:
            return self._dirstate
        local_path = self.bzrdir.get_workingtree_transport(None
            ).local_abspath('dirstate')
        self._dirstate = dirstate.DirState.on_file(local_path)
        return self._dirstate

    def filter_unversioned_files(self, paths):
        """Filter out paths that are not versioned.

        :return: set of paths.
        """
        # TODO: make a generic multi-bisect routine roughly that should list
        # the paths, then process one half at a time recursively, and feed the
        # results of each bisect in further still
        paths = sorted(paths)
        result = set()
        state = self.current_dirstate()
        # TODO we want a paths_to_dirblocks helper I think
        for path in paths:
            dirname, basename = os.path.split(path.encode('utf8'))
            _, _, _, path_is_versioned = state._get_block_entry_index(
                dirname, basename, 0)
            if path_is_versioned:
                result.add(path)
        return result

    def flush(self):
        """Write all cached data to disk."""
        if self._control_files._lock_mode != 'w':
            raise errors.NotWriteLocked(self)
        self.current_dirstate().save()
        self._inventory = None
        self._dirty = False

    def _generate_inventory(self):
        """Create and set self.inventory from the dirstate object.

        This is relatively expensive: we have to walk the entire dirstate.
        Ideally we would not, and can deprecate this function.
        """
        #: uncomment to trap on inventory requests.
        # import pdb;pdb.set_trace()
        state = self.current_dirstate()
        state._read_dirblocks_if_needed()
        root_key, current_entry = self._get_entry(path='')
        current_id = root_key[2]
        assert current_entry[0][0] == 'd' # directory
        inv = Inventory(root_id=current_id)
        # Turn some things into local variables
        minikind_to_kind = dirstate.DirState._minikind_to_kind
        factory = entry_factory
        utf8_decode = cache_utf8._utf8_decode
        inv_byid = inv._byid
        # we could do this straight out of the dirstate; it might be fast
        # and should be profiled - RBC 20070216
        parent_ies = {'' : inv.root}
        for block in state._dirblocks[1:]: # skip the root
            dirname = block[0]
            try:
                parent_ie = parent_ies[block[0]]
            except KeyError:
                # all the paths in this block are not versioned in this tree
                continue
            for key, entry in block[1]:
                minikind, link_or_sha1, size, executable, stat = entry[0]
                if minikind in ('a', 'r'): # absent, relocated
                    # a parent tree only entry
                    continue
                name = key[1]
                name_unicode = utf8_decode(name)[0]
                file_id = key[2]
                kind = minikind_to_kind[minikind]
                inv_entry = factory[kind](file_id, name_unicode,
                                          parent_ie.file_id)
                if kind == 'file':
                    # not strictly needed: working tree
                    #entry.executable = executable
                    #entry.text_size = size
                    #entry.text_sha1 = sha1
                    pass
                elif kind == 'directory':
                    # add this entry to the parent map.
                    parent_ies[(dirname + '/' + name).strip('/')] = inv_entry
                # These checks cost us around 40ms on a 55k entry tree
                assert file_id not in inv_byid
                assert name_unicode not in parent_ie.children
                inv_byid[file_id] = inv_entry
                parent_ie.children[name_unicode] = inv_entry
        self._inventory = inv

    def _get_entry(self, file_id=None, path=None):
        """Get the dirstate row for file_id or path.

        If either file_id or path is supplied, it is used as the key to lookup.
        If both are supplied, the fastest lookup is used, and an error is
        raised if they do not both point at the same row.

        :param file_id: An optional unicode file_id to be looked up.
        :param path: An optional unicode path to be looked up.
        :return: The dirstate row tuple for path/file_id, or (None, None)
        """
        if file_id is None and path is None:
            raise errors.BzrError('must supply file_id or path')
        state = self.current_dirstate()
        if path is not None:
            path = path.encode('utf8')
        return state._get_entry(0, fileid_utf8=file_id, path_utf8=path)
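    # Editor's illustration of the row shape returned above, inferred from how
    # callers in this module unpack it; treat as a sketch, not a contract:
    #
    #   ((dirname, basename, file_id),                             # the key
    #    [(minikind, fingerprint, size, executable, packed_stat),  # this tree
    #     ...])                            # one details tuple per parent tree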

    def get_file_sha1(self, file_id, path=None, stat_value=None):
        # check file id is valid unconditionally.
        key, details = self._get_entry(file_id=file_id, path=path)
        assert key is not None, 'what error should this raise'
        # TODO:
        # if row stat is valid, use cached sha1, else, get a new sha1.
        if path is None:
            path = os.path.join(*key[0:2]).decode('utf8')
        return self._hashcache.get_sha1(path, stat_value)

    def _get_inventory(self):
        """Get the inventory for the tree. This is only valid within a lock."""
        if self._inventory is not None:
            return self._inventory
        self._generate_inventory()
        return self._inventory

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    @needs_read_lock
    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation requests the ids list from the dirstate file.
        """
        return self.current_dirstate().get_parent_ids()

    @needs_read_lock
    def get_root_id(self):
        """Return the id of this tree's root."""
        return self._get_entry(path='')[0][2]

    def has_id(self, file_id):
        state = self.current_dirstate()
        file_id = osutils.safe_file_id(file_id)
        row, parents = self._get_entry(file_id=file_id)
        if row is None:
            return False
        return osutils.lexists(pathjoin(
                    self.basedir, row[0].decode('utf8'), row[1].decode('utf8')))

    @needs_read_lock
    def id2path(self, file_id):
        file_id = osutils.safe_file_id(file_id)
        state = self.current_dirstate()
        possible_dir_name_ids = state._get_id_index().get(file_id, None)
        entry = self._get_entry(file_id=file_id)
        if entry == (None, None):
            return None
        path_utf8 = osutils.pathjoin(entry[0][0], entry[0][1])
        return path_utf8.decode('utf8')

    @needs_read_lock
    def __iter__(self):
        """Iterate through file_ids for this tree.

        file_ids are in a WorkingTree if they are in the working inventory
        and the working file exists.
        """
        result = []
        for key, tree_details in self.current_dirstate()._iter_entries():
            if tree_details[0][0] in ('a', 'r'): # absent, relocated
                # not relevant to the working tree
                continue
            path = pathjoin(self.basedir, key[0].decode('utf8'), key[1].decode('utf8'))
            if osutils.lexists(path):
                result.append(key[2])
        return iter(result)

    @needs_read_lock
    def _last_revision(self):
        """See Mutable.last_revision."""
        parent_ids = self.current_dirstate().get_parent_ids()
        if parent_ids:
            return parent_ids[0]
        else:
            return None

    def lock_read(self):
        super(WorkingTree4, self).lock_read()
        if self._dirstate is None:
            self.current_dirstate()
            self._dirstate.lock_read()

    def lock_tree_write(self):
        super(WorkingTree4, self).lock_tree_write()
        if self._dirstate is None:
            self.current_dirstate()
            self._dirstate.lock_write()

    def lock_write(self):
        super(WorkingTree4, self).lock_write()
        if self._dirstate is None:
            self.current_dirstate()
            self._dirstate.lock_write()

    @needs_tree_write_lock
    def move(self, from_paths, to_dir, after=False):
        """See WorkingTree.move()."""
        if not from_paths:
            return ()

        state = self.current_dirstate()

        assert not isinstance(from_paths, basestring)
        to_dir_utf8 = to_dir.encode('utf8')
        to_entry_dirname, to_basename = os.path.split(to_dir_utf8)
        id_index = state._get_id_index()
        # check destination directory
        # get the details for it
        to_entry_block_index, to_entry_entry_index, dir_present, entry_present = \
            state._get_block_entry_index(to_entry_dirname, to_basename, 0)
        if not entry_present:
            raise errors.BzrMoveFailedError('', to_dir,
                errors.NotInWorkingDirectory(to_dir))
        to_entry = state._dirblocks[to_entry_block_index][1][to_entry_entry_index]
        # get a handle on the block itself.
        to_block_index = state._ensure_block(
            to_entry_block_index, to_entry_entry_index, to_dir_utf8)
        to_block = state._dirblocks[to_block_index]
        to_abs = self.abspath(to_dir)
        if not isdir(to_abs):
            raise errors.BzrMoveFailedError('',to_dir,
                errors.NotADirectory(to_abs))

        if to_entry[1][0][0] != 'd':
            raise errors.BzrMoveFailedError('',to_dir,
                errors.NotADirectory(to_abs))

        if self._inventory is not None:
            update_inventory = True
            inv = self.inventory
            to_dir_id = to_entry[0][2]
            to_dir_ie = inv[to_dir_id]
        else:
            update_inventory = False

        rollbacks = []
        def move_one(old_entry, from_path_utf8, minikind, executable,
                     fingerprint, packed_stat, size,
                     to_block, to_key, to_path_utf8):
            state._make_absent(old_entry)
            from_key = old_entry[0]
            rollbacks.append(
                lambda:state.update_minimal(from_key,
                    minikind,
                    executable=executable,
                    fingerprint=fingerprint,
                    packed_stat=packed_stat,
                    size=size,
                    path_utf8=from_path_utf8))
            state.update_minimal(to_key,
                    minikind,
                    executable=executable,
                    fingerprint=fingerprint,
                    packed_stat=packed_stat,
                    size=size,
                    path_utf8=to_path_utf8)
            added_entry_index, _ = state._find_entry_index(to_key, to_block[1])
            new_entry = to_block[1][added_entry_index]
            rollbacks.append(lambda:state._make_absent(new_entry))
        # create rename entries and tuples
        for from_rel in from_paths:
            # from_rel is 'pathinroot/foo/bar'
            from_rel_utf8 = from_rel.encode('utf8')
            from_dirname, from_tail = osutils.split(from_rel)
            from_dirname, from_tail_utf8 = osutils.split(from_rel_utf8)
            from_entry = self._get_entry(path=from_rel)
            if from_entry == (None, None):
                raise errors.BzrMoveFailedError(from_rel,to_dir,
                    errors.NotVersionedError(path=str(from_rel)))

            from_id = from_entry[0][2]
            to_rel = pathjoin(to_dir, from_tail)
            to_rel_utf8 = pathjoin(to_dir_utf8, from_tail_utf8)
            item_to_entry = self._get_entry(path=to_rel)
            if item_to_entry != (None, None):
                raise errors.BzrMoveFailedError(from_rel, to_rel,
                    "Target is already versioned.")

            if from_rel == to_rel:
                raise errors.BzrMoveFailedError(from_rel, to_rel,
                    "Source and target are identical.")

            from_missing = not self.has_filename(from_rel)
            to_missing = not self.has_filename(to_rel)
            if after:
                move_file = False
            else:
                move_file = True
            if to_missing:
                if not move_file:
                    raise errors.BzrMoveFailedError(from_rel, to_rel,
                        errors.NoSuchFile(path=to_rel,
                        extra="New file has not been created yet"))
                elif from_missing:
                    # neither path exists
                    raise errors.BzrRenameFailedError(from_rel, to_rel,
                        errors.PathsDoNotExist(paths=(from_rel, to_rel)))
            else:
                if from_missing: # implicitly just update our path mapping
                    move_file = False
                elif not after:
                    raise errors.RenameFailedFilesExist(from_rel, to_rel,
                        extra="(Use --after to update the Bazaar id)")

            rollbacks = []
            def rollback_rename():
                """A single rename has failed, roll it back."""
                exc_info = None
                for rollback in reversed(rollbacks):
                    try:
                        rollback()
                    except Exception, e:
                        import pdb;pdb.set_trace()
                        exc_info = sys.exc_info()
                if exc_info:
                    raise exc_info[0], exc_info[1], exc_info[2]

            # perform the disk move first - it's the most likely failure point.
            if move_file:
                from_rel_abs = self.abspath(from_rel)
                to_rel_abs = self.abspath(to_rel)
                try:
                    osutils.rename(from_rel_abs, to_rel_abs)
                except OSError, e:
                    raise errors.BzrMoveFailedError(from_rel, to_rel, e[1])
                rollbacks.append(lambda: osutils.rename(to_rel_abs, from_rel_abs))
            try:
                # perform the rename in the inventory next if needed: it's easy
                # to roll back
                if update_inventory:
                    # rename the entry
                    from_entry = inv[from_id]
                    current_parent = from_entry.parent_id
                    inv.rename(from_id, to_dir_id, from_tail)
                    rollbacks.append(
                        lambda: inv.rename(from_id, current_parent, from_tail))
                # finally do the rename in the dirstate, which is a little
                # tricky to rollback, but least likely to need it.
                old_block_index, old_entry_index, dir_present, file_present = \
                    state._get_block_entry_index(from_dirname, from_tail_utf8, 0)
                old_block = state._dirblocks[old_block_index][1]
                old_entry = old_block[old_entry_index]
                from_key, old_entry_details = old_entry
                cur_details = old_entry_details[0]
                # remove the old row
                to_key = ((to_block[0],) + from_key[1:3])
                minikind = cur_details[0]
                move_one(old_entry, from_path_utf8=from_rel_utf8,
                         minikind=minikind,
                         executable=cur_details[3],
                         fingerprint=cur_details[1],
                         packed_stat=cur_details[4],
                         size=cur_details[2],
                         to_block=to_block,
                         to_key=to_key,
                         to_path_utf8=to_rel_utf8)

                if minikind == 'd':
                    def update_dirblock(from_dir, to_key, to_dir_utf8):
                        """all entries in this block need updating.

                        TODO: This is pretty ugly, and doesn't support
                        reverting, but it works.
                        """
                        assert from_dir != '', "renaming root not supported"
                        from_key = (from_dir, '')
                        from_block_idx, present = \
                            state._find_block_index_from_key(from_key)
                        if not present:
                            # This is the old record, if it isn't present, then
                            # there is theoretically nothing to update.
                            # (Unless it isn't present because of lazy loading,
                            # but we don't do that yet)
                            return
                        from_block = state._dirblocks[from_block_idx]
                        to_block_index, to_entry_index, _, _ = \
                            state._get_block_entry_index(to_key[0], to_key[1], 0)
                        to_block_index = state._ensure_block(
                            to_block_index, to_entry_index, to_dir_utf8)
                        to_block = state._dirblocks[to_block_index]
                        for entry in from_block[1]:
                            assert entry[0][0] == from_dir
                            cur_details = entry[1][0]
                            to_key = (to_dir_utf8, entry[0][1], entry[0][2])
                            from_path_utf8 = osutils.pathjoin(entry[0][0], entry[0][1])
                            to_path_utf8 = osutils.pathjoin(to_dir_utf8, entry[0][1])
                            minikind = cur_details[0]
                            move_one(entry, from_path_utf8=from_path_utf8,
                                     minikind=minikind,
                                     executable=cur_details[3],
                                     fingerprint=cur_details[1],
                                     packed_stat=cur_details[4],
                                     size=cur_details[2],
                                     to_block=to_block,
                                     to_key=to_key,
                                     to_path_utf8=to_rel_utf8)
                            if minikind == 'd':
                                # We need to move all the children of this
                                # entry
                                update_dirblock(from_path_utf8, to_key,
                                                to_path_utf8)
                    update_dirblock(from_rel_utf8, to_key, to_rel_utf8)
            except:
                rollback_rename()
                raise
            state._dirblock_state = dirstate.DirState.IN_MEMORY_MODIFIED
            self._dirty = True

        return #rename_tuples

    def _new_tree(self):
        """Initialize the state in this tree to be a new tree."""
        self._dirty = True

    @needs_read_lock
    def path2id(self, path):
        """Return the id for path in this tree."""
        entry = self._get_entry(path=path)
        if entry == (None, None):
            return None
        return entry[0][2]

    def paths2ids(self, paths, trees=[], require_versioned=True):
        """See Tree.paths2ids().

        This specialisation fast-paths the case where all the trees are in the
        dirstate.
        """
        if paths is None:
            return None
        parents = self.get_parent_ids()
        for tree in trees:
            if not (isinstance(tree, DirStateRevisionTree) and tree._revision_id in
                parents):
                return super(WorkingTree4, self).paths2ids(paths, trees, require_versioned)
        search_indexes = [0] + [1 + parents.index(tree._revision_id) for tree in trees]
        # -- make all paths utf8 --
        paths_utf8 = set()
        for path in paths:
            paths_utf8.add(path.encode('utf8'))
        paths = paths_utf8
        # -- paths is now a utf8 path set --
        # -- get the state object and prepare it.
        state = self.current_dirstate()
        if False and (state._dirblock_state == dirstate.DirState.NOT_IN_MEMORY
            and '' not in paths):
            paths2ids = self._paths2ids_using_bisect
        else:
            paths2ids = self._paths2ids_in_memory
        return paths2ids(paths, search_indexes,
                         require_versioned=require_versioned)

    def _paths2ids_in_memory(self, paths, search_indexes,
                             require_versioned=True):
        state = self.current_dirstate()
        state._read_dirblocks_if_needed()
        def _entries_for_path(path):
            """Return a list with all the entries that match path for all ids.
            """
            dirname, basename = os.path.split(path)
            key = (dirname, basename, '')
            block_index, present = state._find_block_index_from_key(key)
            if not present:
                # the block which should contain path is absent.
                return []
            result = []
            block = state._dirblocks[block_index][1]
            entry_index, _ = state._find_entry_index(key, block)
            # we may need to look at multiple entries at this path: walk while the paths match.
            while (entry_index < len(block) and
                block[entry_index][0][0:2] == key[0:2]):
                result.append(block[entry_index])
                entry_index += 1
            return result
        if require_versioned:
            # -- check all supplied paths are versioned in a search tree. --
            all_versioned = True
            for path in paths:
                path_entries = _entries_for_path(path)
                if not path_entries:
                    # this specified path is not present at all: error
                    all_versioned = False
                    break
                found_versioned = False
                # for each id at this path
                for entry in path_entries:
                    # for each tree.
                    for index in search_indexes:
                        if entry[1][index][0] != 'a': # absent
                            found_versioned = True
                            # all good: found a versioned cell
                            break
                if not found_versioned:
                    # no search index had a non-absent entry for any id at
                    # this path.
                    all_versioned = False
                    break
            if not all_versioned:
                raise errors.PathsNotVersionedError(paths)
        # -- remove redundancy in supplied paths to prevent over-scanning --
        search_paths = set()
        for path in paths:
            other_paths = paths.difference(set([path]))
            if not osutils.is_inside_any(other_paths, path):
                # this is a top level path, we must check it.
                search_paths.add(path)
        # sketch:
        # for all search_indexes in each path at or under each element of
        # search_paths, if the detail is relocated: add the id, and add the
        # relocated path as one to search if it's not searched already. If the
        # detail is not relocated, add the id.
        searched_paths = set()
        found_ids = set()
        def _process_entry(entry):
            """Look at search_indexes within entry.

            If a specific tree's details are relocated, add the relocation
            target to search_paths if not searched already. If it is absent, do
            nothing. Otherwise add the id to found_ids.
            """
            for index in search_indexes:
                if entry[1][index][0] == 'r': # relocated
                    if not osutils.is_inside_any(searched_paths, entry[1][index][1]):
                        search_paths.add(entry[1][index][1])
                elif entry[1][index][0] != 'a': # absent
                    found_ids.add(entry[0][2])
        while search_paths:
            current_root = search_paths.pop()
            searched_paths.add(current_root)
            # process the entries for this containing directory: the rest will be
            # found by their parents recursively.
            root_entries = _entries_for_path(current_root)
            if not root_entries:
                # this specified path is not present at all, skip it.
                continue
            for entry in root_entries:
                _process_entry(entry)
            initial_key = (current_root, '', '')
            block_index, _ = state._find_block_index_from_key(initial_key)
            while (block_index < len(state._dirblocks) and
                osutils.is_inside(current_root, state._dirblocks[block_index][0])):
                for entry in state._dirblocks[block_index][1]:
                    _process_entry(entry)
                block_index += 1
        return found_ids

    def _paths2ids_using_bisect(self, paths, search_indexes,
                                require_versioned=True):
        state = self.current_dirstate()
        found_ids = set()

        split_paths = sorted(osutils.split(p) for p in paths)
        found = state._bisect_recursive(split_paths)

        if require_versioned:
            found_dir_names = set(dir_name_id[:2] for dir_name_id in found)
            for dir_name in split_paths:
                if dir_name not in found_dir_names:
                    raise errors.PathsNotVersionedError(paths)

        for dir_name_id, trees_info in found.iteritems():
            for index in search_indexes:
                if trees_info[index][0] not in ('r', 'a'):
                    found_ids.add(dir_name_id[2])
        return found_ids

    def read_working_inventory(self):
        """Read the working inventory.

        This is a meaningless operation for dirstate, but we obey it anyhow.
        """
        return self.inventory

    @needs_read_lock
    def revision_tree(self, revision_id):
        """See Tree.revision_tree.

        WorkingTree4 supplies revision_trees for any basis tree.
        """
        revision_id = osutils.safe_revision_id(revision_id)
        dirstate = self.current_dirstate()
        parent_ids = dirstate.get_parent_ids()
        if revision_id not in parent_ids:
            raise errors.NoSuchRevisionInTree(self, revision_id)
        if revision_id in dirstate.get_ghosts():
            raise errors.NoSuchRevisionInTree(self, revision_id)
        return DirStateRevisionTree(dirstate, revision_id,
            self.branch.repository)

    @needs_tree_write_lock
    def set_last_revision(self, new_revision):
        """Change the last revision in the working tree."""
        new_revision = osutils.safe_revision_id(new_revision)
        parents = self.get_parent_ids()
        if new_revision in (NULL_REVISION, None):
            assert len(parents) < 2, (
                "setting the last parent to none with a pending merge is "
                "unsupported.")
            self.set_parent_ids([])
        else:
            self.set_parent_ids([new_revision] + parents[1:],
                allow_leftmost_as_ghost=True)

    @needs_tree_write_lock
    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This API will try to retrieve the tree data
        for each element of revision_ids from the tree's repository. If you have
        tree data already available, it is more efficient to use
        set_parent_trees rather than set_parent_ids. set_parent_ids is however
        an easier API to use.

        :param revision_ids: The revision_ids to set as the parent ids of this
            working tree. Any of these may be ghosts.
        """
        revision_ids = [osutils.safe_revision_id(r) for r in revision_ids]
        trees = []
        for revision_id in revision_ids:
            try:
                revtree = self.branch.repository.revision_tree(revision_id)
                # TODO: jam 20070213 KnitVersionedFile raises
                #       RevisionNotPresent rather than NoSuchRevision if a
                #       given revision_id is not present. Should Repository be
                #       catching it and re-raising NoSuchRevision?
            except (errors.NoSuchRevision, errors.RevisionNotPresent):
                revtree = None
            trees.append((revision_id, revtree))
        self.set_parent_trees(trees,
            allow_leftmost_as_ghost=allow_leftmost_as_ghost)

    @needs_tree_write_lock
    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        """Set the parents of the working tree.

        :param parents_list: A list of (revision_id, tree) tuples.
            If tree is None, then that element is treated as an unreachable
            parent tree - i.e. a ghost.
        """
        dirstate = self.current_dirstate()
        if len(parents_list) > 0:
            if not allow_leftmost_as_ghost and parents_list[0][1] is None:
                raise errors.GhostRevisionUnusableHere(parents_list[0][0])
        real_trees = []
        ghosts = []
        # convert absent trees to the null tree, which we convert back to
        # missing on access.
        for rev_id, tree in parents_list:
            rev_id = osutils.safe_revision_id(rev_id)
            if tree is not None:
                real_trees.append((rev_id, tree))
            else:
                real_trees.append((rev_id,
                    self.branch.repository.revision_tree(None)))
                ghosts.append(rev_id)
        dirstate.set_parent_trees(real_trees, ghosts=ghosts)
        self._dirty = True

    def _set_root_id(self, file_id):
        """See WorkingTree.set_root_id."""
        state = self.current_dirstate()
        state.set_path_id('', file_id)
        self._dirty = state._dirblock_state == dirstate.DirState.IN_MEMORY_MODIFIED

    def unlock(self):
        """Unlock in format 4 trees needs to write the entire dirstate."""
        if self._control_files._lock_count == 1:
            self._write_hashcache_if_dirty()
            # eventually we should do signature checking during read locks for
            # dirstate updates.
            if self._control_files._lock_mode == 'w':
                if self._dirty:
                    self.flush()
            if self._dirstate is not None:
                self._dirstate.unlock()
            self._dirstate = None
            self._inventory = None
        # reverse order of locking.
        try:
            return self._control_files.unlock()
        finally:
            self.branch.unlock()

    @needs_tree_write_lock
    def unversion(self, file_ids):
        """Remove the file ids in file_ids from the current versioned set.

        When a file_id is unversioned, all of its children are automatically
        unversioned.

        :param file_ids: The file ids to stop versioning.
        :raises: NoSuchId if any fileid is not currently versioned.
        """
        if not file_ids:
            return
        state = self.current_dirstate()
        state._read_dirblocks_if_needed()
        ids_to_unversion = set()
        for file_id in file_ids:
            ids_to_unversion.add(osutils.safe_file_id(file_id))
        paths_to_unversion = set()
        # sketch:
        # check if the root is to be unversioned, if so, assert for now.
        # walk the state marking unversioned things as absent.
        # if there are any un-unversioned ids at the end, raise
        for key, details in state._dirblocks[0][1]:
            if (details[0][0] not in ('a', 'r') and # absent or relocated
                key[2] in ids_to_unversion):
                # I haven't written the code to unversion / yet - it should be
                # supported.
                raise errors.BzrError('Unversioning the / is not currently supported')
        details_length = len(state._dirblocks[0][1][0][1])
        block_index = 0
        while block_index < len(state._dirblocks):
            # process one directory at a time.
            block = state._dirblocks[block_index]
            # first check: is the path one to remove - it or its children
            delete_block = False
            for path in paths_to_unversion:
                if (block[0].startswith(path) and
                    (len(block[0]) == len(path) or
                     block[0][len(path)] == '/')):
                    # this entire block should be deleted - it's the block for a
                    # path to unversion; or the child of one
                    delete_block = True
                    break
            # TODO: trim paths_to_unversion as we pass by paths
            if delete_block:
                # this block is to be deleted: process it.
                # TODO: we can special case the no-parents case and
                # just forget the whole block.
                entry_index = 0
                while entry_index < len(block[1]):
                    if not state._make_absent(block[1][entry_index]):
                        entry_index += 1
                # go to the next block. (At the moment we don't delete empty
                # dirblocks)
                block_index += 1
                continue
            entry_index = 0
            while entry_index < len(block[1]):
                entry = block[1][entry_index]
                if (entry[1][0][0] in ('a', 'r') or # absent, relocated
                    # ^ some parent row.
                    entry[0][2] not in ids_to_unversion):
                    # ^ not an id to unversion
                    entry_index += 1
                    continue
                if entry[1][0][0] == 'd':
                    paths_to_unversion.add(os.path.join(*entry[0][0:2]))
                if not state._make_absent(entry):
                    entry_index += 1
                # we have unversioned this id
                ids_to_unversion.remove(entry[0][2])
            block_index += 1
        if ids_to_unversion:
            raise errors.NoSuchId(self, iter(ids_to_unversion).next())
        self._dirty = True
        # have to change the legacy inventory too.
        if self._inventory is not None:
            for file_id in file_ids:
                self._inventory.remove_recursive_id(file_id)

    @needs_tree_write_lock
    def _write_inventory(self, inv):
        """Write inventory as the current inventory."""
        assert not self._dirty, "attempting to write an inventory when the dirstate is dirty will cause data loss"
        self.current_dirstate().set_state_from_inventory(inv)
        self._dirty = True
        self.flush()


class WorkingTreeFormat4(WorkingTreeFormat3):
    """The first consolidated dirstate working tree format.

    This format:
        - exists within a metadir controlling .bzr
        - includes an explicit version marker for the workingtree control
          files, separate from the BzrDir format
        - modifies the hash cache format
        - is new in bzr TODO FIXME SETBEFOREMERGE
        - uses a LockDir to guard access to it.
    """

    def get_format_string(self):
        """See WorkingTreeFormat.get_format_string()."""
        return "Bazaar Working Tree format 4\n"

    def get_format_description(self):
        """See WorkingTreeFormat.get_format_description()."""
        return "Working tree format 4"

    def initialize(self, a_bzrdir, revision_id=None):
        """See WorkingTreeFormat.initialize().

        revision_id allows creating a working tree at a different
        revision than the branch is at.
        """
        revision_id = osutils.safe_revision_id(revision_id)
        if not isinstance(a_bzrdir.transport, LocalTransport):
            raise errors.NotLocalUrl(a_bzrdir.transport.base)
        transport = a_bzrdir.get_workingtree_transport(self)
        control_files = self._open_control_files(a_bzrdir)
        control_files.create_lock()
        control_files.lock_write()
        control_files.put_utf8('format', self.get_format_string())
        branch = a_bzrdir.open_branch()
        if revision_id is None:
            revision_id = branch.last_revision()
        local_path = transport.local_abspath('dirstate')
        state = dirstate.DirState.initialize(local_path)
        state.unlock()
        wt = WorkingTree4(a_bzrdir.root_transport.local_abspath('.'),
                         branch,
                         _format=self,
                         _bzrdir=a_bzrdir,
                         _control_files=control_files)
        wt._new_tree()
        wt.lock_write()
        try:
            #wt.current_dirstate().set_path_id('', NEWROOT)
            wt.set_last_revision(revision_id)
            wt.flush()
            basis = wt.basis_tree()
            basis.lock_read()
            transform.build_tree(basis, wt)
            basis.unlock()
        finally:
            control_files.unlock()
            wt.unlock()
        return wt


    def _open(self, a_bzrdir, control_files):
        """Open the tree itself.

        :param a_bzrdir: the dir for the tree.
        :param control_files: the control files for the tree.
        """
        return WorkingTree4(a_bzrdir.root_transport.local_abspath('.'),
                           branch=a_bzrdir.open_branch(),
                           _format=self,
                           _bzrdir=a_bzrdir,
                           _control_files=control_files)

class DirStateRevisionTree(Tree):
    """A revision tree pulling the inventory from a dirstate."""

    def __init__(self, dirstate, revision_id, repository):
        self._dirstate = dirstate
        self._revision_id = osutils.safe_revision_id(revision_id)
        self._repository = repository
        self._inventory = None
        self._locked = 0
        self._dirstate_locked = False

    def annotate_iter(self, file_id):
        """See Tree.annotate_iter"""
        w = self._repository.weave_store.get_weave(file_id,
                           self._repository.get_transaction())
        return w.annotate_iter(self.inventory[file_id].revision)

    def _comparison_data(self, entry, path):
        """See Tree._comparison_data."""
        if entry is None:
            return None, False, None
        # trust the entry as RevisionTree does, but this may not be
        # sensible: the entry might not have come from us?
        return entry.kind, entry.executable, None

    def _file_size(self, entry, stat_value):
        return entry.text_size

    def filter_unversioned_files(self, paths):
        """Filter out paths that are not versioned.

        :return: set of paths.
        """
        pred = self.has_filename
        return set((p for p in paths if not pred(p)))

    def _get_parent_index(self):
        """Return the index in the dirstate referenced by this tree."""
        return self._dirstate.get_parent_ids().index(self._revision_id) + 1

    def _get_entry(self, file_id=None, path=None):
        """Get the dirstate row for file_id or path.

        If either file_id or path is supplied, it is used as the key to lookup.
        If both are supplied, the fastest lookup is used, and an error is
        raised if they do not both point at the same row.

        :param file_id: An optional unicode file_id to be looked up.
        :param path: An optional unicode path to be looked up.
        :return: The dirstate row tuple for path/file_id, or (None, None)
        """
        if file_id is None and path is None:
            raise errors.BzrError('must supply file_id or path')
        file_id = osutils.safe_file_id(file_id)
        if path is not None:
            path = path.encode('utf8')
        parent_index = self._get_parent_index()
        return self._dirstate._get_entry(parent_index, fileid_utf8=file_id, path_utf8=path)
1155
    def _generate_inventory(self):
 
1156
        """Create and set self.inventory from the dirstate object.
 
1157
 
 
1158
        This is relatively expensive: we have to walk the entire dirstate.
 
1159
        Ideally we would not, and instead would """
 
1160
        assert self._locked, 'cannot generate inventory of an unlocked '\
 
1161
            'dirstate revision tree'
 
1162
        # separate call for profiling - makes it clear where the costs are.
 
1163
        self._dirstate._read_dirblocks_if_needed()
 
1164
        assert self._revision_id in self._dirstate.get_parent_ids(), \
 
1165
            'parent %s has disappeared from %s' % (
 
1166
            self._revision_id, self._dirstate.get_parent_ids())
 
1167
        parent_index = self._dirstate.get_parent_ids().index(self._revision_id) + 1
 
1168
        # This is identical now to the WorkingTree _generate_inventory except
 
1169
        # for the tree index use.
 
1170
        root_key, current_entry = self._dirstate._get_entry(parent_index, path_utf8='')
 
1171
        current_id = root_key[2]
 
1172
        assert current_entry[parent_index][0] == 'd'
 
1173
        inv = Inventory(root_id=current_id, revision_id=self._revision_id)
 
1174
        inv.root.revision = current_entry[parent_index][4]
 
1175
        # Turn some things into local variables
 
1176
        minikind_to_kind = dirstate.DirState._minikind_to_kind
 
1177
        factory = entry_factory
 
1178
        utf8_decode = cache_utf8._utf8_decode
 
1179
        inv_byid = inv._byid
 
1180
        # we could do this straight out of the dirstate; it might be fast
 
1181
        # and should be profiled - RBC 20070216
 
1182
        parent_ies = {'' : inv.root}
 
1183
        for block in self._dirstate._dirblocks[1:]: #skip root
 
1184
            dirname = block[0]
 
1185
            try:
 
1186
                parent_ie = parent_ies[dirname]
 
1187
            except KeyError:
 
1188
                # all the paths in this block are not versioned in this tree
 
1189
                continue
 
1190
            for key, entry in block[1]:
 
1191
                minikind, link_or_sha1, size, executable, revid = entry[parent_index]
 
1192
                if minikind in ('a', 'r'): # absent, relocated
 
1193
                    # not this tree
 
1194
                    continue
 
1195
                name = key[1]
 
1196
                name_unicode = utf8_decode(name)[0]
 
1197
                file_id = key[2]
 
1198
                kind = minikind_to_kind[minikind]
 
1199
                inv_entry = factory[kind](file_id, name_unicode,
 
1200
                                          parent_ie.file_id)
 
1201
                inv_entry.revision = revid
 
1202
                if kind == 'file':
 
1203
                    inv_entry.executable = executable
 
1204
                    inv_entry.text_size = size
 
1205
                    inv_entry.text_sha1 = link_or_sha1
 
1206
                elif kind == 'directory':
 
1207
                    parent_ies[(dirname + '/' + name).strip('/')] = inv_entry
 
1208
                elif kind == 'symlink':
 
1209
                    inv_entry.executable = False
 
1210
                    inv_entry.text_size = size
 
1211
                    inv_entry.symlink_target = utf8_decode(link_or_sha1)[0]
 
1212
                else:
 
1213
                    raise Exception, kind
 
1214
                # These checks cost us around 40ms on a 55k entry tree
 
1215
                assert file_id not in inv_byid
 
1216
                assert name_unicode not in parent_ie.children
 
1217
                inv_byid[file_id] = inv_entry
 
1218
                parent_ie.children[name_unicode] = inv_entry
 
1219
        self._inventory = inv
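    # Sketch of the dirstate layout walked above (for orientation only):
    # each element of state._dirblocks is a (dirname_utf8, entries) pair,
    # and every entry is (key, tree_details) as described for _get_entry().
    # Skipping minikinds 'a' (absent) and 'r' (relocated) leaves exactly the
    # rows that exist in this parent tree; those become InventoryEntry
    # objects here.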
 
1220
 
 
1221
    def get_file_sha1(self, file_id, path=None, stat_value=None):
 
1222
        # TODO: if path is present, fast-path on that, as inventory
 
1223
        # might not be present
 
1224
        ie = self.inventory[file_id]
 
1225
        if ie.kind == "file":
 
1226
            return ie.text_sha1
 
1227
        return None
 
1228
 
 
1229
    def get_file(self, file_id):
 
1230
        return StringIO(self.get_file_text(file_id))
 
1231
 
 
1232
    def get_file_lines(self, file_id):
 
1233
        ie = self.inventory[file_id]
 
1234
        return self._repository.weave_store.get_weave(file_id,
 
1235
                self._repository.get_transaction()).get_lines(ie.revision)
 
1236
 
 
1237
    def get_file_size(self, file_id):
 
1238
        return self.inventory[file_id].text_size
 
1239
 
 
1240
    def get_file_text(self, file_id):
 
1241
        return ''.join(self.get_file_lines(file_id))
 
1242
 
 
1243
    def get_symlink_target(self, file_id):
 
1244
        entry = self._get_entry(file_id=file_id)
 
1245
        parent_index = self._get_parent_index()
 
1246
        if entry[1][parent_index][0] != 'l':
 
1247
            return None
 
1248
        else:
 
1249
            # At present, none of the tree implementations supports non-ascii
 
1250
            # symlink targets. So we will just assume that the dirstate path is
 
1251
            # correct.
 
1252
            return entry[1][parent_index][1]
 
1253
 
 
1254
    def get_revision_id(self):
 
1255
        """Return the revision id for this tree."""
 
1256
        return self._revision_id
 
1257
 
 
1258
    def _get_inventory(self):
 
1259
        if self._inventory is not None:
 
1260
            return self._inventory
 
1261
        self._generate_inventory()
 
1262
        return self._inventory
 
1263
 
 
1264
    inventory = property(_get_inventory,
 
1265
                         doc="Inventory of this Tree")
 
1266
 
 
1267
    def get_parent_ids(self):
 
1268
        """The parents of a tree in the dirstate are not cached."""
 
1269
        return self._repository.get_revision(self._revision_id).parent_ids
 
1270
 
 
1271
    def has_filename(self, filename):
 
1272
        return bool(self.path2id(filename))
 
1273
 
 
1274
    def kind(self, file_id):
 
1275
        return self.inventory[file_id].kind
 
1276
 
 
1277
    def is_executable(self, file_id, path=None):
 
1278
        ie = self.inventory[file_id]
 
1279
        if ie.kind != "file":
 
1280
            return None
 
1281
        return ie.executable
 
1282
 
 
1283
    def list_files(self, include_root=False):
 
1284
        # We use a standard implementation, because DirStateRevisionTree is
 
1285
        # dealing with one of the parents of the current state
 
1286
        inv = self._get_inventory()
 
1287
        entries = inv.iter_entries()
 
1288
        if self.inventory.root is not None and not include_root:
 
1289
            entries.next()
 
1290
        for path, entry in entries:
 
1291
            yield path, 'V', entry.kind, entry.file_id, entry
 
1292
 
 
1293
    def lock_read(self):
 
1294
        """Lock the tree for a set of operations."""
 
1295
        if not self._locked:
 
1296
            self._repository.lock_read()
 
1297
            if self._dirstate._lock_token is None:
 
1298
                self._dirstate.lock_read()
 
1299
                self._dirstate_locked = True
 
1300
        self._locked += 1
 
1301
 
 
1302
    @needs_read_lock
 
1303
    def path2id(self, path):
 
1304
        """Return the id for path in this tree."""
 
1305
        # lookup by path: faster than splitting and walking the inventory.
 
1306
        entry = self._get_entry(path=path)
 
1307
        if entry == (None, None):
 
1308
            return None
 
1309
        return entry[0][2]
 
1310
 
 
1311
    def unlock(self):
 
1312
        """Unlock, freeing any cache memory used during the lock."""
 
1313
        # outside of a lock, the inventory is suspect: release it.
 
1314
        self._locked -=1
 
1315
        if not self._locked:
 
1316
            self._inventory = None
 
1317
            self._locked = 0
 
1318
            if self._dirstate_locked:
 
1319
                self._dirstate.unlock()
 
1320
                self._dirstate_locked = False
 
1321
            self._repository.unlock()
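    # Typical usage of the lock counting above (a sketch of normal bzrlib
    # practice, not specific to this class):
    #
    #   tree.lock_read()
    #   try:
    #       pass  # read the inventory, file texts, etc.
    #   finally:
    #       tree.unlock()
    #
    # lock_read() can be called several times; only the final unlock() drops
    # the cached inventory and releases the dirstate and repository locks.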
 
1322
 
 
1323
    def walkdirs(self, prefix=""):
 
1324
        # TODO: jam 20070215 This is the cheap way by cheating and using the
 
1325
        #       RevisionTree implementation.
 
1326
        #       This should be cleaned up to use the much faster Dirstate code
 
1327
        #       This is a little tricky, though, because the dirstate is
 
1328
        #       indexed by current path, not by parent path.
 
1329
        #       So for now, we just build up the parent inventory, and extract
 
1330
        #       it the same way RevisionTree does.
 
1331
        _directory = 'directory'
 
1332
        inv = self._get_inventory()
 
1333
        top_id = inv.path2id(prefix)
 
1334
        if top_id is None:
 
1335
            pending = []
 
1336
        else:
 
1337
            pending = [(prefix, top_id)]
 
1338
        while pending:
 
1339
            dirblock = []
 
1340
            relpath, file_id = pending.pop()
 
1341
            # 0 - relpath, 1- file-id
 
1342
            if relpath:
 
1343
                relroot = relpath + '/'
 
1344
            else:
 
1345
                relroot = ""
 
1346
            # FIXME: stash the node in pending
 
1347
            entry = inv[file_id]
 
1348
            for name, child in entry.sorted_children():
 
1349
                toppath = relroot + name
 
1350
                dirblock.append((toppath, name, child.kind, None,
 
1351
                    child.file_id, child.kind
 
1352
                    ))
 
1353
            yield (relpath, entry.file_id), dirblock
 
1354
            # push the user specified dirs from dirblock
 
1355
            for dir in reversed(dirblock):
 
1356
                if dir[2] == _directory:
 
1357
                    pending.append((dir[0], dir[4]))
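    # Shape of what walkdirs() yields, based on the tuples built above
    # (illustrative): ((relpath, dir_file_id), dirblock), where each dirblock
    # row is (child_relpath, basename, kind, None, file_id, kind).  The None
    # slot is where a working tree would presumably report on-disk (stat)
    # information; a revision tree has nothing on disk to report.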
 
1358
 
 
1359
 
 
1360
class InterDirStateTree(InterTree):
 
1361
    """Fast path optimiser for changes_from with dirstate trees."""
 
1362
 
 
1363
    def __init__(self, source, target):
 
1364
        super(InterDirStateTree, self).__init__(source, target)
 
1365
        if not InterDirStateTree.is_compatible(source, target):
 
1366
            raise Exception, "invalid source %r and target %r" % (source, target)
 
1367
 
 
1368
    @staticmethod
 
1369
    def make_source_parent_tree(source, target):
 
1370
        """Change the source tree into a parent of the target."""
 
1371
        revid = source.commit('record tree')
 
1372
        target.branch.repository.fetch(source.branch.repository, revid)
 
1373
        target.set_parent_ids([revid])
 
1374
        return target.basis_tree(), target
 
1375
    _matching_from_tree_format = WorkingTreeFormat4()
 
1376
    _matching_to_tree_format = WorkingTreeFormat4()
 
1377
    _test_mutable_trees_to_test_trees = make_source_parent_tree
 
1378
 
 
1379
    def _iter_changes(self, include_unchanged=False,
 
1380
                      specific_files=None, pb=None, extra_trees=[],
 
1381
                      require_versioned=True):
 
1382
        """Return the changes from source to target.
 
1383
 
 
1384
        :return: An iterator that yields tuples. See InterTree._iter_changes
 
1385
            for details.
 
1386
        :param specific_files: An optional list of file paths to restrict the
 
1387
            comparison to. When mapping filenames to ids, all matches in all
 
1388
            trees (including optional extra_trees) are used, and all children of
 
1389
            matched directories are included.
 
1390
        :param include_unchanged: An optional boolean requesting the inclusion of
 
1391
            unchanged entries in the result.
 
1392
        :param extra_trees: An optional list of additional trees to use when
 
1393
            mapping the contents of specific_files (paths) to file_ids.
 
1394
        :param require_versioned: If True, all files in specific_files must be
 
1395
            versioned in one of source, target, extra_trees or
 
1396
            PathsNotVersionedError is raised.
 
1397
        """
 
1398
        # NB: show_status depends on being able to pass in non-versioned files
 
1399
        # and report them as unknown
 
1400
        # TODO: handle extra trees in the dirstate.
 
1401
        if extra_trees:
 
1402
            for f in super(InterDirStateTree, self)._iter_changes(
 
1403
                include_unchanged, specific_files, pb, extra_trees,
 
1404
                require_versioned):
 
1405
                yield f
 
1406
            return
 
1407
        assert (self.source._revision_id in self.target.get_parent_ids())
 
1408
        parents = self.target.get_parent_ids()
 
1409
        target_index = 0
 
1410
        source_index = 1 + parents.index(self.source._revision_id)
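        # Illustrative example of the index mapping above: for a dirstate
        # whose parents are [revA, revB], column 0 is the working tree,
        # column 1 is revA and column 2 is revB; comparing revB against the
        # working tree therefore uses source_index=2, target_index=0.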
 
1411
        # -- make all specific_files utf8 --
 
1412
        if specific_files:
 
1413
            specific_files_utf8 = set()
 
1414
            for path in specific_files:
 
1415
                specific_files_utf8.add(path.encode('utf8'))
 
1416
            specific_files = specific_files_utf8
 
1417
        else:
 
1418
            specific_files = set([''])
 
1419
        # -- specific_files is now a utf8 path set --
 
1420
        # -- get the state object and prepare it.
 
1421
        state = self.target.current_dirstate()
 
1422
        state._read_dirblocks_if_needed()
 
1423
        def _entries_for_path(path):
 
1424
            """Return a list with all the entries that match path for all ids.
 
1425
            """
 
1426
            dirname, basename = os.path.split(path)
 
1427
            key = (dirname, basename, '')
 
1428
            block_index, present = state._find_block_index_from_key(key)
 
1429
            if not present:
 
1430
                # the block which should contain path is absent.
 
1431
                return []
 
1432
            result = []
 
1433
            block = state._dirblocks[block_index][1]
 
1434
            entry_index, _ = state._find_entry_index(key, block)
 
1435
            # we may need to look at multiple entries at this path: walk while the dirname and basename still match.
 
1436
            while (entry_index < len(block) and
 
1437
                block[entry_index][0][0:2] == key[0:2]):
 
1438
                result.append(block[entry_index])
 
1439
                entry_index += 1
 
1440
            return result
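        # Sketch: _entries_for_path('dir/name') collects every dirstate entry
        # whose key starts with ('dir', 'name') -- one entry per file id that
        # has ever lived at that path in any tree tracked by the dirstate.
        # An empty list means the path is unknown to the dirstate.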
 
1441
        if require_versioned:
 
1442
            # -- check all supplied paths are versioned in a search tree. --
 
1443
            all_versioned = True
 
1444
            for path in specific_files:
 
1445
                # path is already utf8-encoded (specific_files was encoded above).
 
1446
                path_entries = _entries_for_path(path)
 
1447
                if not path_entries:
 
1448
                    # this specified path is not present at all: error
 
1449
                    all_versioned = False
 
1450
                    break
 
1451
                found_versioned = False
 
1452
                # for each id at this path
 
1453
                for entry in path_entries:
 
1454
                    # for each tree.
 
1455
                    for index in source_index, target_index:
 
1456
                        if entry[1][index][0] != 'a': # absent
 
1457
                            found_versioned = True
 
1458
                            # all good: found a versioned cell
 
1459
                            break
 
1460
                if not found_versioned:
 
1461
                    # every index was 'absent' for every id at this
 
1462
                    # path.
 
1463
                    all_versioned = False
 
1464
                    break
 
1465
            if not all_versioned:
 
1466
                raise errors.PathsNotVersionedError(specific_files)
 
1467
        # -- remove redundancy in supplied specific_files to prevent over-scanning --
 
1468
        search_specific_files = set()
 
1469
        for path in specific_files:
 
1470
            other_specific_files = specific_files.difference(set([path]))
 
1471
            if not osutils.is_inside_any(other_specific_files, path):
 
1472
                # this is a top level path, we must check it.
 
1473
                search_specific_files.add(path)
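        # Worked example (hypothetical paths): if specific_files is
        # set(['a', 'a/b', 'c']), then 'a/b' is inside 'a', so only
        # set(['a', 'c']) ends up in search_specific_files; 'a/b' will be
        # reached while recursing under 'a'.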
 
1474
        # sketch: 
 
1475
        # compare source_index and target_index at or under each element of search_specific_files.
 
1476
        # follow the following comparison table. Note that we only want to do diff operations when
 
1477
        # the target is fdl because that's when the walkdirs logic will have exposed the pathinfo
 
1478
        # for the target.
 
1479
        # cases:
 
1480
        # 
 
1481
        # Source | Target | disk | action
 
1482
        #   r    | fdl    |      | add source to search, add id path move and perform
 
1483
        #        |        |      | diff check on source-target
 
1484
        #   r    | fdl    |  a   | dangling file that was present in the basis. 
 
1485
        #        |        |      | ???
 
1486
        #   r    |  a     |      | add source to search
 
1487
        #   r    |  a     |  a   | 
 
1488
        #   r    |  r     |      | this path is present in a non-examined tree, skip.
 
1489
        #   r    |  r     |  a   | this path is present in a non-examined tree, skip.
 
1490
        #   a    | fdl    |      | add new id
 
1491
        #   a    | fdl    |  a   | dangling locally added file, skip
 
1492
        #   a    |  a     |      | not present in either tree, skip
 
1493
        #   a    |  a     |  a   | not present in any tree, skip
 
1494
        #   a    |  r     |      | not present in either tree at this path, skip as it
 
1495
        #        |        |      | may not be selected by the user's list of paths.
 
1496
        #   a    |  r     |  a   | not present in either tree at this path, skip as it
 
1497
        #        |        |      | may not be selected by the user's list of paths.
 
1498
        #  fdl   | fdl    |      | content in both: diff them
 
1499
        #  fdl   | fdl    |  a   | deleted locally, but not unversioned - show as deleted ?
 
1500
        #  fdl   |  a     |      | unversioned: output deleted id for now
 
1501
        #  fdl   |  a     |  a   | unversioned and deleted: output deleted id
 
1502
        #  fdl   |  r     |      | relocated in this tree, so add target to search.
 
1503
        #        |        |      | Don't diff, we will see an r,fdl pair when we reach
 
1504
        #        |        |      | this id at the other path.
 
1505
        #  fdl   |  r     |  a   | relocated in this tree, so add target to search.
 
1506
        #        |        |      | Don't diff, we will see an r,fdl pair when we reach
 
1507
        #        |        |      | this id at the other path.
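        # For reference when reading the table above: the one-letter
        # minikinds come from dirstate.DirState._minikind_to_kind -- roughly
        # 'f' file, 'd' directory, 'l' symlink, 'a' absent, 'r' relocated
        # (the entry lives at another path in that tree).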
 
1508
 
 
1509
        # for all search indexes in each path at or under each element of
 
1510
        # search_specific_files, if the detail is relocated: add the id, and add the
 
1511
        # relocated path as one to search if it's not searched already. If the
 
1512
        # detail is not relocated, add the id.
 
1513
        searched_specific_files = set()
 
1514
        def _process_entry(entry, path_info):
 
1515
            """Compare an entry and real disk to generate delta information.
 
1516
 
 
1517
            :param path_info: top_relpath, basename, kind, lstat, abspath for
 
1518
                the path of entry. If None, then the path is considered absent.
 
1519
                (Perhaps we should pass in a concrete entry for this ?)
 
1520
            """
 
1521
            # TODO: when a parent has been renamed, don't emit path renames for its children.
 
1522
            source_details = entry[1][source_index]
 
1523
            target_details = entry[1][target_index]
 
1524
            if source_details[0] in 'rfdl' and target_details[0] in 'fdl':
 
1525
                # claimed content in both: diff
 
1526
                #   r    | fdl    |      | add source to search, add id path move and perform
 
1527
                #        |        |      | diff check on source-target
 
1528
                #   r    | fdl    |  a   | dangling file that was present in the basis. 
 
1529
                #        |        |      | ???
 
1530
                if source_details[0] in 'r':
 
1531
                    # add the source to the search path to find any children it
 
1532
                    # has.  TODO ? : only add if it is a container ?
 
1533
                    if not osutils.is_inside_any(searched_specific_files, source_details[1]):
 
1534
                        search_specific_files.add(source_details[1])
 
1535
                    # generate the old path; this is needed for stating later
 
1536
                    # as well.
 
1537
                    old_path = source_details[1]
 
1538
                    old_dirname, old_basename = os.path.split(old_path)
 
1539
                    path = os.path.join(*entry[0][0:2])
 
1540
                    old_entry = state._get_entry(source_index, path_utf8=old_path)
 
1541
                    # update the source details variable to be the real
 
1542
                    # location.
 
1543
                    source_details = old_entry[1][source_index]
 
1544
                else:
 
1545
                    old_path = path = os.path.join(*entry[0][0:2])
 
1546
                    old_dirname, old_basename = entry[0][0:2]
 
1547
                if path_info is None:
 
1548
                    # the file is missing on disk, show as removed.
 
1549
                    print "missing file"
 
1550
                    old_path = os.path.join(*entry[0][0:2])
 
1551
                    result.removed.append((old_path, entry[0][2], dirstate.DirState._minikind_to_kind[source_details[0]]))
 
1552
                # use the kind from disk.
 
1553
                elif source_details[0] != path_info[2][0]:
 
1554
                    # different kind
 
1555
                    import pdb;pdb.set_trace()
 
1556
                    print "kind change"
 
1557
                else:
 
1558
                    # same kind
 
1559
                    if path_info[2][0] == 'd':
 
1560
                        # directories have no fingerprint
 
1561
                        content_change = False
 
1562
                        executable_change = False
 
1563
                    elif path_info[2][0] == 'f':
 
1564
                        # has it changed? fast path: size, slow path: sha1.
 
1565
                        executable_change = source_details[3] != bool(
 
1566
                            stat.S_ISREG(path_info[3].st_mode)
 
1567
                            and stat.S_IEXEC & path_info[3].st_mode)
 
1568
                        if source_details[2] != path_info[3].st_size:
 
1569
                            content_change = True
 
1570
                        else:
 
1571
                            # maybe the same. Get the hash
 
1572
                            new_hash = self.target._hashcache.get_sha1(path, path_info[3])
 
1573
                            content_change = (new_hash != source_details[1])
 
1574
                    elif path_info[2][0] == 'l':
 
1575
                        import pdb;pdb.set_trace()
 
1576
                        print "link"
 
1577
                    else:
 
1578
                        raise Exception, "unknown minikind"
 
1579
                    # parent id is the entry for the path in the target tree
 
1580
                    # TODO: the target is the same for an entire directory: cache em.
 
1581
                    source_parent_id = state._get_entry(source_index, path_utf8=old_dirname)[0][2]
 
1582
                    if source_parent_id == entry[0][2]:
 
1583
                        source_parent_id = None
 
1584
                    target_parent_id = state._get_entry(target_index, path_utf8=entry[0][0])[0][2]
 
1585
                    if target_parent_id == entry[0][2]:
 
1586
                        target_parent_id = None
 
1587
                    source_exec = source_details[3]
 
1588
                    target_exec = bool(
 
1589
                        stat.S_ISREG(path_info[3].st_mode)
 
1590
                        and stat.S_IEXEC & path_info[3].st_mode)
 
1591
                    return ((entry[0][2], path, content_change, (True, True), (source_parent_id, target_parent_id), (old_basename, entry[0][1]), (dirstate.DirState._minikind_to_kind[source_details[0]], path_info[2]), (source_exec, target_exec)),)
 
1592
            elif source_details[0] in 'a' and target_details[0] in 'fdl':
 
1593
                # looks like a new file
 
1594
                if path_info is not None:
 
1595
                    path = os.path.join(*entry[0][0:2])
 
1596
                    # parent id is the entry for the path in the target tree
 
1597
                    # TODO: these are the same for an entire directory: cache em.
 
1598
                    parent_id = state._get_entry(target_index, path_utf8=entry[0][0])[0][2]
 
1599
                    if parent_id == entry[0][2]:
 
1600
                        parent_id = None
 
1601
                    # basename
 
1602
                    new_executable = bool(
 
1603
                        stat.S_ISREG(path_info[3].st_mode)
 
1604
                        and stat.S_IEXEC & path_info[3].st_mode)
 
1605
                    return ((entry[0][2], path, True, (False, True), (None, parent_id), (None, entry[0][1]), (None, path_info[2]), (None, new_executable)),)
 
1606
                else:
 
1607
                    # but it's not on disk: we deliberately treat this as just
 
1608
                    # never-present. (Why ?! - RBC 20070224)
 
1609
                    pass
 
1610
            elif source_details[0] in 'fdl' and target_details[0] in 'a':
 
1611
                # unversioned, possibly, or possibly not deleted: we don't care.
 
1612
                # if it's still on disk, *and* there's no other entry at this
 
1613
                # path [we don't know this in this routine at the moment -
 
1614
                # perhaps we should change this - then it would be an unknown].
 
1615
                old_path = os.path.join(*entry[0][0:2])
 
1616
                # parent id is the entry for the path in the target tree
 
1617
                parent_id = state._get_entry(source_index, path_utf8=entry[0][0])[0][2]
 
1618
                if parent_id == entry[0][2]:
 
1619
                    parent_id = None
 
1620
                return ((entry[0][2], old_path, True, (True, False), (parent_id, None), (entry[0][1], None), (dirstate.DirState._minikind_to_kind[source_details[0]], None), (source_details[3], None)),)
 
1621
            elif source_details[0] in 'fdl' and target_details[0] in 'r':
 
1622
                # a rename; could be a true rename, or a rename inherited from
 
1623
                # a renamed parent. TODO: handle this efficiently. Its not
 
1624
                # common case to rename dirs though, so a correct but slow
 
1625
                # implementation will do.
 
1626
                if not osutils.is_inside_any(searched_specific_files, target_details[1]):
 
1627
                    search_specific_files.add(target_details[1])
 
1628
            else:
 
1629
                import pdb;pdb.set_trace()
 
1630
            return ()
 
1631
        while search_specific_files:
 
1632
            # TODO: the pending list should be lexically sorted?
 
1633
            current_root = search_specific_files.pop()
 
1634
            searched_specific_files.add(current_root)
 
1635
            # process the entries for this containing directory: the rest will be
 
1636
            # found by their parents recursively.
 
1637
            root_entries = _entries_for_path(current_root)
 
1638
            root_abspath = self.target.abspath(current_root)
 
1639
            try:
 
1640
                root_stat = os.lstat(root_abspath)
 
1641
            except OSError, e:
 
1642
                if e.errno == errno.ENOENT:
 
1643
                    # TODO: this directory does not exist in target. Should we
 
1644
                    # consider it missing and diff, or should we just skip? For
 
1645
                    # now, skip.
 
1646
                    continue
 
1647
                else:
 
1648
                    # some other random error: hand it up.
 
1649
                    raise
 
1650
            root_dir_info = ('', current_root,
 
1651
                osutils.file_kind_from_stat_mode(root_stat.st_mode), root_stat,
 
1652
                root_abspath)
 
1653
            #
 
1654
            if not root_entries:
 
1655
                # this specified path is not present at all, skip it.
 
1656
                continue
 
1657
            for entry in root_entries:
 
1658
                for result in _process_entry(entry, root_dir_info):
 
1659
                    # this check should probably be outside the loop: one
 
1660
                    # 'iterate two trees' api, and then _iter_changes filters
 
1661
                    # unchanged pairs. - RBC 20070226
 
1662
                    if include_unchanged or result[2] or True in map(lambda x:x[0]!=x[1], result[3:8]):
 
1663
                        yield result
 
1664
            dir_iterator = osutils.walkdirs(root_abspath, prefix=current_root)
 
1665
            initial_key = (current_root, '', '')
 
1666
            block_index, _ = state._find_block_index_from_key(initial_key)
 
1667
            if block_index == 0:
 
1668
                # we have processed the total root already, but because the
 
1669
                # initial key matched it we should skip it here.
 
1670
                block_index +=1
 
1671
            current_dir_info = dir_iterator.next()
 
1672
            if current_dir_info[0][0] == '':
 
1673
                # remove .bzr from iteration
 
1674
                bzr_index = bisect_left(current_dir_info[1], ('.bzr',))
 
1675
                assert current_dir_info[1][bzr_index][0] == '.bzr'
 
1676
                del current_dir_info[1][bzr_index]
 
1677
            # convert the unicode relpaths in the dir index to utf8 for
 
1678
            # comparison with dirstate data.
 
1679
            # TODO: keep the utf8 version around for giving to the caller.
 
1680
            current_dir_info = ((current_dir_info[0][0].encode('utf8'), current_dir_info[0][1]),
 
1681
                [(line[0].encode('utf8'), line[1].encode('utf8')) + line[2:] for line in current_dir_info[1]])
 
1682
            # walk until both the directory listing and the versioned metadata
 
1683
            # are exhausted. TODO: reevaluate this, perhaps we should stop when
 
1684
            # the versioned data runs out.
 
1685
            if (block_index < len(state._dirblocks) and
 
1686
                osutils.is_inside(current_root, state._dirblocks[block_index][0])):
 
1687
                current_block = state._dirblocks[block_index]
 
1688
            else:
 
1689
                current_block = None
 
1690
            while (current_dir_info is not None or
 
1691
                current_block is not None):
 
1692
                if current_dir_info and current_block and current_dir_info[0][0] != current_block[0]:
 
1693
                    if current_block[0] < current_dir_info[0][0]:
 
1694
                        # extra dir on disk: pass for now? should del from info ?
 
1695
                        import pdb;pdb.set_trace()
 
1696
                        print 'unversioned dir'
 
1697
                    else:
 
1698
                        # entry referring to missing dir.
 
1699
                        import pdb;pdb.set_trace()
 
1700
                        print 'missing dir'
 
1701
                entry_index = 0
 
1702
                if current_block and entry_index < len(current_block[1]):
 
1703
                    current_entry = current_block[1][entry_index]
 
1704
                else:
 
1705
                    current_entry = None
 
1706
                advance_entry = True
 
1707
                path_index = 0
 
1708
                if current_dir_info and path_index < len(current_dir_info[1]):
 
1709
                    current_path_info = current_dir_info[1][path_index]
 
1710
                else:
 
1711
                    current_path_info = None
 
1712
                advance_path = True
 
1713
                while (current_entry is not None or
 
1714
                    current_path_info is not None):
 
1715
                    if current_entry is None:
 
1716
                        # no more entries: yield current_pathinfo as an
 
1717
                        # unversioned file: it's not the same as a path in any
 
1718
                        # tree in the dirstate.
 
1719
                        new_executable = bool(
 
1720
                            stat.S_ISREG(current_path_info[3].st_mode)
 
1721
                            and stat.S_IEXEC & current_path_info[3].st_mode)
 
1722
                        yield (None, current_path_info[0], True, (False, False), (None, None), (None, current_path_info[1]), (None, current_path_info[2]), (None, new_executable))
 
1723
                    elif current_path_info is None:
 
1724
                        # no path is fine: the per entry code will handle it.
 
1725
                        for result in _process_entry(current_entry, current_path_info):
 
1726
                            # this check should probably be outside the loop: one
 
1727
                            # 'iterate two trees' api, and then _iter_changes filters
 
1728
                            # unchanged pairs. - RBC 20070226
 
1729
                            if include_unchanged or result[2] or True in map(lambda x:x[0]!=x[1], result[3:8]):
 
1730
                                yield result
 
1731
                    elif current_entry[0][1] != current_path_info[1]:
 
1732
                        if current_path_info[1] < current_entry[0][1]:
 
1733
                            # extra file on disk: pass for now
 
1734
                            import pdb;pdb.set_trace()
 
1735
                            print 'unversioned file'
 
1736
                        else:
 
1737
                            # entry referring to file not present on disk.
 
1738
                            # advance the entry only, after processing.
 
1739
                            for result in _process_entry(current_entry, None):
 
1740
                                # this check should probably be outside the loop: one
 
1741
                                # 'iterate two trees' api, and then _iter_changes filters
 
1742
                                # unchanged pairs. - RBC 20070226
 
1743
                                if include_unchanged or result[2] or True in map(lambda x:x[0]!=x[1], result[3:8]):
 
1744
                                    yield result
 
1745
                            advance_path = False
 
1746
                    else:
 
1747
                        for result in _process_entry(current_entry, current_path_info):
 
1748
                            # this check should probably be outside the loop: one
 
1749
                            # 'iterate two trees' api, and then _iter_changes filters
 
1750
                            # unchanged pairs. - RBC 20070226
 
1751
                            if include_unchanged or result[2] or True in map(lambda x:x[0]!=x[1], result[3:8]):
 
1752
                                yield result
 
1753
                    if advance_entry and current_entry is not None:
 
1754
                        entry_index += 1
 
1755
                        if entry_index < len(current_block[1]):
 
1756
                            current_entry = current_block[1][entry_index]
 
1757
                        else:
 
1758
                            current_entry = None
 
1759
                    else:
 
1760
                        advance_entry = True # reset the advance flag
 
1761
                    if advance_path and current_path_info is not None:
 
1762
                        path_index += 1
 
1763
                        if path_index < len(current_dir_info[1]):
 
1764
                            current_path_info = current_dir_info[1][path_index]
 
1765
                        else:
 
1766
                            current_path_info = None
 
1767
                    else:
 
1768
                        advance_path = True # reset the advance flag.
 
1769
                if current_block is not None:
 
1770
                    block_index += 1
 
1771
                    if (block_index < len(state._dirblocks) and
 
1772
                        osutils.is_inside(current_root, state._dirblocks[block_index][0])):
 
1773
                        current_block = state._dirblocks[block_index]
 
1774
                    else:
 
1775
                        current_block = None
 
1776
                if current_dir_info is not None:
 
1777
                    try:
 
1778
                        current_dir_info = dir_iterator.next()
 
1779
                        # convert the unicode relpaths in the dir index to utf8 for
 
1780
                        # comparison with dirstate data.
 
1781
                        # TODO: keep the utf8 version around for giving to the caller.
 
1782
                        current_dir_info = ((current_dir_info[0][0].encode('utf8'), current_dir_info[0][1]),
 
1783
                            [(line[0].encode('utf8'), line[1].encode('utf8')) + line[2:] for line in current_dir_info[1]])
 
1784
                    except StopIteration:
 
1785
                        current_dir_info = None
 
1786
 
 
1787
 
 
1788
    @staticmethod
 
1789
    def is_compatible(source, target):
 
1790
        # the target must be a dirstate working tree
 
1791
        if not isinstance(target, WorkingTree4):
 
1792
            return False
 
1793
        # the source must be a revision tree or dirstate revision tree.
 
1794
        if not isinstance(source,
 
1795
            (revisiontree.RevisionTree, DirStateRevisionTree)):
 
1796
            return False
 
1797
        # the source revid must be in the target dirstate
 
1798
        if not (source._revision_id == NULL_REVISION or
 
1799
            source._revision_id in target.get_parent_ids()):
 
1800
            # TODO: what about ghosts? it may well need to 
 
1801
            # check for them explicitly.
 
1802
            return False
 
1803
        return True
 
1804
 
 
1805
InterTree.register_optimiser(InterDirStateTree)