# Copyright (C) 2005, 2006, 2007 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

"""WorkingTree4 format and implementation.

WorkingTree4 provides the dirstate based working tree logic.

To get a WorkingTree, call bzrdir.open_workingtree() or
WorkingTree.open(dir).
"""
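# Illustrative sketch (not part of the original module): per the docstring
# above, a dirstate working tree is normally reached through the ordinary
# WorkingTree entry points; 'path/to/tree' is a hypothetical location.
#
#   from bzrlib import workingtree
#   wt = workingtree.WorkingTree.open('path/to/tree')  # a WorkingTree4 for format-4 trees
#   wt.lock_read()
#   try:
#       print wt.get_parent_ids()
#   finally:
#       wt.unlock()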
from cStringIO import StringIO

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from bisect import bisect_left
from copy import deepcopy
conflicts as _mod_conflicts,
revision as _mod_revision,
from bzrlib.transport import get_transport
from bzrlib import symbol_versioning
from bzrlib.decorators import needs_read_lock, needs_write_lock
from bzrlib.inventory import InventoryEntry, Inventory, ROOT_ID, entry_factory
from bzrlib.lockable_files import LockableFiles, TransportLock
from bzrlib.lockdir import LockDir
import bzrlib.mutabletree
from bzrlib.mutabletree import needs_tree_write_lock
from bzrlib.osutils import (
from bzrlib.trace import mutter, note
from bzrlib.transport.local import LocalTransport
from bzrlib.tree import InterTree
from bzrlib.progress import DummyProgress, ProgressPhase
from bzrlib.revision import NULL_REVISION, CURRENT_REVISION
from bzrlib.rio import RioReader, rio_file, Stanza
from bzrlib.symbol_versioning import (deprecated_passed,
from bzrlib.tree import Tree
from bzrlib.workingtree import WorkingTree, WorkingTree3, WorkingTreeFormat3

# This is the Windows equivalent of ENOTDIR.
# It is defined in pywin32.winerror, but we don't want a strong dependency for
# just an error code.
ERROR_PATH_NOT_FOUND = 3
ERROR_DIRECTORY = 267


class WorkingTree4(WorkingTree3):
"""This is the Format 4 working tree.

This differs from WorkingTree3 by:
- Having a consolidated internal dirstate, stored in a
randomly-accessible sorted file on disk.
- Not having a regular inventory attribute. One can be synthesized
on demand but this is expensive and should be avoided.

This is new in bzr 0.15.
"""
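# Illustrative note (sketch, not part of the original code): because the
# inventory is only synthesized on demand, callers should prefer the dirstate
# backed accessors where possible; 'doc/index.txt' is a hypothetical path.
#
#   file_id = wt.path2id('doc/index.txt')   # cheap dirstate lookup
#   inv = wt.inventory                      # expensive: walks the whole dirstate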
def __init__(self, basedir,
"""Construct a WorkingTree for basedir.

If the branch is not supplied, it is opened automatically.
If the branch is supplied, it must be the branch for this basedir.
(branch.base is not cross checked, because for remote branches that
would be meaningless).
"""
self._format = _format
self.bzrdir = _bzrdir
assert isinstance(basedir, basestring), \
"base directory %r is not a string" % basedir
basedir = safe_unicode(basedir)
mutter("opening working tree %r", basedir)
self._branch = branch
assert isinstance(self.branch, bzrlib.branch.Branch), \
"branch %r is not a Branch" % self.branch
self.basedir = realpath(basedir)
# if branch is at our basedir and is a format 6 or less
# assume all other formats have their own control files.
assert isinstance(_control_files, LockableFiles), \
"_control_files must be a LockableFiles, not %r" % _control_files
self._control_files = _control_files
# during a read or write lock these objects are set, and are
# None the rest of the time.
self._dirstate = None
self._inventory = None

@needs_tree_write_lock
def _add(self, files, ids, kinds):
"""See MutableTree._add."""
state = self.current_dirstate()
for f, file_id, kind in zip(files, ids, kinds):
# special case tree root handling.
if f == '' and self.path2id(f) == ROOT_ID:
state.set_path_id('', generate_ids.gen_file_id(f))
file_id = generate_ids.gen_file_id(f)
# deliberately add the file with no cached stat or sha1
# - on the first access it will be gathered, and we can
# always change this once tests are all passing.
state.add(f, file_id, kind, None, '')
self._make_dirty(reset_inventory=True)

def _make_dirty(self, reset_inventory):
"""Make the tree state dirty.

:param reset_inventory: True if the cached inventory should be removed
(presuming there is one).
"""
if reset_inventory and self._inventory is not None:
self._inventory = None
@needs_tree_write_lock
def add_reference(self, sub_tree):
# use standard implementation, which calls back to self._add
# So we don't store the reference_revision in the working dirstate,
# it's just recorded at the moment of commit.
self._add_reference(sub_tree)

def break_lock(self):
"""Break a lock if one is present from another instance.

Uses the ui factory to ask for confirmation if the lock may be from
an active process.

This will probe the repository for its lock as well.
"""
# if the dirstate is locked by an active process, reject the break lock
if self._dirstate is None:
state = self._current_dirstate()
if state._lock_token is not None:
# we already have it locked. sheesh, can't break our own lock.
raise errors.LockActive(self.basedir)
# try for a write lock - need permission to get one anyhow
except errors.LockContention:
# oslocks fail when a process is still live: fail.
# TODO: get the locked lockdir info and give to the user to
# assist in debugging.
raise errors.LockActive(self.basedir)
self._dirstate = None
self._control_files.break_lock()
self.branch.break_lock()

def _comparison_data(self, entry, path):
kind, executable, stat_value = \
WorkingTree3._comparison_data(self, entry, path)
# it looks like a plain directory, but it's really a reference -- see
if (self._repo_supports_tree_reference and
kind == 'directory' and
self._directory_is_tree_reference(path)):
kind = 'tree-reference'
return kind, executable, stat_value

def commit(self, message=None, revprops=None, *args, **kwargs):
# mark the tree as dirty post commit - commit
# can change the current versioned list by doing deletes.
result = WorkingTree3.commit(self, message, revprops, *args, **kwargs)
self._make_dirty(reset_inventory=True)

def current_dirstate(self):
"""Return the current dirstate object.

This is not part of the tree interface and only exposed for ease of
testing.

:raises errors.NotWriteLocked: when not in a lock.
"""
self._must_be_locked()
return self._current_dirstate()

def _current_dirstate(self):
"""Internal function that does not check lock status.

This is needed for break_lock which also needs the dirstate.
"""
if self._dirstate is not None:
return self._dirstate
local_path = self.bzrdir.get_workingtree_transport(None
).local_abspath('dirstate')
self._dirstate = dirstate.DirState.on_file(local_path)
return self._dirstate
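# Illustrative sketch (assumes 'wt' is a WorkingTree4): the dirstate object is
# only available while the tree is locked.
#
#   wt.lock_read()
#   try:
#       state = wt.current_dirstate()   # raises if the tree is not locked
#       parent_ids = state.get_parent_ids()
#   finally:
#       wt.unlock()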
def _directory_is_tree_reference(self, relpath):
# as a special case, if a directory contains control files then
# it's a tree reference, except that the root of the tree is not
return relpath and osutils.isdir(self.abspath(relpath) + u"/.bzr")
# TODO: We could ask all the control formats whether they
# recognize this directory, but at the moment there's no cheap api
# to do that. Since we probably can only nest bzr checkouts and
# they always use this name it's ok for now. -- mbp 20060306
# FIXME: There is an unhandled case here of a subdirectory
# containing .bzr but not a branch; that will probably blow up
# when you try to commit it. It might happen if there is a
# checkout in a subdirectory. This can be avoided by not adding

def filter_unversioned_files(self, paths):
"""Filter out paths that are versioned.

:return: set of paths.
"""
# TODO: make a generic multi-bisect routine roughly that should list
# the paths, then process one half at a time recursively, and feed the
# results of each bisect in further still
paths = sorted(paths)
state = self.current_dirstate()
# TODO we want a paths_to_dirblocks helper I think
dirname, basename = os.path.split(path.encode('utf8'))
_, _, _, path_is_versioned = state._get_block_entry_index(
dirname, basename, 0)
if not path_is_versioned:

def flush(self):
"""Write all cached data to disk."""
if self._control_files._lock_mode != 'w':
raise errors.NotWriteLocked(self)
self.current_dirstate().save()
self._inventory = None

@needs_tree_write_lock
def _gather_kinds(self, files, kinds):
"""See MutableTree._gather_kinds."""
for pos, f in enumerate(files):
if kinds[pos] is None:
kinds[pos] = self._kind(f)

def _generate_inventory(self):
"""Create and set self.inventory from the dirstate object.

This is relatively expensive: we have to walk the entire dirstate.
Ideally we would not, and can deprecate this function.
"""
# uncomment to trap on inventory requests.
# import pdb;pdb.set_trace()
state = self.current_dirstate()
state._read_dirblocks_if_needed()
root_key, current_entry = self._get_entry(path='')
current_id = root_key[2]
assert current_entry[0][0] == 'd' # directory
inv = Inventory(root_id=current_id)
# Turn some things into local variables
minikind_to_kind = dirstate.DirState._minikind_to_kind
factory = entry_factory
utf8_decode = cache_utf8._utf8_decode
# we could do this straight out of the dirstate; it might be fast
# and should be profiled - RBC 20070216
parent_ies = {'' : inv.root}
for block in state._dirblocks[1:]: # skip the root
parent_ie = parent_ies[dirname]
# all the paths in this block are not versioned in this tree
for key, entry in block[1]:
minikind, link_or_sha1, size, executable, stat = entry[0]
if minikind in ('a', 'r'): # absent, relocated
# a parent tree only entry
name_unicode = utf8_decode(name)[0]
kind = minikind_to_kind[minikind]
inv_entry = factory[kind](file_id, name_unicode,
# This is only needed on win32, where this is the only way
# we know the executable bit.
inv_entry.executable = executable
# not strictly needed: working tree
#inv_entry.text_size = size
#inv_entry.text_sha1 = sha1
elif kind == 'directory':
# add this entry to the parent map.
parent_ies[(dirname + '/' + name).strip('/')] = inv_entry
elif kind == 'tree-reference':
assert self._repo_supports_tree_reference, \
"repository of %r " \
"doesn't support tree references " \
"required by entry %r" \
inv_entry.reference_revision = link_or_sha1 or None
elif kind != 'symlink':
raise AssertionError("unknown kind %r" % kind)
# These checks cost us around 40ms on a 55k entry tree
assert file_id not in inv_byid, ('file_id %s already in'
' inventory as %s' % (file_id, inv_byid[file_id]))
assert name_unicode not in parent_ie.children
inv_byid[file_id] = inv_entry
parent_ie.children[name_unicode] = inv_entry
self._inventory = inv

def _get_entry(self, file_id=None, path=None):
"""Get the dirstate row for file_id or path.

If either file_id or path is supplied, it is used as the key to lookup.
If both are supplied, the fastest lookup is used, and an error is
raised if they do not both point at the same row.

:param file_id: An optional unicode file_id to be looked up.
:param path: An optional unicode path to be looked up.
:return: The dirstate row tuple for path/file_id, or (None, None)
"""
if file_id is None and path is None:
raise errors.BzrError('must supply file_id or path')
state = self.current_dirstate()
path = path.encode('utf8')
return state._get_entry(0, fileid_utf8=file_id, path_utf8=path)
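# Illustrative sketch (hypothetical path and file id): _get_entry() can be
# keyed either way; both calls return the same (key, details) row, or
# (None, None) when nothing is versioned there.
#
#   entry = wt._get_entry(path='doc/index.txt')
#   entry = wt._get_entry(file_id='index-file-id')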
def get_file_sha1(self, file_id, path=None, stat_value=None):
# check file id is valid unconditionally.
entry = self._get_entry(file_id=file_id, path=path)
raise errors.NoSuchId(self, file_id)
path = pathjoin(entry[0][0], entry[0][1]).decode('utf8')
file_abspath = self.abspath(path)
state = self.current_dirstate()
if stat_value is None:
stat_value = os.lstat(file_abspath)
if e.errno == errno.ENOENT:
link_or_sha1 = state.update_entry(entry, file_abspath,
stat_value=stat_value)
if entry[1][0][0] == 'f':

def _get_inventory(self):
"""Get the inventory for the tree. This is only valid within a lock."""
if 'evil' in debug.debug_flags:
trace.mutter_callsite(2,
"accessing .inventory forces a size of tree translation.")
if self._inventory is not None:
return self._inventory
self._must_be_locked()
self._generate_inventory()
return self._inventory

inventory = property(_get_inventory,
doc="Inventory of this Tree")

def get_parent_ids(self):
"""See Tree.get_parent_ids.

This implementation requests the ids list from the dirstate file.
"""
return self.current_dirstate().get_parent_ids()

def get_reference_revision(self, file_id, path=None):
# referenced tree's revision is whatever's currently there
return self.get_nested_tree(file_id, path).last_revision()

def get_nested_tree(self, file_id, path=None):
path = self.id2path(file_id)
# else: check file_id is at path?
return WorkingTree.open(self.abspath(path))

def get_root_id(self):
"""Return the id of this tree's root."""
return self._get_entry(path='')[0][2]

def has_id(self, file_id):
state = self.current_dirstate()
file_id = osutils.safe_file_id(file_id)
row, parents = self._get_entry(file_id=file_id)
return osutils.lexists(pathjoin(
self.basedir, row[0].decode('utf8'), row[1].decode('utf8')))

def id2path(self, file_id):
"""Convert a file-id to a path."""
file_id = osutils.safe_file_id(file_id)
state = self.current_dirstate()
entry = self._get_entry(file_id=file_id)
if entry == (None, None):
raise errors.NoSuchId(tree=self, file_id=file_id)
path_utf8 = osutils.pathjoin(entry[0][0], entry[0][1])
return path_utf8.decode('utf8')
if not osutils.supports_executable():
def is_executable(self, file_id, path=None):
"""Test if a file is executable or not.

Note: The caller is expected to take a read-lock before calling this.
"""
file_id = osutils.safe_file_id(file_id)
entry = self._get_entry(file_id=file_id, path=path)
if entry == (None, None):
return entry[1][0][3]

def is_executable(self, file_id, path=None):
"""Test if a file is executable or not.

Note: The caller is expected to take a read-lock before calling this.
"""
file_id = osutils.safe_file_id(file_id)
path = self.id2path(file_id)
mode = os.lstat(self.abspath(path)).st_mode
return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

"""Iterate through file_ids for this tree.

file_ids are in a WorkingTree if they are in the working inventory
and the working file exists.
"""
for key, tree_details in self.current_dirstate()._iter_entries():
if tree_details[0][0] in ('a', 'r'): # absent, relocated
# not relevant to the working tree
path = pathjoin(self.basedir, key[0].decode('utf8'), key[1].decode('utf8'))
if osutils.lexists(path):
result.append(key[2])

def iter_references(self):
for key, tree_details in self.current_dirstate()._iter_entries():
if tree_details[0][0] in ('a', 'r'): # absent, relocated
# not relevant to the working tree
# the root is not a reference.
path = pathjoin(self.basedir, key[0].decode('utf8'), key[1].decode('utf8'))
if self._kind(path) == 'tree-reference':
except errors.NoSuchFile:
# path is missing on disk.

def kind(self, file_id):
"""Return the kind of a file.

This is always the actual kind that's on disk, regardless of what it
was added as.

Note: The caller is expected to take a read-lock before calling this.
"""
relpath = self.id2path(file_id)
assert relpath is not None, \
"path for id {%s} is None!" % file_id
return self._kind(relpath)

def _kind(self, relpath):
abspath = self.abspath(relpath)
kind = file_kind(abspath)
if (self._repo_supports_tree_reference and
kind == 'directory' and
self._directory_is_tree_reference(relpath)):
kind = 'tree-reference'

def _last_revision(self):
"""See Mutable.last_revision."""
parent_ids = self.current_dirstate().get_parent_ids()
return _mod_revision.NULL_REVISION
"""See Branch.lock_read, and WorkingTree.unlock."""
579
self.branch.lock_read()
581
self._control_files.lock_read()
583
state = self.current_dirstate()
584
if not state._lock_token:
586
# set our support for tree references from the repository in
588
self._repo_supports_tree_reference = getattr(
589
self.branch.repository._format, "supports_tree_reference",
592
self._control_files.unlock()
598
def _lock_self_write(self):
599
"""This should be called after the branch is locked."""
601
self._control_files.lock_write()
603
state = self.current_dirstate()
604
if not state._lock_token:
606
# set our support for tree references from the repository in
608
self._repo_supports_tree_reference = getattr(
609
self.branch.repository._format, "supports_tree_reference",
612
self._control_files.unlock()
618
def lock_tree_write(self):
619
"""See MutableTree.lock_tree_write, and WorkingTree.unlock."""
620
self.branch.lock_read()
621
self._lock_self_write()
623
def lock_write(self):
624
"""See MutableTree.lock_write, and WorkingTree.unlock."""
625
self.branch.lock_write()
626
self._lock_self_write()
628
@needs_tree_write_lock
629
def move(self, from_paths, to_dir, after=False):
630
"""See WorkingTree.move()."""
635
state = self.current_dirstate()
637
assert not isinstance(from_paths, basestring)
638
to_dir_utf8 = to_dir.encode('utf8')
639
to_entry_dirname, to_basename = os.path.split(to_dir_utf8)
640
id_index = state._get_id_index()
641
# check destination directory
642
# get the details for it
643
to_entry_block_index, to_entry_entry_index, dir_present, entry_present = \
644
state._get_block_entry_index(to_entry_dirname, to_basename, 0)
645
if not entry_present:
646
raise errors.BzrMoveFailedError('', to_dir,
647
errors.NotVersionedError(to_dir))
648
to_entry = state._dirblocks[to_entry_block_index][1][to_entry_entry_index]
649
# get a handle on the block itself.
650
to_block_index = state._ensure_block(
651
to_entry_block_index, to_entry_entry_index, to_dir_utf8)
652
to_block = state._dirblocks[to_block_index]
653
to_abs = self.abspath(to_dir)
654
if not isdir(to_abs):
655
raise errors.BzrMoveFailedError('',to_dir,
656
errors.NotADirectory(to_abs))
658
if to_entry[1][0][0] != 'd':
659
raise errors.BzrMoveFailedError('',to_dir,
660
errors.NotADirectory(to_abs))
662
if self._inventory is not None:
663
update_inventory = True
665
to_dir_ie = inv[to_dir_id]
666
to_dir_id = to_entry[0][2]
668
update_inventory = False
671
def move_one(old_entry, from_path_utf8, minikind, executable,
fingerprint, packed_stat, size,
to_block, to_key, to_path_utf8):
state._make_absent(old_entry)
from_key = old_entry[0]
lambda: state.update_minimal(from_key,
executable=executable,
fingerprint=fingerprint,
packed_stat=packed_stat,
path_utf8=from_path_utf8))
state.update_minimal(to_key,
executable=executable,
fingerprint=fingerprint,
packed_stat=packed_stat,
path_utf8=to_path_utf8)
added_entry_index, _ = state._find_entry_index(to_key, to_block[1])
new_entry = to_block[1][added_entry_index]
rollbacks.append(lambda: state._make_absent(new_entry))

for from_rel in from_paths:
# from_rel is 'pathinroot/foo/bar'
from_rel_utf8 = from_rel.encode('utf8')
from_dirname, from_tail = osutils.split(from_rel)
from_dirname, from_tail_utf8 = osutils.split(from_rel_utf8)
from_entry = self._get_entry(path=from_rel)
if from_entry == (None, None):
raise errors.BzrMoveFailedError(from_rel, to_dir,
errors.NotVersionedError(path=str(from_rel)))
from_id = from_entry[0][2]
to_rel = pathjoin(to_dir, from_tail)
to_rel_utf8 = pathjoin(to_dir_utf8, from_tail_utf8)
item_to_entry = self._get_entry(path=to_rel)
if item_to_entry != (None, None):
raise errors.BzrMoveFailedError(from_rel, to_rel,
"Target is already versioned.")
if from_rel == to_rel:
raise errors.BzrMoveFailedError(from_rel, to_rel,
"Source and target are identical.")
from_missing = not self.has_filename(from_rel)
to_missing = not self.has_filename(to_rel)
raise errors.BzrMoveFailedError(from_rel, to_rel,
errors.NoSuchFile(path=to_rel,
extra="New file has not been created yet"))
# neither path exists
raise errors.BzrRenameFailedError(from_rel, to_rel,
errors.PathsDoNotExist(paths=(from_rel, to_rel)))
if from_missing: # implicitly just update our path mapping
raise errors.RenameFailedFilesExist(from_rel, to_rel,
extra="(Use --after to update the Bazaar id)")
def rollback_rename():
"""A single rename has failed, roll it back."""
# roll back everything, even if we encounter trouble doing one
# TODO: at least log the other exceptions rather than just
# losing them mbp 20070307
for rollback in reversed(rollbacks):
exc_info = sys.exc_info()
raise exc_info[0], exc_info[1], exc_info[2]

# perform the disk move first - it's the most likely failure point.
from_rel_abs = self.abspath(from_rel)
to_rel_abs = self.abspath(to_rel)
osutils.rename(from_rel_abs, to_rel_abs)
raise errors.BzrMoveFailedError(from_rel, to_rel, e[1])
rollbacks.append(lambda: osutils.rename(to_rel_abs, from_rel_abs))
# perform the rename in the inventory next if needed: it's easy
from_entry = inv[from_id]
current_parent = from_entry.parent_id
inv.rename(from_id, to_dir_id, from_tail)
lambda: inv.rename(from_id, current_parent, from_tail))
# finally do the rename in the dirstate, which is a little
# tricky to rollback, but least likely to need it.
old_block_index, old_entry_index, dir_present, file_present = \
state._get_block_entry_index(from_dirname, from_tail_utf8, 0)
old_block = state._dirblocks[old_block_index][1]
old_entry = old_block[old_entry_index]
from_key, old_entry_details = old_entry
cur_details = old_entry_details[0]
to_key = ((to_block[0],) + from_key[1:3])
minikind = cur_details[0]
move_one(old_entry, from_path_utf8=from_rel_utf8,
executable=cur_details[3],
fingerprint=cur_details[1],
packed_stat=cur_details[4],
to_path_utf8=to_rel_utf8)
def update_dirblock(from_dir, to_key, to_dir_utf8):
"""Recursively update all entries in this dirblock."""
assert from_dir != '', "renaming root not supported"
from_key = (from_dir, '')
from_block_idx, present = \
state._find_block_index_from_key(from_key)
# This is the old record, if it isn't present, then
# there is theoretically nothing to update.
# (Unless it isn't present because of lazy loading,
# but we don't do that yet)
from_block = state._dirblocks[from_block_idx]
to_block_index, to_entry_index, _, _ = \
state._get_block_entry_index(to_key[0], to_key[1], 0)
to_block_index = state._ensure_block(
to_block_index, to_entry_index, to_dir_utf8)
to_block = state._dirblocks[to_block_index]
# Grab a copy since move_one may update the list.
for entry in from_block[1][:]:
assert entry[0][0] == from_dir
cur_details = entry[1][0]
to_key = (to_dir_utf8, entry[0][1], entry[0][2])
from_path_utf8 = osutils.pathjoin(entry[0][0], entry[0][1])
to_path_utf8 = osutils.pathjoin(to_dir_utf8, entry[0][1])
minikind = cur_details[0]
# Deleted children of a renamed directory
# do not need to be updated.
# Children that have been renamed out of this
# directory should also not be updated
move_one(entry, from_path_utf8=from_path_utf8,
executable=cur_details[3],
fingerprint=cur_details[1],
packed_stat=cur_details[4],
to_path_utf8=to_path_utf8)
# We need to move all the children of this
update_dirblock(from_path_utf8, to_key,
update_dirblock(from_rel_utf8, to_key, to_rel_utf8)
result.append((from_rel, to_rel))
state._dirblock_state = dirstate.DirState.IN_MEMORY_MODIFIED
self._make_dirty(reset_inventory=False)

def _must_be_locked(self):
if not self._control_files._lock_count:
raise errors.ObjectNotLocked(self)
"""Initialize the state in this tree to be a new tree."""
863
def path2id(self, path):
864
"""Return the id for path in this tree."""
865
path = path.strip('/')
866
entry = self._get_entry(path=path)
867
if entry == (None, None):
871
def paths2ids(self, paths, trees=[], require_versioned=True):
872
"""See Tree.paths2ids().
874
This specialisation fast-paths the case where all the trees are in the
879
parents = self.get_parent_ids()
881
if not (isinstance(tree, DirStateRevisionTree) and tree._revision_id in
883
return super(WorkingTree4, self).paths2ids(paths, trees, require_versioned)
884
search_indexes = [0] + [1 + parents.index(tree._revision_id) for tree in trees]
885
# -- make all paths utf8 --
888
paths_utf8.add(path.encode('utf8'))
890
# -- paths is now a utf8 path set --
891
# -- get the state object and prepare it.
892
state = self.current_dirstate()
893
if False and (state._dirblock_state == dirstate.DirState.NOT_IN_MEMORY
894
and '' not in paths):
895
paths2ids = self._paths2ids_using_bisect
897
paths2ids = self._paths2ids_in_memory
898
return paths2ids(paths, search_indexes,
899
require_versioned=require_versioned)
901
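# Illustrative sketch (hypothetical paths): map several paths to file ids in
# the working tree and its basis in one pass; PathsNotVersionedError is raised
# if a path is versioned in none of the trees.
#
#   basis = wt.basis_tree()
#   ids = wt.paths2ids(['doc', 'src/main.c'], trees=[basis])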
def _paths2ids_in_memory(self, paths, search_indexes,
require_versioned=True):
state = self.current_dirstate()
state._read_dirblocks_if_needed()

def _entries_for_path(path):
"""Return a list with all the entries that match path for all ids."""
dirname, basename = os.path.split(path)
key = (dirname, basename, '')
block_index, present = state._find_block_index_from_key(key)
# the block which should contain path is absent.
block = state._dirblocks[block_index][1]
entry_index, _ = state._find_entry_index(key, block)
# we may need to look at multiple entries at this path: walk while the paths match.
while (entry_index < len(block) and
block[entry_index][0][0:2] == key[0:2]):
result.append(block[entry_index])

if require_versioned:
# -- check all supplied paths are versioned in a search tree. --
path_entries = _entries_for_path(path)
# this specified path is not present at all: error
all_versioned = False
found_versioned = False
# for each id at this path
for entry in path_entries:
for index in search_indexes:
if entry[1][index][0] != 'a': # absent
found_versioned = True
# all good: found a versioned cell
if not found_versioned:
# no index had a non-'absent' entry for any id at this path
all_versioned = False
if not all_versioned:
raise errors.PathsNotVersionedError(paths)
# -- remove redundancy in supplied paths to prevent over-scanning --
search_paths = osutils.minimum_path_selection(paths)
# for all search_indexes in each path at or under each element of
# search_paths, if the detail is relocated: add the id, and add the
# relocated path as one to search if it's not searched already. If the
# detail is not relocated, add the id.
searched_paths = set()
def _process_entry(entry):
"""Look at search_indexes within entry.

If a specific tree's details are relocated, add the relocation
target to search_paths if not searched already. If it is absent, do
nothing. Otherwise add the id to found_ids.
"""
for index in search_indexes:
if entry[1][index][0] == 'r': # relocated
if not osutils.is_inside_any(searched_paths, entry[1][index][1]):
search_paths.add(entry[1][index][1])
elif entry[1][index][0] != 'a': # absent
found_ids.add(entry[0][2])

current_root = search_paths.pop()
searched_paths.add(current_root)
# process the entries for this containing directory: the rest will be
# found by their parents recursively.
root_entries = _entries_for_path(current_root)
# this specified path is not present at all, skip it.
for entry in root_entries:
_process_entry(entry)
initial_key = (current_root, '', '')
block_index, _ = state._find_block_index_from_key(initial_key)
while (block_index < len(state._dirblocks) and
osutils.is_inside(current_root, state._dirblocks[block_index][0])):
for entry in state._dirblocks[block_index][1]:
_process_entry(entry)

def _paths2ids_using_bisect(self, paths, search_indexes,
require_versioned=True):
state = self.current_dirstate()
split_paths = sorted(osutils.split(p) for p in paths)
found = state._bisect_recursive(split_paths)
if require_versioned:
found_dir_names = set(dir_name_id[:2] for dir_name_id in found)
for dir_name in split_paths:
if dir_name not in found_dir_names:
raise errors.PathsNotVersionedError(paths)
for dir_name_id, trees_info in found.iteritems():
for index in search_indexes:
if trees_info[index][0] not in ('r', 'a'):
found_ids.add(dir_name_id[2])
def read_working_inventory(self):
"""Read the working inventory.

This is a meaningless operation for dirstate, but we obey it anyhow.
"""
return self.inventory

def revision_tree(self, revision_id):
"""See Tree.revision_tree.

WorkingTree4 supplies revision_trees for any basis tree.
"""
revision_id = osutils.safe_revision_id(revision_id)
dirstate = self.current_dirstate()
parent_ids = dirstate.get_parent_ids()
if revision_id not in parent_ids:
raise errors.NoSuchRevisionInTree(self, revision_id)
if revision_id in dirstate.get_ghosts():
raise errors.NoSuchRevisionInTree(self, revision_id)
return DirStateRevisionTree(dirstate, revision_id,
self.branch.repository)
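# Illustrative sketch: any non-ghost parent recorded in the dirstate can be
# opened as a DirStateRevisionTree without reading the repository inventory.
#
#   parent_id = wt.get_parent_ids()[0]     # assumes at least one parent
#   basis = wt.revision_tree(parent_id)    # a DirStateRevisionTree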
@needs_tree_write_lock
def set_last_revision(self, new_revision):
"""Change the last revision in the working tree."""
new_revision = osutils.safe_revision_id(new_revision)
parents = self.get_parent_ids()
if new_revision in (NULL_REVISION, None):
assert len(parents) < 2, (
"setting the last parent to none with a pending merge is "
self.set_parent_ids([])
self.set_parent_ids([new_revision] + parents[1:],
allow_leftmost_as_ghost=True)

@needs_tree_write_lock
def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
"""Set the parent ids to revision_ids.

See also set_parent_trees. This API will try to retrieve the tree data
for each element of revision_ids from the tree's repository. If you have
tree data already available, it is more efficient to use
set_parent_trees rather than set_parent_ids. set_parent_ids is however
an easier API to use.

:param revision_ids: The revision_ids to set as the parent ids of this
working tree. Any of these may be ghosts.
"""
revision_ids = [osutils.safe_revision_id(r) for r in revision_ids]
for revision_id in revision_ids:
revtree = self.branch.repository.revision_tree(revision_id)
# TODO: jam 20070213 KnitVersionedFile raises
# RevisionNotPresent rather than NoSuchRevision if a
# given revision_id is not present. Should Repository be
# catching it and re-raising NoSuchRevision?
except (errors.NoSuchRevision, errors.RevisionNotPresent):
trees.append((revision_id, revtree))
self.set_parent_trees(trees,
allow_leftmost_as_ghost=allow_leftmost_as_ghost)
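# Illustrative sketch (hypothetical revision id): after a merge, record the
# merged revision as a second parent; as the docstring notes, prefer
# set_parent_trees when the trees are already in hand.
#
#   wt.set_parent_ids([wt.last_revision(), 'merged-revision-id'])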
@needs_tree_write_lock
def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
"""Set the parents of the working tree.

:param parents_list: A list of (revision_id, tree) tuples.
If tree is None, then that element is treated as an unreachable
parent tree - i.e. a ghost.
"""
dirstate = self.current_dirstate()
if len(parents_list) > 0:
if not allow_leftmost_as_ghost and parents_list[0][1] is None:
raise errors.GhostRevisionUnusableHere(parents_list[0][0])
# convert absent trees to the null tree, which we convert back to
# missing on access.
for rev_id, tree in parents_list:
rev_id = osutils.safe_revision_id(rev_id)
_mod_revision.check_not_reserved_id(rev_id)
if tree is not None:
real_trees.append((rev_id, tree))
real_trees.append((rev_id,
self.branch.repository.revision_tree(None)))
ghosts.append(rev_id)
dirstate.set_parent_trees(real_trees, ghosts=ghosts)
self._make_dirty(reset_inventory=False)

def _set_root_id(self, file_id):
"""See WorkingTree.set_root_id."""
state = self.current_dirstate()
state.set_path_id('', file_id)
if state._dirblock_state == dirstate.DirState.IN_MEMORY_MODIFIED:
self._make_dirty(reset_inventory=True)

def supports_tree_reference(self):
return self._repo_supports_tree_reference
"""Unlock in format 4 trees needs to write the entire dirstate."""
1116
# do non-implementation specific cleanup
1119
if self._control_files._lock_count == 1:
1120
# eventually we should do signature checking during read locks for
1122
if self._control_files._lock_mode == 'w':
1125
if self._dirstate is not None:
1126
# This is a no-op if there are no modifications.
1127
self._dirstate.save()
1128
self._dirstate.unlock()
1129
# TODO: jam 20070301 We shouldn't have to wipe the dirstate at this
1130
# point. Instead, it could check if the header has been
1131
# modified when it is locked, and if not, it can hang on to
1132
# the data it has in memory.
1133
self._dirstate = None
1134
self._inventory = None
1135
# reverse order of locking.
1137
return self._control_files.unlock()
1139
self.branch.unlock()
1141
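# Illustrative sketch: because unlock() flushes the dirstate, a typical write
# sequence is lock, mutate, unlock ('newfile' is a hypothetical path).
#
#   wt.lock_tree_write()
#   try:
#       wt.add(['newfile'])
#   finally:
#       wt.unlock()   # saves the modified dirstate to disk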
@needs_tree_write_lock
def unversion(self, file_ids):
"""Remove the file ids in file_ids from the current versioned set.

When a file_id is unversioned, all of its children are automatically
unversioned.

:param file_ids: The file ids to stop versioning.
:raises: NoSuchId if any file_id is not currently versioned.
"""
state = self.current_dirstate()
state._read_dirblocks_if_needed()
ids_to_unversion = set()
for file_id in file_ids:
ids_to_unversion.add(osutils.safe_file_id(file_id))
paths_to_unversion = set()
# check if the root is to be unversioned, if so, assert for now.
# walk the state marking unversioned things as absent.
# if there are any un-unversioned ids at the end, raise
for key, details in state._dirblocks[0][1]:
if (details[0][0] not in ('a', 'r') and # absent or relocated
key[2] in ids_to_unversion):
# I haven't written the code to unversion / yet - it should be
raise errors.BzrError('Unversioning the / is not currently supported')
while block_index < len(state._dirblocks):
# process one directory at a time.
block = state._dirblocks[block_index]
# first check: is the path one to remove - it or its children
delete_block = False
for path in paths_to_unversion:
if (block[0].startswith(path) and
(len(block[0]) == len(path) or
block[0][len(path)] == '/')):
# this entire block should be deleted - it's the block for a
# path to unversion; or the child of one
# TODO: trim paths_to_unversion as we pass by paths
# this block is to be deleted: process it.
# TODO: we can special case the no-parents case and
# just forget the whole block.
while entry_index < len(block[1]):
# Mark this file id as having been removed
entry = block[1][entry_index]
ids_to_unversion.discard(entry[0][2])
if (entry[1][0][0] == 'a'
or not state._make_absent(entry)):
# go to the next block. (At the moment we don't delete empty
while entry_index < len(block[1]):
entry = block[1][entry_index]
if (entry[1][0][0] in ('a', 'r') or # absent, relocated
# ^ some parent row.
entry[0][2] not in ids_to_unversion):
# ^ not an id to unversion
if entry[1][0][0] == 'd':
paths_to_unversion.add(pathjoin(entry[0][0], entry[0][1]))
if not state._make_absent(entry):
# we have unversioned this id
ids_to_unversion.remove(entry[0][2])
if ids_to_unversion:
raise errors.NoSuchId(self, iter(ids_to_unversion).next())
self._make_dirty(reset_inventory=False)
# have to change the legacy inventory too.
if self._inventory is not None:
for file_id in file_ids:
self._inventory.remove_recursive_id(file_id)

def _validate(self):
self._dirstate._validate()

@needs_tree_write_lock
def _write_inventory(self, inv):
"""Write inventory as the current inventory."""
assert not self._dirty, "attempting to write an inventory when the dirstate is dirty will cause data loss"
self.current_dirstate().set_state_from_inventory(inv)
self._make_dirty(reset_inventory=False)
if self._inventory is not None:
self._inventory = inv
class WorkingTreeFormat4(WorkingTreeFormat3):
"""The first consolidated dirstate working tree format.

This format:
- exists within a metadir controlling .bzr
- includes an explicit version marker for the workingtree control
files, separate from the BzrDir format
- modifies the hash cache format
- is new in bzr 0.15
- uses a LockDir to guard access to it.
"""

upgrade_recommended = False

def get_format_string(self):
"""See WorkingTreeFormat.get_format_string()."""
return "Bazaar Working Tree Format 4 (bzr 0.15)\n"

def get_format_description(self):
"""See WorkingTreeFormat.get_format_description()."""
return "Working tree format 4"
def initialize(self, a_bzrdir, revision_id=None):
"""See WorkingTreeFormat.initialize().

:param revision_id: allows creating a working tree at a different
revision than the branch is at.

These trees get an initial random root id if their repository supports
rich root data; TREE_ROOT otherwise.
"""
revision_id = osutils.safe_revision_id(revision_id)
if not isinstance(a_bzrdir.transport, LocalTransport):
raise errors.NotLocalUrl(a_bzrdir.transport.base)
transport = a_bzrdir.get_workingtree_transport(self)
control_files = self._open_control_files(a_bzrdir)
control_files.create_lock()
control_files.lock_write()
control_files.put_utf8('format', self.get_format_string())
branch = a_bzrdir.open_branch()
if revision_id is None:
revision_id = branch.last_revision()
local_path = transport.local_abspath('dirstate')
# write out new dirstate (must exist when we create the tree)
state = dirstate.DirState.initialize(local_path)
wt = WorkingTree4(a_bzrdir.root_transport.local_abspath('.'),
_control_files=control_files)
wt.lock_tree_write()
if revision_id in (None, NULL_REVISION):
if branch.repository.supports_rich_root():
wt._set_root_id(generate_ids.gen_root_id())
wt._set_root_id(ROOT_ID)
wt.set_last_revision(revision_id)
basis = wt.basis_tree()
# if the basis has a root id we have to use that; otherwise we use
basis_root_id = basis.get_root_id()
if basis_root_id is not None:
wt._set_root_id(basis_root_id)
transform.build_tree(basis, wt)
control_files.unlock()

def _open(self, a_bzrdir, control_files):
"""Open the tree itself.

:param a_bzrdir: the dir for the tree.
:param control_files: the control files for the tree.
"""
return WorkingTree4(a_bzrdir.root_transport.local_abspath('.'),
branch=a_bzrdir.open_branch(),
_control_files=control_files)

def __get_matchingbzrdir(self):
# please test against something that will let us do tree references
return bzrdir.format_registry.make_bzrdir(
'dirstate-with-subtree')

_matchingbzrdir = property(__get_matchingbzrdir)
class DirStateRevisionTree(Tree):
"""A revision tree pulling the inventory from a dirstate."""

def __init__(self, dirstate, revision_id, repository):
self._dirstate = dirstate
self._revision_id = osutils.safe_revision_id(revision_id)
self._repository = repository
self._inventory = None
self._dirstate_locked = False

def __repr__(self):
return "<%s of %s in %s>" % \
(self.__class__.__name__, self._revision_id, self._dirstate)

def annotate_iter(self, file_id,
default_revision=_mod_revision.CURRENT_REVISION):
"""See Tree.annotate_iter"""
w = self._get_weave(file_id)
return w.annotate_iter(self.inventory[file_id].revision)

def _get_ancestors(self, default_revision):
return set(self._repository.get_ancestry(self._revision_id,

def _comparison_data(self, entry, path):
"""See Tree._comparison_data."""
return None, False, None
# trust the entry as RevisionTree does, but this may not be
# sensible: the entry might not have come from us?
return entry.kind, entry.executable, None

def _file_size(self, entry, stat_value):
return entry.text_size

def filter_unversioned_files(self, paths):
"""Filter out paths that are versioned.

:return: set of paths.
"""
pred = self.has_filename
return set((p for p in paths if not pred(p)))

def get_root_id(self):
return self.path2id('')

def _get_parent_index(self):
"""Return the index in the dirstate referenced by this tree."""
return self._dirstate.get_parent_ids().index(self._revision_id) + 1

def _get_entry(self, file_id=None, path=None):
"""Get the dirstate row for file_id or path.

If either file_id or path is supplied, it is used as the key to lookup.
If both are supplied, the fastest lookup is used, and an error is
raised if they do not both point at the same row.

:param file_id: An optional unicode file_id to be looked up.
:param path: An optional unicode path to be looked up.
:return: The dirstate row tuple for path/file_id, or (None, None)
"""
if file_id is None and path is None:
raise errors.BzrError('must supply file_id or path')
file_id = osutils.safe_file_id(file_id)
if path is not None:
path = path.encode('utf8')
parent_index = self._get_parent_index()
return self._dirstate._get_entry(parent_index, fileid_utf8=file_id, path_utf8=path)
def _generate_inventory(self):
"""Create and set self.inventory from the dirstate object.

(So this is only called the first time the inventory is requested for
this tree; it then remains in memory until it's out of date.)

This is relatively expensive: we have to walk the entire dirstate.
"""
assert self._locked, 'cannot generate inventory of an unlocked '\
'dirstate revision tree'
# separate call for profiling - makes it clear where the costs are.
self._dirstate._read_dirblocks_if_needed()
assert self._revision_id in self._dirstate.get_parent_ids(), \
'parent %s has disappeared from %s' % (
self._revision_id, self._dirstate.get_parent_ids())
parent_index = self._dirstate.get_parent_ids().index(self._revision_id) + 1
# This is identical now to the WorkingTree _generate_inventory except
# for the tree index use.
root_key, current_entry = self._dirstate._get_entry(parent_index, path_utf8='')
current_id = root_key[2]
assert current_entry[parent_index][0] == 'd'
inv = Inventory(root_id=current_id, revision_id=self._revision_id)
inv.root.revision = current_entry[parent_index][4]
# Turn some things into local variables
minikind_to_kind = dirstate.DirState._minikind_to_kind
factory = entry_factory
utf8_decode = cache_utf8._utf8_decode
inv_byid = inv._byid
# we could do this straight out of the dirstate; it might be fast
# and should be profiled - RBC 20070216
parent_ies = {'' : inv.root}
for block in self._dirstate._dirblocks[1:]: # skip root
parent_ie = parent_ies[dirname]
# all the paths in this block are not versioned in this tree
for key, entry in block[1]:
minikind, fingerprint, size, executable, revid = entry[parent_index]
if minikind in ('a', 'r'): # absent, relocated
name_unicode = utf8_decode(name)[0]
kind = minikind_to_kind[minikind]
inv_entry = factory[kind](file_id, name_unicode,
inv_entry.revision = revid
inv_entry.executable = executable
inv_entry.text_size = size
inv_entry.text_sha1 = fingerprint
elif kind == 'directory':
parent_ies[(dirname + '/' + name).strip('/')] = inv_entry
elif kind == 'symlink':
inv_entry.executable = False
inv_entry.text_size = None
inv_entry.symlink_target = utf8_decode(fingerprint)[0]
elif kind == 'tree-reference':
inv_entry.reference_revision = fingerprint or None
raise AssertionError("cannot convert entry %r into an InventoryEntry"
# These checks cost us around 40ms on a 55k entry tree
assert file_id not in inv_byid
assert name_unicode not in parent_ie.children
inv_byid[file_id] = inv_entry
parent_ie.children[name_unicode] = inv_entry
self._inventory = inv
def get_file_mtime(self, file_id, path=None):
"""Return the modification time for this record.

We return the timestamp of the last-changed revision.
"""
# Make sure the file exists
entry = self._get_entry(file_id, path=path)
if entry == (None, None): # do we raise?
parent_index = self._get_parent_index()
last_changed_revision = entry[1][parent_index][4]
return self._repository.get_revision(last_changed_revision).timestamp

def get_file_sha1(self, file_id, path=None, stat_value=None):
entry = self._get_entry(file_id=file_id, path=path)
parent_index = self._get_parent_index()
parent_details = entry[1][parent_index]
if parent_details[0] == 'f':
return parent_details[1]

@symbol_versioning.deprecated_method(symbol_versioning.zero_ninety)
def get_weave(self, file_id):
return self._get_weave(file_id)

def _get_weave(self, file_id):
return self._repository.weave_store.get_weave(file_id,
self._repository.get_transaction())

def get_file(self, file_id, path=None):
return StringIO(self.get_file_text(file_id))

def get_file_lines(self, file_id):
ie = self.inventory[file_id]
return self._get_weave(file_id).get_lines(ie.revision)

def get_file_size(self, file_id):
return self.inventory[file_id].text_size

def get_file_text(self, file_id):
return ''.join(self.get_file_lines(file_id))

def get_reference_revision(self, file_id, path=None):
return self.inventory[file_id].reference_revision

def iter_files_bytes(self, desired_files):
"""See Tree.iter_files_bytes.

This version is implemented on top of Repository.iter_files_bytes"""
parent_index = self._get_parent_index()
repo_desired_files = []
for file_id, identifier in desired_files:
entry = self._get_entry(file_id)
if entry == (None, None):
raise errors.NoSuchId(self, file_id)
repo_desired_files.append((file_id, entry[1][parent_index][4],
return self._repository.iter_files_bytes(repo_desired_files)

def get_symlink_target(self, file_id):
entry = self._get_entry(file_id=file_id)
parent_index = self._get_parent_index()
if entry[1][parent_index][0] != 'l':
# At present, none of the tree implementations supports non-ascii
# symlink targets. So we will just assume that the dirstate path is
# correct.
return entry[1][parent_index][1]

def get_revision_id(self):
"""Return the revision id for this tree."""
return self._revision_id
def _get_inventory(self):
if self._inventory is not None:
return self._inventory
self._must_be_locked()
self._generate_inventory()
return self._inventory

inventory = property(_get_inventory,
doc="Inventory of this Tree")

def get_parent_ids(self):
"""The parents of a tree in the dirstate are not cached."""
return self._repository.get_revision(self._revision_id).parent_ids

def has_filename(self, filename):
return bool(self.path2id(filename))

def kind(self, file_id):
return self.inventory[file_id].kind

def is_executable(self, file_id, path=None):
ie = self.inventory[file_id]
if ie.kind != "file":
return ie.executable

def list_files(self, include_root=False):
# We use a standard implementation, because DirStateRevisionTree is
# dealing with one of the parents of the current state
inv = self._get_inventory()
entries = inv.iter_entries()
if self.inventory.root is not None and not include_root:
for path, entry in entries:
yield path, 'V', entry.kind, entry.file_id, entry

def lock_read(self):
"""Lock the tree for a set of operations."""
if not self._locked:
self._repository.lock_read()
if self._dirstate._lock_token is None:
self._dirstate.lock_read()
self._dirstate_locked = True

def _must_be_locked(self):
if not self._locked:
raise errors.ObjectNotLocked(self)

def path2id(self, path):
"""Return the id for path in this tree."""
# lookup by path: faster than splitting and walking the inventory.
entry = self._get_entry(path=path)
if entry == (None, None):

def unlock(self):
"""Unlock, freeing any cache memory used during the lock."""
# outside of a lock, the inventory is suspect: release it.
if not self._locked:
self._inventory = None
if self._dirstate_locked:
self._dirstate.unlock()
self._dirstate_locked = False
self._repository.unlock()
def walkdirs(self, prefix=""):
1623
# TODO: jam 20070215 This is the lazy way by using the RevisionTree
1624
# implementation based on an inventory.
1625
# This should be cleaned up to use the much faster Dirstate code
1626
# So for now, we just build up the parent inventory, and extract
1627
# it the same way RevisionTree does.
1628
_directory = 'directory'
1629
inv = self._get_inventory()
1630
top_id = inv.path2id(prefix)
1634
pending = [(prefix, top_id)]
1637
relpath, file_id = pending.pop()
1638
# 0 - relpath, 1- file-id
1640
relroot = relpath + '/'
1643
# FIXME: stash the node in pending
1644
entry = inv[file_id]
1645
for name, child in entry.sorted_children():
1646
toppath = relroot + name
1647
dirblock.append((toppath, name, child.kind, None,
1648
child.file_id, child.kind
1650
yield (relpath, entry.file_id), dirblock
1651
# push the user specified dirs from dirblock
1652
for dir in reversed(dirblock):
1653
if dir[2] == _directory:
1654
pending.append((dir[0], dir[4]))
1657
class InterDirStateTree(InterTree):
"""Fast path optimiser for changes_from with dirstate trees.

This is used only when both trees are in the dirstate working file, and
the source is any parent within the dirstate, and the destination is
the current working tree of the same dirstate.
"""
# this could be generalized to allow comparisons between any trees in the
# dirstate, and possibly between trees stored in different dirstates.
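# Illustrative sketch: callers normally reach this optimiser indirectly via
# the InterTree machinery, for example when comparing the working tree to its
# basis (the class docstring's changes_from case).
#
#   basis = wt.basis_tree()
#   delta = wt.changes_from(basis)   # dispatches to InterDirStateTree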
def __init__(self, source, target):
super(InterDirStateTree, self).__init__(source, target)
if not InterDirStateTree.is_compatible(source, target):
raise Exception("invalid source %r and target %r" % (source, target))

def make_source_parent_tree(source, target):
"""Change the source tree into a parent of the target."""
revid = source.commit('record tree')
target.branch.repository.fetch(source.branch.repository, revid)
target.set_parent_ids([revid])
return target.basis_tree(), target

_matching_from_tree_format = WorkingTreeFormat4()
_matching_to_tree_format = WorkingTreeFormat4()
_test_mutable_trees_to_test_trees = make_source_parent_tree

def _iter_changes(self, include_unchanged=False,
specific_files=None, pb=None, extra_trees=[],
require_versioned=True, want_unversioned=False):
"""Return the changes from source to target.

:return: An iterator that yields tuples. See InterTree._iter_changes
:param specific_files: An optional list of file paths to restrict the
comparison to. When mapping filenames to ids, all matches in all
trees (including optional extra_trees) are used, and all children of
matched directories are included.
:param include_unchanged: An optional boolean requesting the inclusion of
unchanged entries in the result.
:param extra_trees: An optional list of additional trees to use when
mapping the contents of specific_files (paths) to file_ids.
:param require_versioned: If True, all files in specific_files must be
versioned in one of source, target or extra_trees, or
PathsNotVersionedError is raised.
:param want_unversioned: Should unversioned files be returned in the
output. An unversioned file is defined as one with (False, False)
for the versioned pair.
"""
utf8_decode = cache_utf8._utf8_decode
_minikind_to_kind = dirstate.DirState._minikind_to_kind
cmp_by_dirs = dirstate.cmp_by_dirs
# NB: show_status depends on being able to pass in non-versioned files
# and report them as unknown
# TODO: handle extra trees in the dirstate.
# TODO: handle comparisons to an empty tree as a different special
# case? mbp 20070226
if (extra_trees or (self.source._revision_id == NULL_REVISION)
or specific_files == []):
# we can't fast-path these cases (yet)
for f in super(InterDirStateTree, self)._iter_changes(
include_unchanged, specific_files, pb, extra_trees,
require_versioned, want_unversioned=want_unversioned):
parent_ids = self.target.get_parent_ids()
assert (self.source._revision_id in parent_ids), \
"revision {%s} is not stored in {%s}, but %s " \
"can only be used for trees stored in the dirstate" \
% (self.source._revision_id, self.target, self._iter_changes)
if self.source._revision_id == NULL_REVISION:
indices = (target_index,)
assert (self.source._revision_id in parent_ids), \
"Failure: source._revision_id: %s not in target.parent_ids(%s)" % (
self.source._revision_id, parent_ids)
source_index = 1 + parent_ids.index(self.source._revision_id)
indices = (source_index, target_index)
# -- make all specific_files utf8 --
specific_files_utf8 = set()
for path in specific_files:
specific_files_utf8.add(path.encode('utf8'))
specific_files = specific_files_utf8
specific_files = set([''])
# -- specific_files is now a utf8 path set --
# -- get the state object and prepare it.
state = self.target.current_dirstate()
state._read_dirblocks_if_needed()
def _entries_for_path(path):
1750
"""Return a list with all the entries that match path for all ids.
1752
dirname, basename = os.path.split(path)
1753
key = (dirname, basename, '')
1754
block_index, present = state._find_block_index_from_key(key)
1756
# the block which should contain path is absent.
1759
block = state._dirblocks[block_index][1]
1760
entry_index, _ = state._find_entry_index(key, block)
1761
# we may need to look at multiple entries at this path: walk while the specific_files match.
1762
while (entry_index < len(block) and
1763
block[entry_index][0][0:2] == key[0:2]):
1764
result.append(block[entry_index])
1767
if require_versioned:
1768
# -- check all supplied paths are versioned in a search tree. --
1769
all_versioned = True
1770
for path in specific_files:
1771
path_entries = _entries_for_path(path)
1772
if not path_entries:
1773
# this specified path is not present at all: error
1774
all_versioned = False
1776
found_versioned = False
1777
# for each id at this path
1778
for entry in path_entries:
1780
for index in indices:
1781
if entry[1][index][0] != 'a': # absent
1782
found_versioned = True
1783
# all good: found a versioned cell
1785
if not found_versioned:
1786
# none of the indexes was not 'absent' at all ids for this
1788
all_versioned = False
1790
if not all_versioned:
1791
raise errors.PathsNotVersionedError(specific_files)
1792
        # -- remove redundancy in supplied specific_files to prevent over-scanning --
        search_specific_files = set()
        for path in specific_files:
            other_specific_files = specific_files.difference(set([path]))
            if not osutils.is_inside_any(other_specific_files, path):
                # this is a top level path, we must check it.
                search_specific_files.add(path)
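        # Worked example (comment added for clarity, not from the original
        # code): if specific_files were set(['a', 'a/b', 'c']), then 'a/b' is
        # inside 'a' and is dropped, leaving search_specific_files as
        # set(['a', 'c']) -- scanning 'a' will visit 'a/b' anyway.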
        # compare source_index and target_index at or under each element of search_specific_files.
        # follow the following comparison table. Note that we only want to do diff operations when
        # the target is fdl because that's when the walkdirs logic will have exposed the pathinfo
        # for the target.
        #
        # Source | Target | disk | action
        #   r    | fdlt   |      | add source to search, add id path move and perform
        #        |        |      | diff check on source-target
        #   r    | fdlt   |  a   | dangling file that was present in the basis.
        #   r    |  a     |      | add source to search
        #   r    |  r     |      | this path is present in a non-examined tree, skip.
        #   r    |  r     |  a   | this path is present in a non-examined tree, skip.
        #   a    | fdlt   |      | add new id
        #   a    | fdlt   |  a   | dangling locally added file, skip
        #   a    |  a     |      | not present in either tree, skip
        #   a    |  a     |  a   | not present in any tree, skip
        #   a    |  r     |      | not present in either tree at this path, skip as it
        #        |        |      | may not be selected by the user's list of paths.
        #   a    |  r     |  a   | not present in either tree at this path, skip as it
        #        |        |      | may not be selected by the user's list of paths.
        #  fdlt  | fdlt   |      | content in both: diff them
        #  fdlt  | fdlt   |  a   | deleted locally, but not unversioned - show as deleted ?
        #  fdlt  |  a     |      | unversioned: output deleted id for now
        #  fdlt  |  a     |  a   | unversioned and deleted: output deleted id
        #  fdlt  |  r     |      | relocated in this tree, so add target to search.
        #        |        |      | Don't diff, we will see an r,fd; pair when we reach
        #        |        |      | this id at the other path.
        #  fdlt  |  r     |  a   | relocated in this tree, so add target to search.
        #        |        |      | Don't diff, we will see an r,fd; pair when we reach
        #        |        |      | this id at the other path.
        #
        # for all search indexes in each path at or under each element of
        # search_specific_files, if the detail is relocated: add the id, and
        # add the relocated path as one to search if it's not searched already.
        # If the detail is not relocated, add the id.
        searched_specific_files = set()
        NULL_PARENT_DETAILS = dirstate.DirState.NULL_PARENT_DETAILS
        # Using a list so that we can access the values and change them in
        # nested scope. Each one is [path, file_id].
        last_source_parent = [None, None]
        last_target_parent = [None, None]

        use_filesystem_for_exec = (sys.platform != 'win32')

        # Just a sentinel, so that _process_entry can say that this
        # record is handled, but isn't interesting to process (unchanged)
        uninteresting = object()

        old_dirname_to_file_id = {}
        new_dirname_to_file_id = {}
        # TODO: jam 20070516 - Avoid the _get_entry lookup overhead by
        # keeping a cache of directories that we have seen.
        def _process_entry(entry, path_info):
            """Compare an entry and real disk to generate delta information.

            :param path_info: top_relpath, basename, kind, lstat, abspath for
                the path of entry. If None, then the path is considered absent.
                (Perhaps we should pass in a concrete entry for this ?)
                Basename is returned as a utf8 string because we expect this
                tuple will be ignored, and don't want to take the time to
                decode.
            :return: None if these don't match
                A tuple of information about the change, or
                the object 'uninteresting' if these match, but are
                basically identical.
            """
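            # Note (added for clarity; this mirrors the Tree._iter_changes
            # contract as I understand it): a "tuple of information about the
            # change" has the shape
            #   (file_id, (old_path, new_path), changed_content, versioned,
            #    parent, name, kind, executable)
            # where versioned, parent, name, kind and executable are each
            # (source_value, target_value) pairs.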
            if source_index is None:
                source_details = NULL_PARENT_DETAILS
            else:
                source_details = entry[1][source_index]
            target_details = entry[1][target_index]
            target_minikind = target_details[0]
            if path_info is not None and target_minikind in 'fdlt':
                assert target_index == 0
                link_or_sha1 = state.update_entry(entry, abspath=path_info[4],
                                                  stat_value=path_info[3])
                # The entry may have been modified by update_entry
                target_details = entry[1][target_index]
                target_minikind = target_details[0]
            else:
                link_or_sha1 = None
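            # Note (added for clarity, based on my reading of
            # dirstate.DirState.update_entry): link_or_sha1 appears to be the
            # file's sha1 (for regular files) or the symlink target (for
            # symlinks) when it could be determined, and None otherwise; it is
            # compared against the source fingerprint below.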
            file_id = entry[0][2]
            source_minikind = source_details[0]
            if source_minikind in 'fdltr' and target_minikind in 'fdlt':
                # claimed content in both: diff
                #   r    | fdlt   |      | add source to search, add id path move and perform
                #        |        |      | diff check on source-target
                #   r    | fdlt   |  a   | dangling file that was present in the basis.
                if source_minikind in 'r':
                    # add the source to the search path to find any children it
                    # has. TODO ? : only add if it is a container ?
                    if not osutils.is_inside_any(searched_specific_files,
                                                 source_details[1]):
                        search_specific_files.add(source_details[1])
                    # generate the old path; this is needed for stating later
                    # as well.
                    old_path = source_details[1]
                    old_dirname, old_basename = os.path.split(old_path)
                    path = pathjoin(entry[0][0], entry[0][1])
                    old_entry = state._get_entry(source_index,
                                                 path_utf8=old_path)
                    # update the source details variable to be the real
                    # location.
                    source_details = old_entry[1][source_index]
                    source_minikind = source_details[0]
                else:
                    old_dirname = entry[0][0]
                    old_basename = entry[0][1]
                    old_path = path = None
                if path_info is None:
                    # the file is missing on disk, show as removed.
                    content_change = True
                    target_kind = None
                    target_exec = False
                else:
                    # source and target are both versioned and disk file is present.
                    target_kind = path_info[2]
                    if target_kind == 'directory':
                        if path is None:
                            old_path = path = pathjoin(old_dirname, old_basename)
                        new_dirname_to_file_id[path] = file_id
                        if source_minikind != 'd':
                            content_change = True
                        else:
                            # directories have no fingerprint
                            content_change = False
                        target_exec = False
                    elif target_kind == 'file':
                        if source_minikind != 'f':
                            content_change = True
                        else:
                            # We could check the size, but we already have the
                            # sha1 hash.
                            content_change = (link_or_sha1 != source_details[1])
                        # Target details is updated at update_entry time
                        if use_filesystem_for_exec:
                            # We don't need S_ISREG here, because we are sure
                            # we are dealing with a file.
                            target_exec = bool(stat.S_IEXEC & path_info[3].st_mode)
                        else:
                            target_exec = target_details[3]
                    elif target_kind == 'symlink':
                        if source_minikind != 'l':
                            content_change = True
                        else:
                            content_change = (link_or_sha1 != source_details[1])
                        target_exec = False
                    elif target_kind == 'tree-reference':
                        if source_minikind != 't':
                            content_change = True
                        else:
                            content_change = False
                        target_exec = False
                    else:
                        raise Exception("unknown kind %s" % path_info[2])
                if source_minikind == 'd':
                    if path is None:
                        old_path = path = pathjoin(old_dirname, old_basename)
                    old_dirname_to_file_id[old_path] = file_id
                # parent id is the entry for the path in the target tree
                if old_dirname == last_source_parent[0]:
                    source_parent_id = last_source_parent[1]
                else:
                    try:
                        source_parent_id = old_dirname_to_file_id[old_dirname]
                    except KeyError:
                        source_parent_entry = state._get_entry(source_index,
                                                               path_utf8=old_dirname)
                        source_parent_id = source_parent_entry[0][2]
                    if source_parent_id == entry[0][2]:
                        # This is the root, so the parent is None
                        source_parent_id = None
                    else:
                        last_source_parent[0] = old_dirname
                        last_source_parent[1] = source_parent_id
                new_dirname = entry[0][0]
                if new_dirname == last_target_parent[0]:
                    target_parent_id = last_target_parent[1]
                else:
                    try:
                        target_parent_id = new_dirname_to_file_id[new_dirname]
                    except KeyError:
                        # TODO: We don't always need to do the lookup, because the
                        # parent entry will be the same as the source entry.
                        target_parent_entry = state._get_entry(target_index,
                                                               path_utf8=new_dirname)
                        assert target_parent_entry != (None, None), (
                            "Could not find target parent in wt: %s\nparent of: %s"
                            % (new_dirname, entry))
                        target_parent_id = target_parent_entry[0][2]
                    if target_parent_id == entry[0][2]:
                        # This is the root, so the parent is None
                        target_parent_id = None
                    else:
                        last_target_parent[0] = new_dirname
                        last_target_parent[1] = target_parent_id
                source_exec = source_details[3]
                if (include_unchanged
                    or content_change
                    or source_parent_id != target_parent_id
                    or old_basename != entry[0][1]
                    or source_exec != target_exec
                    ):
                    if old_path is None:
                        old_path = path = pathjoin(old_dirname, old_basename)
                        old_path_u = utf8_decode(old_path)[0]
                        path_u = old_path_u
                    else:
                        old_path_u = utf8_decode(old_path)[0]
                        if old_path == path:
                            path_u = old_path_u
                        else:
                            path_u = utf8_decode(path)[0]
                    source_kind = _minikind_to_kind[source_minikind]
                    return (entry[0][2],
                           (old_path_u, path_u),
                           content_change,
                           (True, True),
                           (source_parent_id, target_parent_id),
                           (utf8_decode(old_basename)[0], utf8_decode(entry[0][1])[0]),
                           (source_kind, target_kind),
                           (source_exec, target_exec))
                else:
                    return uninteresting
            elif source_minikind in 'a' and target_minikind in 'fdlt':
                # looks like a new file
                if path_info is not None:
                    path = pathjoin(entry[0][0], entry[0][1])
                    # parent id is the entry for the path in the target tree
                    # TODO: these are the same for an entire directory: cache em.
                    parent_id = state._get_entry(target_index,
                                                 path_utf8=entry[0][0])[0][2]
                    if parent_id == entry[0][2]:
                        parent_id = None
                    if use_filesystem_for_exec:
                        # We need S_ISREG here, because we aren't sure if this
                        # is a file or not.
                        target_exec = bool(
                            stat.S_ISREG(path_info[3].st_mode)
                            and stat.S_IEXEC & path_info[3].st_mode)
                    else:
                        target_exec = target_details[3]
                    return (entry[0][2],
                           (None, utf8_decode(path)[0]),
                           True,
                           (False, True),
                           (None, parent_id),
                           (None, utf8_decode(entry[0][1])[0]),
                           (None, path_info[2]),
                           (None, target_exec))
                else:
                    # but it's not on disk: we deliberately treat this as just
                    # never-present. (Why ?! - RBC 20070224)
                    pass
            elif source_minikind in 'fdlt' and target_minikind in 'a':
                # unversioned, possibly, or possibly not deleted: we don't care.
                # if it's still on disk, *and* there's no other entry at this
                # path [we don't know this in this routine at the moment -
                # perhaps we should change this] - then it would be an unknown.
                old_path = pathjoin(entry[0][0], entry[0][1])
                # parent id is the entry for the path in the target tree
                parent_id = state._get_entry(source_index, path_utf8=entry[0][0])[0][2]
                if parent_id == entry[0][2]:
                    parent_id = None
                return (entry[0][2],
                       (utf8_decode(old_path)[0], None),
                       True,
                       (True, False),
                       (parent_id, None),
                       (utf8_decode(entry[0][1])[0], None),
                       (_minikind_to_kind[source_minikind], None),
                       (source_details[3], None))
            elif source_minikind in 'fdlt' and target_minikind in 'r':
                # a rename; could be a true rename, or a rename inherited from
                # a renamed parent. TODO: handle this efficiently. It's not a
                # common case to rename dirs though, so a correct but slow
                # implementation will do.
                if not osutils.is_inside_any(searched_specific_files, target_details[1]):
                    search_specific_files.add(target_details[1])
            elif source_minikind in 'ra' and target_minikind in 'ra':
                # neither of the selected trees contain this file,
                # so skip over it. This is not currently directly tested, but
                # is indirectly via test_too_much.TestCommands.test_conflicts.
                pass
            else:
                raise AssertionError("don't know how to compare "
                    "source_minikind=%r, target_minikind=%r"
                    % (source_minikind, target_minikind))
                ## import pdb;pdb.set_trace()
            return None
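        # Overview (comment added for clarity): the loop below walks each
        # search root with two cursors that are advanced in step -- one over
        # the dirstate dirblocks (the versioned metadata) and one over
        # osutils._walkdirs_utf8 output (what is actually on disk) -- and
        # feeds matched-up (entry, path_info) pairs to _process_entry.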
        while search_specific_files:
            # TODO: the pending list should be lexically sorted? the
            # interface doesn't require it.
            current_root = search_specific_files.pop()
            current_root_unicode = current_root.decode('utf8')
            searched_specific_files.add(current_root)
            # process the entries for this containing directory: the rest will be
            # found by their parents recursively.
            root_entries = _entries_for_path(current_root)
            root_abspath = self.target.abspath(current_root_unicode)
            try:
                root_stat = os.lstat(root_abspath)
            except OSError, e:
                if e.errno == errno.ENOENT:
                    # the path does not exist: let _process_entry know that.
                    root_dir_info = None
                else:
                    # some other random error: hand it up.
                    raise
            else:
                root_dir_info = ('', current_root,
                    osutils.file_kind_from_stat_mode(root_stat.st_mode), root_stat,
                    root_abspath)
                if root_dir_info[2] == 'directory':
                    if self.target._directory_is_tree_reference(
                        current_root.decode('utf8')):
                        root_dir_info = root_dir_info[:2] + \
                            ('tree-reference',) + root_dir_info[3:]

            if not root_entries and not root_dir_info:
                # this specified path is not present at all, skip it.
                continue
            path_handled = False
            for entry in root_entries:
                result = _process_entry(entry, root_dir_info)
                if result is not None:
                    path_handled = True
                    if result is not uninteresting:
                        yield result
            if want_unversioned and not path_handled and root_dir_info:
                new_executable = bool(
                    stat.S_ISREG(root_dir_info[3].st_mode)
                    and stat.S_IEXEC & root_dir_info[3].st_mode)
                yield (None,
                       (None, current_root_unicode),
                       True,
                       (False, False),
                       (None, None),
                       (None, splitpath(current_root_unicode)[-1]),
                       (None, root_dir_info[2]),
                       (None, new_executable)
                      )
            initial_key = (current_root, '', '')
            block_index, _ = state._find_block_index_from_key(initial_key)
            if block_index == 0:
                # we have processed the total root already, but because the
                # initial key matched it we should skip it here.
                block_index += 1
            if root_dir_info and root_dir_info[2] == 'tree-reference':
                current_dir_info = None
            else:
                dir_iterator = osutils._walkdirs_utf8(root_abspath, prefix=current_root)
                try:
                    current_dir_info = dir_iterator.next()
                except OSError, e:
                    # on win32, python2.4 has e.errno == ERROR_DIRECTORY, but
                    # python 2.5 has e.errno == EINVAL,
                    # and e.winerror == ERROR_DIRECTORY
                    e_winerror = getattr(e, 'winerror', None)
                    win_errors = (ERROR_DIRECTORY, ERROR_PATH_NOT_FOUND)
                    # there may be directories in the inventory even though
                    # this path is not a file on disk: so mark it as end of
                    # iterator
                    if e.errno in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
                        current_dir_info = None
                    elif (sys.platform == 'win32'
                          and (e.errno in win_errors
                               or e_winerror in win_errors)):
                        current_dir_info = None
                    else:
                        raise
                else:
                    if current_dir_info[0][0] == '':
                        # remove .bzr from iteration
                        bzr_index = bisect_left(current_dir_info[1], ('.bzr',))
                        assert current_dir_info[1][bzr_index][0] == '.bzr'
                        del current_dir_info[1][bzr_index]
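            # Note (added for clarity; see osutils._walkdirs_utf8 for the
            # authoritative description): as used here, each current_dir_info
            # appears to be a pair of
            #   ((directory-relpath, directory-abspath),
            #    [(relpath, basename, kind, lstat, abspath), ...])
            # so current_dir_info[0][0] is the directory being listed and
            # current_dir_info[1] is its sorted list of children.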
            # walk until both the directory listing and the versioned metadata
            # are exhausted.
            if (block_index < len(state._dirblocks) and
                osutils.is_inside(current_root, state._dirblocks[block_index][0])):
                current_block = state._dirblocks[block_index]
            else:
                current_block = None
            while (current_dir_info is not None or
                   current_block is not None):
                if (current_dir_info and current_block
                    and current_dir_info[0][0] != current_block[0]):
                    if cmp_by_dirs(current_dir_info[0][0], current_block[0]) < 0:
                        # filesystem data refers to paths not covered by the dirblock.
                        # this has two possibilities:
                        # A) it is versioned but empty, so there is no block for it
                        # B) it is not versioned.

                        # if (A) then we need to recurse into it to check for
                        # new unknown files or directories.
                        # if (B) then we should ignore it, because we don't
                        # recurse into unknown directories.
                        path_index = 0
                        while path_index < len(current_dir_info[1]):
                            current_path_info = current_dir_info[1][path_index]
                            if want_unversioned:
                                if current_path_info[2] == 'directory':
                                    if self.target._directory_is_tree_reference(
                                        current_path_info[0].decode('utf8')):
                                        current_path_info = current_path_info[:2] + \
                                            ('tree-reference',) + current_path_info[3:]
                                new_executable = bool(
                                    stat.S_ISREG(current_path_info[3].st_mode)
                                    and stat.S_IEXEC & current_path_info[3].st_mode)
                                yield (None,
                                    (None, utf8_decode(current_path_info[0])[0]),
                                    True,
                                    (False, False),
                                    (None, None),
                                    (None, utf8_decode(current_path_info[1])[0]),
                                    (None, current_path_info[2]),
                                    (None, new_executable))
                            # don't descend into this unversioned path if it is
                            # a dir
                            if current_path_info[2] in ('directory',
                                                        'tree-reference'):
                                del current_dir_info[1][path_index]
                                path_index -= 1
                            path_index += 1

                        # This dir info has been handled, go to the next
                        try:
                            current_dir_info = dir_iterator.next()
                        except StopIteration:
                            current_dir_info = None
                    else:
                        # We have a dirblock entry for this location, but there
                        # is no filesystem path for this. This is most likely
                        # because a directory was removed from the disk.
                        # We don't have to report the missing directory,
                        # because that should have already been handled, but we
                        # need to handle all of the files that are contained
                        # within.
                        for current_entry in current_block[1]:
                            # entry referring to file not present on disk.
                            # advance the entry only, after processing.
                            result = _process_entry(current_entry, None)
                            if result is not None:
                                if result is not uninteresting:
                                    yield result
                        block_index += 1
                        if (block_index < len(state._dirblocks) and
                            osutils.is_inside(current_root,
                                              state._dirblocks[block_index][0])):
                            current_block = state._dirblocks[block_index]
                        else:
                            current_block = None
                    continue
                entry_index = 0
                if current_block and entry_index < len(current_block[1]):
                    current_entry = current_block[1][entry_index]
                else:
                    current_entry = None
                advance_entry = True
                path_index = 0
                if current_dir_info and path_index < len(current_dir_info[1]):
                    current_path_info = current_dir_info[1][path_index]
                    if current_path_info[2] == 'directory':
                        if self.target._directory_is_tree_reference(
                            current_path_info[0].decode('utf8')):
                            current_path_info = current_path_info[:2] + \
                                ('tree-reference',) + current_path_info[3:]
                else:
                    current_path_info = None
                advance_path = True
                path_handled = False
                while (current_entry is not None or
                    current_path_info is not None):
                    if current_entry is None:
                        # the check for path_handled when the path is advanced
                        # will yield this path if needed.
                        pass
                    elif current_path_info is None:
                        # no path is fine: the per entry code will handle it.
                        result = _process_entry(current_entry, current_path_info)
                        if result is not None:
                            if result is not uninteresting:
                                yield result
                    elif (current_entry[0][1] != current_path_info[1]
                          or current_entry[1][target_index][0] in 'ar'):
                        # The current path on disk doesn't match the dirblock
                        # record. Either the dirblock is marked as absent, or
                        # the file on disk is not present at all in the
                        # dirblock. Either way, report about the dirblock
                        # entry, and let other code handle the filesystem one.

                        # Compare the basename for these files to determine
                        # their order
                        if current_path_info[1] < current_entry[0][1]:
                            # extra file on disk: pass for now, but only
                            # increment the path, not the entry
                            advance_entry = False
                        else:
                            # entry referring to file not present on disk.
                            # advance the entry only, after processing.
                            result = _process_entry(current_entry, None)
                            if result is not None:
                                if result is not uninteresting:
                                    yield result
                            advance_path = False
                    else:
                        result = _process_entry(current_entry, current_path_info)
                        if result is not None:
                            path_handled = True
                            if result is not uninteresting:
                                yield result
                    if advance_entry and current_entry is not None:
                        entry_index += 1
                        if entry_index < len(current_block[1]):
                            current_entry = current_block[1][entry_index]
                        else:
                            current_entry = None
                    else:
                        advance_entry = True # reset the advance flag
                    if advance_path and current_path_info is not None:
                        if not path_handled:
                            # unversioned in all regards
                            if want_unversioned:
                                new_executable = bool(
                                    stat.S_ISREG(current_path_info[3].st_mode)
                                    and stat.S_IEXEC & current_path_info[3].st_mode)
                                yield (None,
                                    (None, utf8_decode(current_path_info[0])[0]),
                                    True,
                                    (False, False),
                                    (None, None),
                                    (None, utf8_decode(current_path_info[1])[0]),
                                    (None, current_path_info[2]),
                                    (None, new_executable))
                            # don't descend into this unversioned path if it is
                            # a dir
                            if current_path_info[2] in ('directory',):
                                del current_dir_info[1][path_index]
                                path_index -= 1
                        # don't descend the disk iterator into any tree
                        # paths.
                        if current_path_info[2] == 'tree-reference':
                            del current_dir_info[1][path_index]
                            path_index -= 1
                        path_index += 1
                        if path_index < len(current_dir_info[1]):
                            current_path_info = current_dir_info[1][path_index]
                            if current_path_info[2] == 'directory':
                                if self.target._directory_is_tree_reference(
                                    current_path_info[0].decode('utf8')):
                                    current_path_info = current_path_info[:2] + \
                                        ('tree-reference',) + current_path_info[3:]
                        else:
                            current_path_info = None
                        path_handled = False
                    else:
                        advance_path = True # reset the advance flag.
                if current_block is not None:
                    block_index += 1
                    if (block_index < len(state._dirblocks) and
                        osutils.is_inside(current_root, state._dirblocks[block_index][0])):
                        current_block = state._dirblocks[block_index]
                    else:
                        current_block = None
                if current_dir_info is not None:
                    try:
                        current_dir_info = dir_iterator.next()
                    except StopIteration:
                        current_dir_info = None

    @staticmethod
    def is_compatible(source, target):
        # the target must be a dirstate working tree
        if not isinstance(target, WorkingTree4):
            return False
        # the source must be a revtree or dirstate rev tree.
        if not isinstance(source,
            (revisiontree.RevisionTree, DirStateRevisionTree)):
            return False
        # the source revid must be in the target dirstate
        if not (source._revision_id == NULL_REVISION or
            source._revision_id in target.get_parent_ids()):
            # TODO: what about ghosts? it may well need to
            # check for them explicitly.
            return False
        return True

InterTree.register_optimiser(InterDirStateTree)

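# Note (added for clarity; this describes the InterTree optimiser registry as
# I understand it): registering InterDirStateTree means that a call such as
#
#     inter = InterTree.get(source_tree, target_tree)
#
# returns an InterDirStateTree whenever is_compatible() above accepts the
# pair, so its fast _iter_changes path is used automatically; the generic
# InterTree implementation remains the fallback for every other pairing.
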
class Converter3to4(object):
    """Perform an in-place upgrade of format 3 to format 4 trees."""

    def __init__(self):
        self.target_format = WorkingTreeFormat4()

    def convert(self, tree):
        # lock the control files not the tree, so that we don't get tree
        # on-unlock behaviours, and so that no one else diddles with the
        # tree during upgrade.
        tree._control_files.lock_write()
        try:
            tree.read_working_inventory()
            self.create_dirstate_data(tree)
            self.update_format(tree)
            self.remove_xml_files(tree)
        finally:
            tree._control_files.unlock()
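    # Usage sketch (added for illustration; the upgrade machinery normally
    # drives this, and `wt` here is a hypothetical already-opened format 3
    # working tree):
    #
    #     converter = Converter3to4()
    #     converter.convert(wt)
    #
    # After convert() the tree's control directory holds a 'dirstate' file
    # and the format marker names the format 4 working tree.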
    def create_dirstate_data(self, tree):
        """Create the dirstate based data for tree."""
        local_path = tree.bzrdir.get_workingtree_transport(None
            ).local_abspath('dirstate')
        state = dirstate.DirState.from_tree(tree, local_path)
        state.save()
        state.unlock()
    def remove_xml_files(self, tree):
        """Remove the old format 3 data."""
        transport = tree.bzrdir.get_workingtree_transport(None)
        for path in ['basis-inventory-cache', 'inventory', 'last-revision',
            'pending-merges', 'stat-cache']:
            try:
                transport.delete(path)
            except errors.NoSuchFile:
                # some files are optional - just deal.
                pass
    def update_format(self, tree):
        """Change the format marker."""
        tree._control_files.put_utf8('format',
            self.target_format.get_format_string())