# Copyright (C) 2005, 2006, 2007 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

"""WorkingTree4 format and implementation.

WorkingTree4 provides the dirstate based working tree logic.

To get a WorkingTree, call bzrdir.open_workingtree() or
WorkingTree.open(dir).
"""

from cStringIO import StringIO
import errno
import os
import stat
import sys

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from bisect import bisect_left
from copy import deepcopy

from bzrlib import (
    bzrdir,
    cache_utf8,
    conflicts as _mod_conflicts,
    debug,
    dirstate,
    errors,
    generate_ids,
    osutils,
    revision as _mod_revision,
    trace,
    transform,
    )
from bzrlib.transport import get_transport
""")
from bzrlib import symbol_versioning
from bzrlib.decorators import needs_read_lock, needs_write_lock
from bzrlib.inventory import InventoryEntry, Inventory, ROOT_ID, entry_factory
from bzrlib.lockable_files import LockableFiles, TransportLock
from bzrlib.lockdir import LockDir
import bzrlib.mutabletree
from bzrlib.mutabletree import needs_tree_write_lock
from bzrlib.osutils import (
    file_kind,
    isdir,
    pathjoin,
    realpath,
    safe_unicode,
    )
from bzrlib.trace import mutter, note
from bzrlib.transport.local import LocalTransport
from bzrlib.tree import InterTree
from bzrlib.progress import DummyProgress, ProgressPhase
from bzrlib.revision import NULL_REVISION, CURRENT_REVISION
from bzrlib.rio import RioReader, rio_file, Stanza
from bzrlib.symbol_versioning import (deprecated_passed,
    )
from bzrlib.tree import Tree
from bzrlib.workingtree import WorkingTree, WorkingTree3, WorkingTreeFormat3


# This is the Windows equivalent of ENOTDIR
# It is defined in pywin32.winerror, but we don't want a strong dependency for
# just an error code.
ERROR_PATH_NOT_FOUND = 3
ERROR_DIRECTORY = 267
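
# Note on dirstate entries, which the code below indexes directly: each entry
# is a pair (key, details) where
#   key     = (dirname_utf8, basename_utf8, file_id)
#   details = a list with one tuple per tree (index 0 is the working tree,
#             parents follow), each of the form
#             (minikind, fingerprint, size, executable, packed_stat_or_revision)
# The minikind codes used throughout are 'a' absent, 'd' directory, 'f' file,
# 'l' symlink, 'r' relocated and 't' tree-reference.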


class WorkingTree4(WorkingTree3):
    """This is the Format 4 working tree.

    This differs from WorkingTree3 by:
     - Having a consolidated internal dirstate, stored in a
       randomly-accessible sorted file on disk.
     - Not having a regular inventory attribute.  One can be synthesized
       on demand but this is expensive and should be avoided.

    This is new in bzr 0.15.
    """

    def __init__(self, basedir,
                 branch,
                 _control_files=None,
                 _format=None,
                 _bzrdir=None):
        """Construct a WorkingTree for basedir.

        If the branch is not supplied, it is opened automatically.
        If the branch is supplied, it must be the branch for this basedir.
        (branch.base is not cross checked, because for remote branches that
        would be meaningless).
        """
        self._format = _format
        self.bzrdir = _bzrdir
        basedir = safe_unicode(basedir)
        mutter("opening working tree %r", basedir)
        self._branch = branch
        self.basedir = realpath(basedir)
        # if branch is at our basedir and is a format 6 or less
        # assume all other formats have their own control files.
        self._control_files = _control_files
        self._transport = self._control_files._transport
        # during a read or write lock these objects are set, and are
        # None the rest of the time.
        self._dirstate = None
        self._inventory = None
        self._setup_directory_is_tree_reference()
        self._detect_case_handling()

    @needs_tree_write_lock
    def _add(self, files, ids, kinds):
        """See MutableTree._add."""
        state = self.current_dirstate()
        for f, file_id, kind in zip(files, ids, kinds):
            # special case tree root handling.
            if f == '' and self.path2id(f) == ROOT_ID:
                state.set_path_id('', generate_ids.gen_file_id(f))
                continue
            if file_id is None:
                file_id = generate_ids.gen_file_id(f)
            # deliberately add the file with no cached stat or sha1
            # - on the first access it will be gathered, and we can
            # always change this once tests are all passing.
            state.add(f, file_id, kind, None, '')
        self._make_dirty(reset_inventory=True)

    def _make_dirty(self, reset_inventory):
        """Make the tree state dirty.

        :param reset_inventory: True if the cached inventory should be removed
            (presuming there is one).
        """
        self._dirty = True
        if reset_inventory and self._inventory is not None:
            self._inventory = None

    @needs_tree_write_lock
    def add_reference(self, sub_tree):
        # use standard implementation, which calls back to self._add
        # So we don't store the reference_revision in the working dirstate,
        # it's just recorded at the moment of commit.
        self._add_reference(sub_tree)

    def break_lock(self):
        """Break a lock if one is present from another instance.

        Uses the ui factory to ask for confirmation if the lock may be from
        an active process.

        This will probe the repository for its lock as well.
        """
        # if the dirstate is locked by an active process, reject the break
        # lock request.
        try:
            if self._dirstate is None:
                clear = True
            else:
                clear = False
            state = self._current_dirstate()
            if state._lock_token is not None:
                # we already have it locked. sheese, cant break our own lock.
                raise errors.LockActive(self.basedir)
            else:
                try:
                    # try for a write lock - need permission to get one anyhow
                    # to break locks.
                    state.lock_write()
                except errors.LockContention:
                    # oslocks fail when a process is still live: fail.
                    # TODO: get the locked lockdir info and give to the user to
                    # assist in debugging.
                    raise errors.LockActive(self.basedir)
                else:
                    state.unlock()
        finally:
            if clear:
                self._dirstate = None
        self._control_files.break_lock()
        self.branch.break_lock()

    def _comparison_data(self, entry, path):
        kind, executable, stat_value = \
            WorkingTree3._comparison_data(self, entry, path)
        # it looks like a plain directory, but it's really a reference -- see
        # also kind()
        if (self._repo_supports_tree_reference and
            kind == 'directory' and
            self._directory_is_tree_reference(path)):
            kind = 'tree-reference'
        return kind, executable, stat_value

    @needs_tree_write_lock
    def commit(self, message=None, revprops=None, *args, **kwargs):
        # mark the tree as dirty post commit - commit
        # can change the current versioned list by doing deletes.
        result = WorkingTree3.commit(self, message, revprops, *args, **kwargs)
        self._make_dirty(reset_inventory=True)
        return result

    def current_dirstate(self):
        """Return the current dirstate object.

        This is not part of the tree interface and only exposed for ease of
        testing.

        :raises errors.NotWriteLocked: when not in a lock.
        """
        self._must_be_locked()
        return self._current_dirstate()

    def _current_dirstate(self):
        """Internal function that does not check lock status.

        This is needed for break_lock which also needs the dirstate.
        """
        if self._dirstate is not None:
            return self._dirstate
        local_path = self.bzrdir.get_workingtree_transport(None
            ).local_abspath('dirstate')
        self._dirstate = dirstate.DirState.on_file(local_path)
        return self._dirstate

    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.

        :return: set of paths.
        """
        # TODO: make a generic multi-bisect routine roughly that should list
        # the paths, then process one half at a time recursively, and feed the
        # results of each bisect in further still
        paths = sorted(paths)
        result = set()
        state = self.current_dirstate()
        # TODO we want a paths_to_dirblocks helper I think
        for path in paths:
            dirname, basename = os.path.split(path.encode('utf8'))
            _, _, _, path_is_versioned = state._get_block_entry_index(
                dirname, basename, 0)
            if not path_is_versioned:
                result.add(path)
        return result

    def flush(self):
        """Write all cached data to disk."""
        if self._control_files._lock_mode != 'w':
            raise errors.NotWriteLocked(self)
        self.current_dirstate().save()
        self._inventory = None
        self._dirty = False

    @needs_tree_write_lock
    def _gather_kinds(self, files, kinds):
        """See MutableTree._gather_kinds."""
        for pos, f in enumerate(files):
            if kinds[pos] is None:
                kinds[pos] = self._kind(f)

    def _generate_inventory(self):
        """Create and set self.inventory from the dirstate object.

        This is relatively expensive: we have to walk the entire dirstate.
        Ideally we would not, and can deprecate this function.
        """
        #: uncomment to trap on inventory requests.
        # import pdb;pdb.set_trace()
        state = self.current_dirstate()
        state._read_dirblocks_if_needed()
        root_key, current_entry = self._get_entry(path='')
        current_id = root_key[2]
        if not (current_entry[0][0] == 'd'): # directory
            raise AssertionError(current_entry)
        inv = Inventory(root_id=current_id)
        # Turn some things into local variables
        minikind_to_kind = dirstate.DirState._minikind_to_kind
        factory = entry_factory
        utf8_decode = cache_utf8._utf8_decode
        inv_byid = inv._byid
        # we could do this straight out of the dirstate; it might be fast
        # and should be profiled - RBC 20070216
        parent_ies = {'' : inv.root}
        for block in state._dirblocks[1:]: # skip the root
            dirname = block[0]
            try:
                parent_ie = parent_ies[dirname]
            except KeyError:
                # all the paths in this block are not versioned in this tree
                continue
            for key, entry in block[1]:
                minikind, link_or_sha1, size, executable, stat = entry[0]
                if minikind in ('a', 'r'): # absent, relocated
                    # a parent tree only entry
                    continue
                name = key[1]
                name_unicode = utf8_decode(name)[0]
                file_id = key[2]
                kind = minikind_to_kind[minikind]
                inv_entry = factory[kind](file_id, name_unicode,
                                          parent_ie.file_id)
                if kind == 'file':
                    # This is only needed on win32, where this is the only way
                    # we know the executable bit.
                    inv_entry.executable = executable
                    # not strictly needed: working tree
                    #inv_entry.text_size = size
                    #inv_entry.text_sha1 = sha1
                elif kind == 'directory':
                    # add this entry to the parent map.
                    parent_ies[(dirname + '/' + name).strip('/')] = inv_entry
                elif kind == 'tree-reference':
                    if not self._repo_supports_tree_reference:
                        raise AssertionError(
                            "repository of %r "
                            "doesn't support tree references "
                            "required by entry %r"
                            % (self, name))
                    inv_entry.reference_revision = link_or_sha1 or None
                elif kind != 'symlink':
                    raise AssertionError("unknown kind %r" % kind)
                # These checks cost us around 40ms on a 55k entry tree
                if file_id in inv_byid:
                    raise AssertionError('file_id %s already in'
                        ' inventory as %s' % (file_id, inv_byid[file_id]))
                if name_unicode in parent_ie.children:
                    raise AssertionError('name %r already in parent'
                        % (name_unicode,))
                inv_byid[file_id] = inv_entry
                parent_ie.children[name_unicode] = inv_entry
        self._inventory = inv

    def _get_entry(self, file_id=None, path=None):
        """Get the dirstate row for file_id or path.

        If either file_id or path is supplied, it is used as the key to lookup.
        If both are supplied, the fastest lookup is used, and an error is
        raised if they do not both point at the same row.

        :param file_id: An optional unicode file_id to be looked up.
        :param path: An optional unicode path to be looked up.
        :return: The dirstate row tuple for path/file_id, or (None, None)
        """
        if file_id is None and path is None:
            raise errors.BzrError('must supply file_id or path')
        state = self.current_dirstate()
        if path is not None:
            path = path.encode('utf8')
        return state._get_entry(0, fileid_utf8=file_id, path_utf8=path)

    def get_file_sha1(self, file_id, path=None, stat_value=None):
        # check file id is valid unconditionally.
        entry = self._get_entry(file_id=file_id, path=path)
        if entry == (None, None):
            raise errors.NoSuchId(self, file_id)
        if path is None:
            path = pathjoin(entry[0][0], entry[0][1]).decode('utf8')

        file_abspath = self.abspath(path)
        state = self.current_dirstate()
        if stat_value is None:
            try:
                stat_value = os.lstat(file_abspath)
            except OSError, e:
                if e.errno == errno.ENOENT:
                    return None
                else:
                    raise
        link_or_sha1 = state.update_entry(entry, file_abspath,
                                          stat_value=stat_value)
        if entry[1][0][0] == 'f':
            return link_or_sha1
        return None

    def _get_inventory(self):
        """Get the inventory for the tree. This is only valid within a lock."""
        if 'evil' in debug.debug_flags:
            trace.mutter_callsite(2,
                "accessing .inventory forces a size of tree translation.")
        if self._inventory is not None:
            return self._inventory
        self._must_be_locked()
        self._generate_inventory()
        return self._inventory

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation requests the ids list from the dirstate file.
        """
        return self.current_dirstate().get_parent_ids()

    def get_reference_revision(self, file_id, path=None):
        # referenced tree's revision is whatever's currently there
        return self.get_nested_tree(file_id, path).last_revision()

    def get_nested_tree(self, file_id, path=None):
        if path is None:
            path = self.id2path(file_id)
        # else: check file_id is at path?
        return WorkingTree.open(self.abspath(path))

    def get_root_id(self):
        """Return the id of this trees root"""
        return self._get_entry(path='')[0][2]

    def has_id(self, file_id):
        state = self.current_dirstate()
        row, parents = self._get_entry(file_id=file_id)
        if row is None:
            return False
        return osutils.lexists(pathjoin(
                    self.basedir, row[0].decode('utf8'), row[1].decode('utf8')))

    def id2path(self, file_id):
        "Convert a file-id to a path."
        state = self.current_dirstate()
        entry = self._get_entry(file_id=file_id)
        if entry == (None, None):
            raise errors.NoSuchId(tree=self, file_id=file_id)
        path_utf8 = osutils.pathjoin(entry[0][0], entry[0][1])
        return path_utf8.decode('utf8')

    def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
        entry = self._get_entry(path=path)
        if entry == (None, None):
            return False # Missing entries are not executable
        return entry[1][0][3] # Executable?

    if not osutils.supports_executable():
        def is_executable(self, file_id, path=None):
            """Test if a file is executable or not.

            Note: The caller is expected to take a read-lock before calling this.
            """
            entry = self._get_entry(file_id=file_id, path=path)
            if entry == (None, None):
                return False
            return entry[1][0][3]

        _is_executable_from_path_and_stat = \
            _is_executable_from_path_and_stat_from_basis
    else:
        def is_executable(self, file_id, path=None):
            """Test if a file is executable or not.

            Note: The caller is expected to take a read-lock before calling this.
            """
            self._must_be_locked()
            if not path:
                path = self.id2path(file_id)
            mode = os.lstat(self.abspath(path)).st_mode
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)
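
    # Note: which is_executable implementation the class gets is decided once,
    # at class definition time, from osutils.supports_executable().  Where the
    # filesystem has no usable executable bit (e.g. win32) the answer comes
    # from the dirstate record; elsewhere it is read from disk.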

    def all_file_ids(self):
        """See Tree.iter_all_file_ids"""
        self._must_be_locked()
        result = set()
        for key, tree_details in self.current_dirstate()._iter_entries():
            if tree_details[0][0] in ('a', 'r'): # relocated
                # not relevant to the working tree
                continue
            result.add(key[2])
        return result

    def __iter__(self):
        """Iterate through file_ids for this tree.

        file_ids are in a WorkingTree if they are in the working inventory
        and the working file exists.
        """
        result = []
        for key, tree_details in self.current_dirstate()._iter_entries():
            if tree_details[0][0] in ('a', 'r'): # absent, relocated
                # not relevant to the working tree
                continue
            path = pathjoin(self.basedir, key[0].decode('utf8'), key[1].decode('utf8'))
            if osutils.lexists(path):
                result.append(key[2])
        return result

    def iter_references(self):
        for key, tree_details in self.current_dirstate()._iter_entries():
            if tree_details[0][0] in ('a', 'r'): # absent, relocated
                # not relevant to the working tree
                continue
            if not key[1]:
                # the root is not a reference.
                continue
            path = pathjoin(self.basedir, key[0].decode('utf8'), key[1].decode('utf8'))
            try:
                if self._kind(path) == 'tree-reference':
                    yield key, tree_details
            except errors.NoSuchFile:
                # path is missing on disk.
                continue

    def kind(self, file_id):
        """Return the kind of a file.

        This is always the actual kind that's on disk, regardless of what it
        was added as.

        Note: The caller is expected to take a read-lock before calling this.
        """
        relpath = self.id2path(file_id)
        if relpath is None:
            raise AssertionError(
                "path for id {%s} is None!" % file_id)
        return self._kind(relpath)

    def _kind(self, relpath):
        abspath = self.abspath(relpath)
        kind = file_kind(abspath)
        if (self._repo_supports_tree_reference and
            kind == 'directory' and
            self._directory_is_tree_reference(relpath)):
            kind = 'tree-reference'
        return kind

    @needs_read_lock
    def _last_revision(self):
        """See Mutable.last_revision."""
        parent_ids = self.current_dirstate().get_parent_ids()
        if parent_ids:
            return parent_ids[0]
        else:
            return _mod_revision.NULL_REVISION

    def lock_read(self):
        """See Branch.lock_read, and WorkingTree.unlock."""
        self.branch.lock_read()
        try:
            self._control_files.lock_read()
            try:
                state = self.current_dirstate()
                if not state._lock_token:
                    state.lock_read()
                # set our support for tree references from the repository in
                # use.
                self._repo_supports_tree_reference = getattr(
                    self.branch.repository._format, "supports_tree_reference",
                    False)
            except:
                self._control_files.unlock()
                raise
        except:
            self.branch.unlock()
            raise

    def _lock_self_write(self):
        """This should be called after the branch is locked."""
        try:
            self._control_files.lock_write()
            try:
                state = self.current_dirstate()
                if not state._lock_token:
                    state.lock_write()
                # set our support for tree references from the repository in
                # use.
                self._repo_supports_tree_reference = getattr(
                    self.branch.repository._format, "supports_tree_reference",
                    False)
            except:
                self._control_files.unlock()
                raise
        except:
            self.branch.unlock()
            raise

    def lock_tree_write(self):
        """See MutableTree.lock_tree_write, and WorkingTree.unlock."""
        self.branch.lock_read()
        self._lock_self_write()

    def lock_write(self):
        """See MutableTree.lock_write, and WorkingTree.unlock."""
        self.branch.lock_write()
        self._lock_self_write()
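
    # Lock ordering note: the branch is always locked first, then the tree's
    # control files, then the dirstate file itself; unlock() releases them in
    # the reverse order.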

    @needs_tree_write_lock
    def move(self, from_paths, to_dir, after=False):
        """See WorkingTree.move()."""
        result = []
        if not from_paths:
            return result
        state = self.current_dirstate()
        if isinstance(from_paths, basestring):
            raise ValueError()
        to_dir_utf8 = to_dir.encode('utf8')
        to_entry_dirname, to_basename = os.path.split(to_dir_utf8)
        id_index = state._get_id_index()
        # check destination directory
        # get the details for it
        to_entry_block_index, to_entry_entry_index, dir_present, entry_present = \
            state._get_block_entry_index(to_entry_dirname, to_basename, 0)
        if not entry_present:
            raise errors.BzrMoveFailedError('', to_dir,
                errors.NotVersionedError(to_dir))
        to_entry = state._dirblocks[to_entry_block_index][1][to_entry_entry_index]
        # get a handle on the block itself.
        to_block_index = state._ensure_block(
            to_entry_block_index, to_entry_entry_index, to_dir_utf8)
        to_block = state._dirblocks[to_block_index]
        to_abs = self.abspath(to_dir)
        if not isdir(to_abs):
            raise errors.BzrMoveFailedError('',to_dir,
                errors.NotADirectory(to_abs))

        if to_entry[1][0][0] != 'd':
            raise errors.BzrMoveFailedError('',to_dir,
                errors.NotADirectory(to_abs))

        if self._inventory is not None:
            update_inventory = True
            inv = self.inventory
            to_dir_id = to_entry[0][2]
            to_dir_ie = inv[to_dir_id]
        else:
            update_inventory = False

        rollbacks = []
        def move_one(old_entry, from_path_utf8, minikind, executable,
                     fingerprint, packed_stat, size,
                     to_block, to_key, to_path_utf8):
            state._make_absent(old_entry)
            from_key = old_entry[0]
            rollbacks.append(
                lambda:state.update_minimal(from_key,
                    minikind,
                    executable=executable,
                    fingerprint=fingerprint,
                    packed_stat=packed_stat,
                    path_utf8=from_path_utf8))
            state.update_minimal(to_key,
                    minikind,
                    executable=executable,
                    fingerprint=fingerprint,
                    packed_stat=packed_stat,
                    path_utf8=to_path_utf8)
            added_entry_index, _ = state._find_entry_index(to_key, to_block[1])
            new_entry = to_block[1][added_entry_index]
            rollbacks.append(lambda:state._make_absent(new_entry))
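
        # move_one above makes the old dirstate entry absent and inserts the
        # entry under its new key; each mutation pushes an inverse operation
        # onto 'rollbacks' so that a later failure can undo the whole rename.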

        for from_rel in from_paths:
            # from_rel is 'pathinroot/foo/bar'
            from_rel_utf8 = from_rel.encode('utf8')
            from_dirname, from_tail = osutils.split(from_rel)
            from_dirname, from_tail_utf8 = osutils.split(from_rel_utf8)
            from_entry = self._get_entry(path=from_rel)
            if from_entry == (None, None):
                raise errors.BzrMoveFailedError(from_rel,to_dir,
                    errors.NotVersionedError(path=str(from_rel)))

            from_id = from_entry[0][2]
            to_rel = pathjoin(to_dir, from_tail)
            to_rel_utf8 = pathjoin(to_dir_utf8, from_tail_utf8)
            item_to_entry = self._get_entry(path=to_rel)
            if item_to_entry != (None, None):
                raise errors.BzrMoveFailedError(from_rel, to_rel,
                    "Target is already versioned.")

            if from_rel == to_rel:
                raise errors.BzrMoveFailedError(from_rel, to_rel,
                    "Source and target are identical.")

            from_missing = not self.has_filename(from_rel)
            to_missing = not self.has_filename(to_rel)
            if after:
                move_file = False
            else:
                move_file = True
            if to_missing:
                if not move_file:
                    raise errors.BzrMoveFailedError(from_rel, to_rel,
                        errors.NoSuchFile(path=to_rel,
                        extra="New file has not been created yet"))
                elif from_missing:
                    # neither path exists
                    raise errors.BzrRenameFailedError(from_rel, to_rel,
                        errors.PathsDoNotExist(paths=(from_rel, to_rel)))
            else:
                if from_missing: # implicitly just update our path mapping
                    move_file = False
                elif not after:
                    raise errors.RenameFailedFilesExist(from_rel, to_rel)
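
            # Every step below that touches disk, the inventory or the
            # dirstate appends its inverse to 'rollbacks'; rollback_rename()
            # replays them in reverse if a later step fails.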
            def rollback_rename():
                """A single rename has failed, roll it back."""
                # roll back everything, even if we encounter trouble doing one
                # of them.
                #
                # TODO: at least log the other exceptions rather than just
                # losing them mbp 20070307
                exc_info = None
                for rollback in reversed(rollbacks):
                    try:
                        rollback()
                    except Exception, e:
                        exc_info = sys.exc_info()
                if exc_info:
                    raise exc_info[0], exc_info[1], exc_info[2]

            # perform the disk move first - its the most likely failure point.
            if move_file:
                from_rel_abs = self.abspath(from_rel)
                to_rel_abs = self.abspath(to_rel)
                try:
                    osutils.rename(from_rel_abs, to_rel_abs)
                except OSError, e:
                    raise errors.BzrMoveFailedError(from_rel, to_rel, e[1])
                rollbacks.append(lambda: osutils.rename(to_rel_abs, from_rel_abs))
            try:
                # perform the rename in the inventory next if needed: its easy
                # to rollback
                if update_inventory:
                    # rename the entry
                    from_entry = inv[from_id]
                    current_parent = from_entry.parent_id
                    inv.rename(from_id, to_dir_id, from_tail)
                    rollbacks.append(
                        lambda: inv.rename(from_id, current_parent, from_tail))
                # finally do the rename in the dirstate, which is a little
                # tricky to rollback, but least likely to need it.
                old_block_index, old_entry_index, dir_present, file_present = \
                    state._get_block_entry_index(from_dirname, from_tail_utf8, 0)
                old_block = state._dirblocks[old_block_index][1]
                old_entry = old_block[old_entry_index]
                from_key, old_entry_details = old_entry
                cur_details = old_entry_details[0]
                # remove the old row
                to_key = ((to_block[0],) + from_key[1:3])
                minikind = cur_details[0]
                move_one(old_entry, from_path_utf8=from_rel_utf8,
                         minikind=minikind,
                         executable=cur_details[3],
                         fingerprint=cur_details[1],
                         packed_stat=cur_details[4],
                         size=cur_details[2],
                         to_block=to_block,
                         to_key=to_key,
                         to_path_utf8=to_rel_utf8)

                if minikind == 'd':
                    def update_dirblock(from_dir, to_key, to_dir_utf8):
                        """Recursively update all entries in this dirblock."""
                        if from_dir == '':
                            raise AssertionError("renaming root not supported")
                        from_key = (from_dir, '')
                        from_block_idx, present = \
                            state._find_block_index_from_key(from_key)
                        if not present:
                            # This is the old record, if it isn't present, then
                            # there is theoretically nothing to update.
                            # (Unless it isn't present because of lazy loading,
                            # but we don't do that yet)
                            return
                        from_block = state._dirblocks[from_block_idx]
                        to_block_index, to_entry_index, _, _ = \
                            state._get_block_entry_index(to_key[0], to_key[1], 0)
                        to_block_index = state._ensure_block(
                            to_block_index, to_entry_index, to_dir_utf8)
                        to_block = state._dirblocks[to_block_index]

                        # Grab a copy since move_one may update the list.
                        for entry in from_block[1][:]:
                            if not (entry[0][0] == from_dir):
                                raise AssertionError()
                            cur_details = entry[1][0]
                            to_key = (to_dir_utf8, entry[0][1], entry[0][2])
                            from_path_utf8 = osutils.pathjoin(entry[0][0], entry[0][1])
                            to_path_utf8 = osutils.pathjoin(to_dir_utf8, entry[0][1])
                            minikind = cur_details[0]
                            if minikind in 'ar':
                                # Deleted children of a renamed directory
                                # Do not need to be updated.
                                # Children that have been renamed out of this
                                # directory should also not be updated
                                continue
                            move_one(entry, from_path_utf8=from_path_utf8,
                                     minikind=minikind,
                                     executable=cur_details[3],
                                     fingerprint=cur_details[1],
                                     packed_stat=cur_details[4],
                                     size=cur_details[2],
                                     to_block=to_block,
                                     to_key=to_key,
                                     to_path_utf8=to_path_utf8)
                            if minikind == 'd':
                                # We need to move all the children of this
                                # entry
                                update_dirblock(from_path_utf8, to_key,
                                                to_path_utf8)
                    update_dirblock(from_rel_utf8, to_key, to_rel_utf8)
            except:
                rollback_rename()
                raise
            result.append((from_rel, to_rel))
        state._dirblock_state = dirstate.DirState.IN_MEMORY_MODIFIED
        self._make_dirty(reset_inventory=False)

        return result
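
    # A minimal sketch of the common case (assuming 'a' is a versioned file
    # and 'b' a versioned directory in tree):
    #   tree.move(['a'], 'b')    # returns [('a', 'b/a')]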

    def _must_be_locked(self):
        if not self._control_files._lock_count:
            raise errors.ObjectNotLocked(self)

    def _new_tree(self):
        """Initialize the state in this tree to be a new tree."""
        self._dirty = True

    @needs_read_lock
    def path2id(self, path):
        """Return the id for path in this tree."""
        path = path.strip('/')
        entry = self._get_entry(path=path)
        if entry == (None, None):
            return None
        return entry[0][2]

    def paths2ids(self, paths, trees=[], require_versioned=True):
        """See Tree.paths2ids().

        This specialisation fast-paths the case where all the trees are in the
        dirstate.
        """
        if paths is None:
            return None
        parents = self.get_parent_ids()
        for tree in trees:
            if not (isinstance(tree, DirStateRevisionTree) and tree._revision_id in
                parents):
                return super(WorkingTree4, self).paths2ids(paths, trees, require_versioned)
        search_indexes = [0] + [1 + parents.index(tree._revision_id) for tree in trees]
        # -- make all paths utf8 --
        paths_utf8 = set()
        for path in paths:
            paths_utf8.add(path.encode('utf8'))
        paths = paths_utf8
        # -- paths is now a utf8 path set --
        # -- get the state object and prepare it.
        state = self.current_dirstate()
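        # NB: the bisect-based lookup below is currently switched off by the
        # 'if False' guard, so every call falls through to the in-memory
        # implementation.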
        if False and (state._dirblock_state == dirstate.DirState.NOT_IN_MEMORY
            and '' not in paths):
            paths2ids = self._paths2ids_using_bisect
        else:
            paths2ids = self._paths2ids_in_memory
        return paths2ids(paths, search_indexes,
                         require_versioned=require_versioned)

    def _paths2ids_in_memory(self, paths, search_indexes,
                             require_versioned=True):
        state = self.current_dirstate()
        state._read_dirblocks_if_needed()
        def _entries_for_path(path):
            """Return a list with all the entries that match path for all ids.
            """
            dirname, basename = os.path.split(path)
            key = (dirname, basename, '')
            block_index, present = state._find_block_index_from_key(key)
            if not present:
                # the block which should contain path is absent.
                return []
            result = []
            block = state._dirblocks[block_index][1]
            entry_index, _ = state._find_entry_index(key, block)
            # we may need to look at multiple entries at this path: walk while the paths match.
            while (entry_index < len(block) and
                block[entry_index][0][0:2] == key[0:2]):
                result.append(block[entry_index])
                entry_index += 1
            return result
        if require_versioned:
            # -- check all supplied paths are versioned in a search tree. --
            all_versioned = True
            for path in paths:
                path_entries = _entries_for_path(path)
                if not path_entries:
                    # this specified path is not present at all: error
                    all_versioned = False
                    break
                found_versioned = False
                # for each id at this path
                for entry in path_entries:
                    # for each tree.
                    for index in search_indexes:
                        if entry[1][index][0] != 'a': # absent
                            found_versioned = True
                            # all good: found a versioned cell
                            break
                if not found_versioned:
                    # none of the indexes was not 'absent' at all ids for this
                    # path.
                    all_versioned = False
                    break
            if not all_versioned:
                raise errors.PathsNotVersionedError(paths)
        # -- remove redundancy in supplied paths to prevent over-scanning --
        search_paths = osutils.minimum_path_selection(paths)

        # for all search_indexs in each path at or under each element of
        # search_paths, if the detail is relocated: add the id, and add the
        # relocated path as one to search if its not searched already. If the
        # detail is not relocated, add the id.
        searched_paths = set()
        found_ids = set()
        def _process_entry(entry):
            """Look at search_indexes within entry.

            If a specific tree's details are relocated, add the relocation
            target to search_paths if not searched already. If it is absent, do
            nothing. Otherwise add the id to found_ids.
            """
            for index in search_indexes:
                if entry[1][index][0] == 'r': # relocated
                    if not osutils.is_inside_any(searched_paths, entry[1][index][1]):
                        search_paths.add(entry[1][index][1])
                elif entry[1][index][0] != 'a': # absent
                    found_ids.add(entry[0][2])
        while search_paths:
            current_root = search_paths.pop()
            searched_paths.add(current_root)
            # process the entries for this containing directory: the rest will be
            # found by their parents recursively.
            root_entries = _entries_for_path(current_root)
            if not root_entries:
                # this specified path is not present at all, skip it.
                continue
            for entry in root_entries:
                _process_entry(entry)
            initial_key = (current_root, '', '')
            block_index, _ = state._find_block_index_from_key(initial_key)
            while (block_index < len(state._dirblocks) and
                osutils.is_inside(current_root, state._dirblocks[block_index][0])):
                for entry in state._dirblocks[block_index][1]:
                    _process_entry(entry)
                block_index += 1
        return found_ids

    def _paths2ids_using_bisect(self, paths, search_indexes,
                                require_versioned=True):
        state = self.current_dirstate()
        found_ids = set()

        split_paths = sorted(osutils.split(p) for p in paths)
        found = state._bisect_recursive(split_paths)

        if require_versioned:
            found_dir_names = set(dir_name_id[:2] for dir_name_id in found)
            for dir_name in split_paths:
                if dir_name not in found_dir_names:
                    raise errors.PathsNotVersionedError(paths)

        for dir_name_id, trees_info in found.iteritems():
            for index in search_indexes:
                if trees_info[index][0] not in ('r', 'a'):
                    found_ids.add(dir_name_id[2])
        return found_ids

    def read_working_inventory(self):
        """Read the working inventory.

        This is a meaningless operation for dirstate, but we obey it anyhow.
        """
        return self.inventory

    @needs_read_lock
    def revision_tree(self, revision_id):
        """See Tree.revision_tree.

        WorkingTree4 supplies revision_trees for any basis tree.
        """
        dirstate = self.current_dirstate()
        parent_ids = dirstate.get_parent_ids()
        if revision_id not in parent_ids:
            raise errors.NoSuchRevisionInTree(self, revision_id)
        if revision_id in dirstate.get_ghosts():
            raise errors.NoSuchRevisionInTree(self, revision_id)
        return DirStateRevisionTree(dirstate, revision_id,
            self.branch.repository)
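
    # Sketch of typical use, assuming the tree has at least one non-ghost
    # parent recorded in its dirstate:
    #   basis = tree.revision_tree(tree.get_parent_ids()[0])
    #   # basis is a DirStateRevisionTree backed by the same dirstate file.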

    @needs_tree_write_lock
    def set_last_revision(self, new_revision):
        """Change the last revision in the working tree."""
        parents = self.get_parent_ids()
        if new_revision in (NULL_REVISION, None):
            if len(parents) >= 2:
                raise AssertionError(
                    "setting the last parent to none with a pending merge is "
                    "unsupported.")
            self.set_parent_ids([])
        else:
            self.set_parent_ids([new_revision] + parents[1:],
                                allow_leftmost_as_ghost=True)

    @needs_tree_write_lock
    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This api will try to retrieve the tree data
        for each element of revision_ids from the trees repository. If you have
        tree data already available, it is more efficient to use
        set_parent_trees rather than set_parent_ids. set_parent_ids is however
        an easier API to use.

        :param revision_ids: The revision_ids to set as the parent ids of this
            working tree. Any of these may be ghosts.
        """
        trees = []
        for revision_id in revision_ids:
            try:
                revtree = self.branch.repository.revision_tree(revision_id)
            # TODO: jam 20070213 KnitVersionedFile raises
            #       RevisionNotPresent rather than NoSuchRevision if a
            #       given revision_id is not present. Should Repository be
            #       catching it and re-raising NoSuchRevision?
            except (errors.NoSuchRevision, errors.RevisionNotPresent):
                revtree = None
            trees.append((revision_id, revtree))
        self.set_parent_trees(trees,
            allow_leftmost_as_ghost=allow_leftmost_as_ghost)

    @needs_tree_write_lock
    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        """Set the parents of the working tree.

        :param parents_list: A list of (revision_id, tree) tuples.
            If tree is None, then that element is treated as an unreachable
            parent tree - i.e. a ghost.
        """
        dirstate = self.current_dirstate()
        if len(parents_list) > 0:
            if not allow_leftmost_as_ghost and parents_list[0][1] is None:
                raise errors.GhostRevisionUnusableHere(parents_list[0][0])
        real_trees = []
        ghosts = []

        parent_ids = [rev_id for rev_id, tree in parents_list]
        graph = self.branch.repository.get_graph()
        heads = graph.heads(parent_ids)
        accepted_revisions = set()

        # convert absent trees to the null tree, which we convert back to
        # missing on access.
        for rev_id, tree in parents_list:
            if len(accepted_revisions) > 0:
                # we always accept the first tree
                if rev_id in accepted_revisions or rev_id not in heads:
                    # We have already included either this tree, or its
                    # descendent, so we skip it.
                    continue
            _mod_revision.check_not_reserved_id(rev_id)
            if tree is not None:
                real_trees.append((rev_id, tree))
            else:
                real_trees.append((rev_id,
                    self.branch.repository.revision_tree(None)))
                ghosts.append(rev_id)
            accepted_revisions.add(rev_id)
        dirstate.set_parent_trees(real_trees, ghosts=ghosts)
        self._make_dirty(reset_inventory=False)

    def _set_root_id(self, file_id):
        """See WorkingTree.set_root_id."""
        state = self.current_dirstate()
        state.set_path_id('', file_id)
        if state._dirblock_state == dirstate.DirState.IN_MEMORY_MODIFIED:
            self._make_dirty(reset_inventory=True)

    def _sha_from_stat(self, path, stat_result):
        """Get a sha digest from the tree's stat cache.

        The default implementation assumes no stat cache is present.

        :param path: The path.
        :param stat_result: The stat result being looked up.
        """
        return self.current_dirstate().sha1_from_stat(path, stat_result)

    @needs_read_lock
    def supports_tree_reference(self):
        return self._repo_supports_tree_reference

    def unlock(self):
        """Unlock in format 4 trees needs to write the entire dirstate."""
        # do non-implementation specific cleanup
        self._cleanup()

        if self._control_files._lock_count == 1:
            # eventually we should do signature checking during read locks for
            # dirstate updates.
            if self._control_files._lock_mode == 'w':
                if self._dirty:
                    self.flush()
            if self._dirstate is not None:
                # This is a no-op if there are no modifications.
                self._dirstate.save()
                self._dirstate.unlock()
            # TODO: jam 20070301 We shouldn't have to wipe the dirstate at this
            #       point. Instead, it could check if the header has been
            #       modified when it is locked, and if not, it can hang on to
            #       the data it has in memory.
            self._dirstate = None
            self._inventory = None
        # reverse order of locking.
        try:
            return self._control_files.unlock()
        finally:
            self.branch.unlock()

    @needs_tree_write_lock
    def unversion(self, file_ids):
        """Remove the file ids in file_ids from the current versioned set.

        When a file_id is unversioned, all of its children are automatically
        unversioned.

        :param file_ids: The file ids to stop versioning.
        :raises: NoSuchId if any fileid is not currently versioned.
        """
        if not file_ids:
            return
        state = self.current_dirstate()
        state._read_dirblocks_if_needed()
        ids_to_unversion = set(file_ids)
        paths_to_unversion = set()
        # sketch:
        # check if the root is to be unversioned, if so, assert for now.
        # walk the state marking unversioned things as absent.
        # if there are any un-unversioned ids at the end, raise
        for key, details in state._dirblocks[0][1]:
            if (details[0][0] not in ('a', 'r') and # absent or relocated
                key[2] in ids_to_unversion):
                # I haven't written the code to unversion / yet - it should be
                # supported.
                raise errors.BzrError('Unversioning the / is not currently supported')
        block_index = 0
        while block_index < len(state._dirblocks):
            # process one directory at a time.
            block = state._dirblocks[block_index]
            # first check: is the path one to remove - it or its children
            delete_block = False
            for path in paths_to_unversion:
                if (block[0].startswith(path) and
                    (len(block[0]) == len(path) or
                     block[0][len(path)] == '/')):
                    # this entire block should be deleted - its the block for a
                    # path to unversion; or the child of one
                    delete_block = True
                    break
            # TODO: trim paths_to_unversion as we pass by paths
            if delete_block:
                # this block is to be deleted: process it.
                # TODO: we can special case the no-parents case and
                # just forget the whole block.
                entry_index = 0
                while entry_index < len(block[1]):
                    # Mark this file id as having been removed
                    entry = block[1][entry_index]
                    ids_to_unversion.discard(entry[0][2])
                    if (entry[1][0][0] in 'ar' # don't remove absent or renamed
                        # entries
                        or not state._make_absent(entry)):
                        entry_index += 1
                # go to the next block. (At the moment we dont delete empty
                # dirblocks)
                block_index += 1
                continue
            entry_index = 0
            while entry_index < len(block[1]):
                entry = block[1][entry_index]
                if (entry[1][0][0] in ('a', 'r') or # absent, relocated
                    # ^ some parent row.
                    entry[0][2] not in ids_to_unversion):
                    # ^ not an id to unversion
                    entry_index += 1
                    continue
                if entry[1][0][0] == 'd':
                    paths_to_unversion.add(pathjoin(entry[0][0], entry[0][1]))
                if not state._make_absent(entry):
                    entry_index += 1
                # we have unversioned this id
                ids_to_unversion.remove(entry[0][2])
            block_index += 1
        if ids_to_unversion:
            raise errors.NoSuchId(self, iter(ids_to_unversion).next())
        self._make_dirty(reset_inventory=False)
        # have to change the legacy inventory too.
        if self._inventory is not None:
            for file_id in file_ids:
                self._inventory.remove_recursive_id(file_id)

    @needs_tree_write_lock
    def rename_one(self, from_rel, to_rel, after=False):
        """See WorkingTree.rename_one"""
        self.flush()
        WorkingTree.rename_one(self, from_rel, to_rel, after)

    @needs_tree_write_lock
    def apply_inventory_delta(self, changes):
        """See MutableTree.apply_inventory_delta"""
        state = self.current_dirstate()
        state.update_by_delta(changes)
        self._make_dirty(reset_inventory=True)

    def update_basis_by_delta(self, new_revid, delta):
        """See MutableTree.update_basis_by_delta."""
        if self.last_revision() == new_revid:
            raise AssertionError()
        self.current_dirstate().update_basis_by_delta(delta, new_revid)
        self._make_dirty(reset_inventory=True)

    def _validate(self):
        self._dirstate._validate()

    @needs_tree_write_lock
    def _write_inventory(self, inv):
        """Write inventory as the current inventory."""
        if self._dirty:
            raise AssertionError("attempting to write an inventory when the "
                "dirstate is dirty will lose pending changes")
        self.current_dirstate().set_state_from_inventory(inv)
        self._make_dirty(reset_inventory=False)
        if self._inventory is not None:
            self._inventory = inv
        self.flush()


class WorkingTreeFormat4(WorkingTreeFormat3):
    """The first consolidated dirstate working tree format.

    This format:
        - exists within a metadir controlling .bzr
        - includes an explicit version marker for the workingtree control
          files, separate from the BzrDir format
        - modifies the hash cache format
        - is new in bzr 0.15
        - uses a LockDir to guard access to it.
    """

    upgrade_recommended = False

    def get_format_string(self):
        """See WorkingTreeFormat.get_format_string()."""
        return "Bazaar Working Tree Format 4 (bzr 0.15)\n"

    def get_format_description(self):
        """See WorkingTreeFormat.get_format_description()."""
        return "Working tree format 4"

    def initialize(self, a_bzrdir, revision_id=None, from_branch=None,
                   accelerator_tree=None, hardlink=False):
        """See WorkingTreeFormat.initialize().

        :param revision_id: allows creating a working tree at a different
            revision than the branch is at.
        :param accelerator_tree: A tree which can be used for retrieving file
            contents more quickly than the revision tree, i.e. a workingtree.
            The revision tree will be used for cases where accelerator_tree's
            content is different.
        :param hardlink: If true, hard-link files from accelerator_tree,
            where possible.

        These trees get an initial random root id, if their repository supports
        rich root data, TREE_ROOT otherwise.
        """
        if not isinstance(a_bzrdir.transport, LocalTransport):
            raise errors.NotLocalUrl(a_bzrdir.transport.base)
        transport = a_bzrdir.get_workingtree_transport(self)
        control_files = self._open_control_files(a_bzrdir)
        control_files.create_lock()
        control_files.lock_write()
        transport.put_bytes('format', self.get_format_string(),
            mode=a_bzrdir._get_file_mode())
        if from_branch is not None:
            branch = from_branch
        else:
            branch = a_bzrdir.open_branch()
        if revision_id is None:
            revision_id = branch.last_revision()
        local_path = transport.local_abspath('dirstate')
        # write out new dirstate (must exist when we create the tree)
        state = dirstate.DirState.initialize(local_path)
        state.unlock()
        del state
        wt = WorkingTree4(a_bzrdir.root_transport.local_abspath('.'),
                         branch,
                         _format=self,
                         _bzrdir=a_bzrdir,
                         _control_files=control_files)
        wt._new_tree()
        wt.lock_tree_write()
        try:
            if revision_id in (None, NULL_REVISION):
                if branch.repository.supports_rich_root():
                    wt._set_root_id(generate_ids.gen_root_id())
                else:
                    wt._set_root_id(ROOT_ID)
                wt.flush()
            basis = None
            # frequently, we will get here due to branching.  The accelerator
            # tree will be the tree from the branch, so the desired basis
            # tree will often be a parent of the accelerator tree.
            if accelerator_tree is not None:
                try:
                    basis = accelerator_tree.revision_tree(revision_id)
                except errors.NoSuchRevision:
                    pass
            if basis is None:
                basis = branch.repository.revision_tree(revision_id)
            if revision_id == NULL_REVISION:
                parents_list = []
            else:
                parents_list = [(revision_id, basis)]
            basis.lock_read()
            try:
                wt.set_parent_trees(parents_list, allow_leftmost_as_ghost=True)
                wt.flush()
                # if the basis has a root id we have to use that; otherwise we
                # use a new random one
                basis_root_id = basis.get_root_id()
                if basis_root_id is not None:
                    wt._set_root_id(basis_root_id)
                    wt.flush()
                # delta_from_tree is safe even for DirStateRevisionTrees,
                # because wt4.apply_inventory_delta does not mutate the input
                # inventory entries.
                transform.build_tree(basis, wt, accelerator_tree,
                                     hardlink=hardlink, delta_from_tree=True)
            finally:
                basis.unlock()
        finally:
            control_files.unlock()
            wt.unlock()
        return wt
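
    # Rough usage sketch (assumes an existing metadir 'a_bzrdir' on a local
    # transport that already has a branch):
    #   wt = WorkingTreeFormat4().initialize(a_bzrdir)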

    def _open(self, a_bzrdir, control_files):
        """Open the tree itself.

        :param a_bzrdir: the dir for the tree.
        :param control_files: the control files for the tree.
        """
        return WorkingTree4(a_bzrdir.root_transport.local_abspath('.'),
                           branch=a_bzrdir.open_branch(),
                           _format=self,
                           _bzrdir=a_bzrdir,
                           _control_files=control_files)

    def __get_matchingbzrdir(self):
        # please test against something that will let us do tree references
        return bzrdir.format_registry.make_bzrdir(
            'dirstate-with-subtree')

    _matchingbzrdir = property(__get_matchingbzrdir)


class DirStateRevisionTree(Tree):
    """A revision tree pulling the inventory from a dirstate."""

    def __init__(self, dirstate, revision_id, repository):
        self._dirstate = dirstate
        self._revision_id = revision_id
        self._repository = repository
        self._inventory = None
        self._locked = 0
        self._dirstate_locked = False

    def __repr__(self):
        return "<%s of %s in %s>" % \
            (self.__class__.__name__, self._revision_id, self._dirstate)

    def annotate_iter(self, file_id,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """See Tree.annotate_iter"""
        w = self._get_weave(file_id)
        return w.annotate(self.inventory[file_id].revision)

    def _get_ancestors(self, default_revision):
        return set(self._repository.get_ancestry(self._revision_id,
                                                 topo_sorted=False))

    def _comparison_data(self, entry, path):
        """See Tree._comparison_data."""
        if entry is None:
            return None, False, None
        # trust the entry as RevisionTree does, but this may not be
        # sensible: the entry might not have come from us?
        return entry.kind, entry.executable, None

    def _file_size(self, entry, stat_value):
        return entry.text_size

    def filter_unversioned_files(self, paths):
        """Filter out paths that are not versioned.

        :return: set of paths.
        """
        pred = self.has_filename
        return set((p for p in paths if not pred(p)))

    def get_root_id(self):
        return self.path2id('')

    def id2path(self, file_id):
        "Convert a file-id to a path."
        entry = self._get_entry(file_id=file_id)
        if entry == (None, None):
            raise errors.NoSuchId(tree=self, file_id=file_id)
        path_utf8 = osutils.pathjoin(entry[0][0], entry[0][1])
        return path_utf8.decode('utf8')

    def _get_parent_index(self):
        """Return the index in the dirstate referenced by this tree."""
        return self._dirstate.get_parent_ids().index(self._revision_id) + 1
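
    # Tree index 0 in the dirstate is the working tree itself; the recorded
    # parents follow it in order, so this revision's column is its position
    # in get_parent_ids() plus one.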

    def _get_entry(self, file_id=None, path=None):
        """Get the dirstate row for file_id or path.

        If either file_id or path is supplied, it is used as the key to lookup.
        If both are supplied, the fastest lookup is used, and an error is
        raised if they do not both point at the same row.

        :param file_id: An optional unicode file_id to be looked up.
        :param path: An optional unicode path to be looked up.
        :return: The dirstate row tuple for path/file_id, or (None, None)
        """
        if file_id is None and path is None:
            raise errors.BzrError('must supply file_id or path')
        if path is not None:
            path = path.encode('utf8')
        parent_index = self._get_parent_index()
        return self._dirstate._get_entry(parent_index, fileid_utf8=file_id, path_utf8=path)

    def _generate_inventory(self):
        """Create and set self.inventory from the dirstate object.

        (So this is only called the first time the inventory is requested for
        this tree; it then remains in memory until it's out of date.)

        This is relatively expensive: we have to walk the entire dirstate.
        """
        if not self._locked:
            raise AssertionError(
                'cannot generate inventory of an unlocked '
                'dirstate revision tree')
        # separate call for profiling - makes it clear where the costs are.
        self._dirstate._read_dirblocks_if_needed()
        if self._revision_id not in self._dirstate.get_parent_ids():
            raise AssertionError(
                'parent %s has disappeared from %s' % (
                self._revision_id, self._dirstate.get_parent_ids()))
        parent_index = self._dirstate.get_parent_ids().index(self._revision_id) + 1
        # This is identical now to the WorkingTree _generate_inventory except
        # for the tree index use.
        root_key, current_entry = self._dirstate._get_entry(parent_index, path_utf8='')
        current_id = root_key[2]
        if current_entry[parent_index][0] != 'd':
            raise AssertionError()
        inv = Inventory(root_id=current_id, revision_id=self._revision_id)
        inv.root.revision = current_entry[parent_index][4]
        # Turn some things into local variables
        minikind_to_kind = dirstate.DirState._minikind_to_kind
        factory = entry_factory
        utf8_decode = cache_utf8._utf8_decode
        inv_byid = inv._byid
        # we could do this straight out of the dirstate; it might be fast
        # and should be profiled - RBC 20070216
        parent_ies = {'' : inv.root}
        for block in self._dirstate._dirblocks[1:]: #skip root
            dirname = block[0]
            try:
                parent_ie = parent_ies[dirname]
            except KeyError:
                # all the paths in this block are not versioned in this tree
                continue
            for key, entry in block[1]:
                minikind, fingerprint, size, executable, revid = entry[parent_index]
                if minikind in ('a', 'r'): # absent, relocated
                    # not this tree
                    continue
                name = key[1]
                name_unicode = utf8_decode(name)[0]
                file_id = key[2]
                kind = minikind_to_kind[minikind]
                inv_entry = factory[kind](file_id, name_unicode,
                                          parent_ie.file_id)
                inv_entry.revision = revid
                if kind == 'file':
                    inv_entry.executable = executable
                    inv_entry.text_size = size
                    inv_entry.text_sha1 = fingerprint
                elif kind == 'directory':
                    parent_ies[(dirname + '/' + name).strip('/')] = inv_entry
                elif kind == 'symlink':
                    inv_entry.executable = False
                    inv_entry.text_size = None
                    inv_entry.symlink_target = utf8_decode(fingerprint)[0]
                elif kind == 'tree-reference':
                    inv_entry.reference_revision = fingerprint or None
                else:
                    raise AssertionError("cannot convert entry %r into an InventoryEntry"
                                         % entry)
                # These checks cost us around 40ms on a 55k entry tree
                if file_id in inv_byid:
                    raise AssertionError('file_id %s already in'
                        ' inventory as %s' % (file_id, inv_byid[file_id]))
                if name_unicode in parent_ie.children:
                    raise AssertionError('name %r already in parent'
                        % (name_unicode,))
                inv_byid[file_id] = inv_entry
                parent_ie.children[name_unicode] = inv_entry
        self._inventory = inv

    def get_file_mtime(self, file_id, path=None):
        """Return the modification time for this record.

        We return the timestamp of the last-changed revision.
        """
        # Make sure the file exists
        entry = self._get_entry(file_id, path=path)
        if entry == (None, None): # do we raise?
            return None
        parent_index = self._get_parent_index()
        last_changed_revision = entry[1][parent_index][4]
        return self._repository.get_revision(last_changed_revision).timestamp

    def get_file_sha1(self, file_id, path=None, stat_value=None):
        entry = self._get_entry(file_id=file_id, path=path)
        parent_index = self._get_parent_index()
        parent_details = entry[1][parent_index]
        if parent_details[0] == 'f':
            return parent_details[1]
        return None

    def _get_weave(self, file_id):
        return self._repository.weave_store.get_weave(file_id,
                self._repository.get_transaction())

    def get_file(self, file_id, path=None):
        return StringIO(self.get_file_text(file_id))

    def get_file_lines(self, file_id):
        entry = self._get_entry(file_id=file_id)[1]
        if entry is None:
            raise errors.NoSuchId(tree=self, file_id=file_id)
        return self._get_weave(file_id).get_lines(entry[1][4])

    def get_file_size(self, file_id):
        """See Tree.get_file_size"""
        return self.inventory[file_id].text_size

    def get_file_text(self, file_id):
        return ''.join(self.get_file_lines(file_id))

    def get_reference_revision(self, file_id, path=None):
        return self.inventory[file_id].reference_revision

    def iter_files_bytes(self, desired_files):
        """See Tree.iter_files_bytes.

        This version is implemented on top of Repository.iter_files_bytes"""
        parent_index = self._get_parent_index()
        repo_desired_files = []
        for file_id, identifier in desired_files:
            entry = self._get_entry(file_id)
            if entry == (None, None):
                raise errors.NoSuchId(self, file_id)
            repo_desired_files.append((file_id, entry[1][parent_index][4],
                                       identifier))
        return self._repository.iter_files_bytes(repo_desired_files)

    def get_symlink_target(self, file_id):
        entry = self._get_entry(file_id=file_id)
        parent_index = self._get_parent_index()
        if entry[1][parent_index][0] != 'l':
            return None
        else:
            # At present, none of the tree implementations supports non-ascii
            # symlink targets. So we will just assume that the dirstate path is
            # correct.
            return entry[1][parent_index][1]

    def get_revision_id(self):
        """Return the revision id for this tree."""
        return self._revision_id

    def _get_inventory(self):
        if self._inventory is not None:
            return self._inventory
        self._must_be_locked()
        self._generate_inventory()
        return self._inventory

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def get_parent_ids(self):
        """The parents of a tree in the dirstate are not cached."""
        return self._repository.get_revision(self._revision_id).parent_ids

    def has_filename(self, filename):
        return bool(self.path2id(filename))

    def kind(self, file_id):
        entry = self._get_entry(file_id=file_id)[1]
        if entry is None:
            raise errors.NoSuchId(tree=self, file_id=file_id)
        return dirstate.DirState._minikind_to_kind[entry[1][0]]

    def stored_kind(self, file_id):
        """See Tree.stored_kind"""
        return self.kind(file_id)

    def path_content_summary(self, path):
        """See Tree.path_content_summary."""
        id = self.inventory.path2id(path)
        if id is None:
            return ('missing', None, None, None)
        entry = self._inventory[id]
        kind = entry.kind
        if kind == 'file':
            return (kind, entry.text_size, entry.executable, entry.text_sha1)
        elif kind == 'symlink':
            return (kind, None, None, entry.symlink_target)
        else:
            return (kind, None, None, None)

    def is_executable(self, file_id, path=None):
        ie = self.inventory[file_id]
        if ie.kind != "file":
            return None
        return ie.executable

    def list_files(self, include_root=False):
        # We use a standard implementation, because DirStateRevisionTree is
        # dealing with one of the parents of the current state
        inv = self._get_inventory()
        entries = inv.iter_entries()
        if self.inventory.root is not None and not include_root:
            entries.next()
        for path, entry in entries:
            yield path, 'V', entry.kind, entry.file_id, entry

    def lock_read(self):
        """Lock the tree for a set of operations."""
        if not self._locked:
            self._repository.lock_read()
            if self._dirstate._lock_token is None:
                self._dirstate.lock_read()
                self._dirstate_locked = True
        self._locked += 1

    def _must_be_locked(self):
        if not self._locked:
            raise errors.ObjectNotLocked(self)

    @needs_read_lock
    def path2id(self, path):
        """Return the id for path in this tree."""
        # lookup by path: faster than splitting and walking the inventory.
        entry = self._get_entry(path=path)
        if entry == (None, None):
            return None
        return entry[0][2]

    def unlock(self):
        """Unlock, freeing any cache memory used during the lock."""
        # outside of a lock, the inventory is suspect: release it.
        self._locked -= 1
        if not self._locked:
            self._inventory = None
            self._locked = 0
            if self._dirstate_locked:
                self._dirstate.unlock()
                self._dirstate_locked = False
            self._repository.unlock()

    def walkdirs(self, prefix=""):
        # TODO: jam 20070215 This is the lazy way by using the RevisionTree
        # implementation based on an inventory.
        # This should be cleaned up to use the much faster Dirstate code
        # So for now, we just build up the parent inventory, and extract
        # it the same way RevisionTree does.
        _directory = 'directory'
        inv = self._get_inventory()
        top_id = inv.path2id(prefix)
        if top_id is None:
            pending = []
        else:
            pending = [(prefix, top_id)]
        while pending:
            dirblock = []
            relpath, file_id = pending.pop()
            # 0 - relpath, 1- file-id
            if relpath:
                relroot = relpath + '/'
            else:
                relroot = ""
            # FIXME: stash the node in pending
            entry = inv[file_id]
            for name, child in entry.sorted_children():
                toppath = relroot + name
                dirblock.append((toppath, name, child.kind, None,
                    child.file_id, child.kind
                    ))
            yield (relpath, entry.file_id), dirblock
            # push the user specified dirs from dirblock
            for dir in reversed(dirblock):
                if dir[2] == _directory:
                    pending.append((dir[0], dir[4]))


class InterDirStateTree(InterTree):
    """Fast path optimiser for changes_from with dirstate trees.

    This is used only when both trees are in the dirstate working file, and
    the source is any parent within the dirstate, and the destination is
    the current working tree of the same dirstate.
    """
    # this could be generalized to allow comparisons between any trees in the
    # dirstate, and possibly between trees stored in different dirstates.

    def __init__(self, source, target):
        super(InterDirStateTree, self).__init__(source, target)
        if not InterDirStateTree.is_compatible(source, target):
            raise Exception, "invalid source %r and target %r" % (source, target)

    @staticmethod
    def make_source_parent_tree(source, target):
        """Change the source tree into a parent of the target."""
        revid = source.commit('record tree')
        target.branch.repository.fetch(source.branch.repository, revid)
        target.set_parent_ids([revid])
        return target.basis_tree(), target

    _matching_from_tree_format = WorkingTreeFormat4()
    _matching_to_tree_format = WorkingTreeFormat4()
    _test_mutable_trees_to_test_trees = make_source_parent_tree

    def iter_changes(self, include_unchanged=False,
                      specific_files=None, pb=None, extra_trees=[],
                      require_versioned=True, want_unversioned=False):
        """Return the changes from source to target.

        :return: An iterator that yields tuples. See InterTree.iter_changes
            for details.
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children of
            matched directories are included.
        :param include_unchanged: An optional boolean requesting the inclusion of
            unchanged entries in the result.
        :param extra_trees: An optional list of additional trees to use when
            mapping the contents of specific_files (paths) to file_ids.
        :param require_versioned: If True, all files in specific_files must be
            versioned in one of source, target, extra_trees or
            PathsNotVersionedError is raised.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        """
        utf8_decode = cache_utf8._utf8_decode
        _minikind_to_kind = dirstate.DirState._minikind_to_kind
        cmp_by_dirs = dirstate.cmp_by_dirs
        # NB: show_status depends on being able to pass in non-versioned files
        # and report them as unknown
        # TODO: handle extra trees in the dirstate.
        if (extra_trees or specific_files == []):
            # we can't fast-path these cases (yet)
            for f in super(InterDirStateTree, self).iter_changes(
                include_unchanged, specific_files, pb, extra_trees,
                require_versioned, want_unversioned=want_unversioned):
                yield f
            return
        parent_ids = self.target.get_parent_ids()
        if not (self.source._revision_id in parent_ids
                or self.source._revision_id == NULL_REVISION):
            raise AssertionError(
                "revision {%s} is not stored in {%s}, but %s "
                "can only be used for trees stored in the dirstate"
                % (self.source._revision_id, self.target, self.iter_changes))
        target_index = 0
        if self.source._revision_id == NULL_REVISION:
            source_index = None
            indices = (target_index,)
        else:
            if not (self.source._revision_id in parent_ids):
                raise AssertionError(
                    "Failure: source._revision_id: %s not in target.parent_ids(%s)" % (
                    self.source._revision_id, parent_ids))
            source_index = 1 + parent_ids.index(self.source._revision_id)
            indices = (source_index, target_index)
        # -- make all specific_files utf8 --
        if specific_files:
            specific_files_utf8 = set()
            for path in specific_files:
                specific_files_utf8.add(path.encode('utf8'))
            specific_files = specific_files_utf8
        else:
            specific_files = set([''])
        # -- specific_files is now a utf8 path set --
        # -- get the state object and prepare it.
        state = self.target.current_dirstate()
        state._read_dirblocks_if_needed()
def _entries_for_path(path):
"""Return a list with all the entries that match path for all ids."""
dirname, basename = os.path.split(path)
key = (dirname, basename, '')
block_index, present = state._find_block_index_from_key(key)
if not present:
# the block which should contain path is absent.
return []
result = []
block = state._dirblocks[block_index][1]
entry_index, _ = state._find_entry_index(key, block)
# we may need to look at multiple entries at this path: walk while the paths match.
while (entry_index < len(block) and
block[entry_index][0][0:2] == key[0:2]):
result.append(block[entry_index])
entry_index += 1
return result
if require_versioned:
# -- check all supplied paths are versioned in a search tree. --
all_versioned = True
for path in specific_files:
path_entries = _entries_for_path(path)
if not path_entries:
# this specified path is not present at all: error
all_versioned = False
found_versioned = False
# for each id at this path
for entry in path_entries:
for index in indices:
if entry[1][index][0] != 'a': # absent
found_versioned = True
# all good: found a versioned cell
if not found_versioned:
# every index was 'absent' for every id at this path:
# the path is not versioned in any searched tree.
all_versioned = False
if not all_versioned:
raise errors.PathsNotVersionedError(specific_files)
# -- remove redundancy in supplied specific_files to prevent over-scanning --
search_specific_files = set()
for path in specific_files:
other_specific_files = specific_files.difference(set([path]))
if not osutils.is_inside_any(other_specific_files, path):
# this is a top level path, we must check it.
search_specific_files.add(path)
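# For example, if specific_files is set(['dir', 'dir/file', 'other']), only
# 'dir' and 'other' are kept: 'dir/file' is inside 'dir' and will be reached
# when 'dir' is searched.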
# compare source_index and target_index at or under each element of search_specific_files.
# use the following comparison table. Note that we only want to do diff operations when
# the target is fdl because that's when the walkdirs logic will have exposed the pathinfo
# Source | Target | disk | action
# r | fdlt | | add source to search, add id path move and perform
# | | | diff check on source-target
# r | fdlt | a | dangling file that was present in the basis.
# r | a | | add source to search
# r | r | | this path is present in a non-examined tree, skip.
# r | r | a | this path is present in a non-examined tree, skip.
# a | fdlt | | add new id
# a | fdlt | a | dangling locally added file, skip
# a | a | | not present in either tree, skip
# a | a | a | not present in any tree, skip
# a | r | | not present in either tree at this path, skip as it
# | | | may not be selected by the user's list of paths.
# a | r | a | not present in either tree at this path, skip as it
# | | | may not be selected by the user's list of paths.
# fdlt | fdlt | | content in both: diff them
# fdlt | fdlt | a | deleted locally, but not unversioned - show as deleted ?
# fdlt | a | | unversioned: output deleted id for now
# fdlt | a | a | unversioned and deleted: output deleted id
# fdlt | r | | relocated in this tree, so add target to search.
# | | | Don't diff; we will see an r,fd pair when we reach
# | | | this id at the other path.
# fdlt | r | a | relocated in this tree, so add target to search.
# | | | Don't diff; we will see an r,fd pair when we reach
# | | | this id at the other path.
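# Key to the minikind codes used above: f=file, d=directory, l=symlink,
# t=tree-reference, a=absent, r=relocated.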
# for all search indexes in each path at or under each element of
# search_specific_files, if the detail is relocated: add the id, and add the
# relocated path as one to search if it's not searched already. If the
# detail is not relocated, add the id.
searched_specific_files = set()
NULL_PARENT_DETAILS = dirstate.DirState.NULL_PARENT_DETAILS
# Using a list so that we can access the values and change them in
# nested scope. Each one is [path, file_id]
last_source_parent = [None, None]
last_target_parent = [None, None]

use_filesystem_for_exec = (sys.platform != 'win32')
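# (Windows filesystems have no executable bit, so on win32 the executable
# flag recorded in the dirstate is trusted instead of the stat() result.)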
# Just a sentinel, so that _process_entry can say that this
# record is handled, but isn't interesting to process (unchanged)
uninteresting = object()

old_dirname_to_file_id = {}
new_dirname_to_file_id = {}
# TODO: jam 20070516 - Avoid the _get_entry lookup overhead by
# keeping a cache of directories that we have seen.
def _process_entry(entry, path_info):
"""Compare an entry and real disk to generate delta information.

:param path_info: top_relpath, basename, kind, lstat, abspath for
the path of entry. If None, then the path is considered absent.
(Perhaps we should pass in a concrete entry for this?)
Basename is returned as a utf8 string because we expect this
tuple will be ignored, and don't want to take the time to
decode.
:return: None if these don't match,
a tuple of information about the change, or
the object 'uninteresting' if these match, but are
basically identical.
"""
if source_index is None:
source_details = NULL_PARENT_DETAILS
source_details = entry[1][source_index]
target_details = entry[1][target_index]
target_minikind = target_details[0]
if path_info is not None and target_minikind in 'fdlt':
if not (target_index == 0):
raise AssertionError()
link_or_sha1 = state.update_entry(entry, abspath=path_info[4],
stat_value=path_info[3])
# The entry may have been modified by update_entry
target_details = entry[1][target_index]
target_minikind = target_details[0]
file_id = entry[0][2]
source_minikind = source_details[0]
if source_minikind in 'fdltr' and target_minikind in 'fdlt':
# claimed content in both: diff
# r | fdlt | | add source to search, add id path move and perform
# | | | diff check on source-target
# r | fdlt | a | dangling file that was present in the basis.
if source_minikind in 'r':
# add the source to the search path to find any children it
# has. TODO: only add if it is a container?
if not osutils.is_inside_any(searched_specific_files,
source_details[1]):
search_specific_files.add(source_details[1])
# generate the old path; this is needed for stating later
old_path = source_details[1]
old_dirname, old_basename = os.path.split(old_path)
path = pathjoin(entry[0][0], entry[0][1])
old_entry = state._get_entry(source_index,
path_utf8=old_path)
# update the source details variable to be the real location.
if old_entry == (None, None):
raise errors.CorruptDirstate(state._filename,
"entry '%s/%s' is considered renamed from %r"
" but source does not exist\n"
"entry: %s" % (entry[0][0], entry[0][1], old_path, entry))
source_details = old_entry[1][source_index]
source_minikind = source_details[0]
old_dirname = entry[0][0]
old_basename = entry[0][1]
old_path = path = None
if path_info is None:
# the file is missing on disk, show as removed.
content_change = True
# source and target are both versioned and disk file is present.
target_kind = path_info[2]
if target_kind == 'directory':
old_path = path = pathjoin(old_dirname, old_basename)
new_dirname_to_file_id[path] = file_id
if source_minikind != 'd':
content_change = True
# directories have no fingerprint
content_change = False
elif target_kind == 'file':
if source_minikind != 'f':
content_change = True
# We could check the size, but we already have the
# sha1 hash.
content_change = (link_or_sha1 != source_details[1])
# Target details is updated at update_entry time
if use_filesystem_for_exec:
# We don't need S_ISREG here, because we are sure
# we are dealing with a file.
target_exec = bool(stat.S_IEXEC & path_info[3].st_mode)
target_exec = target_details[3]
elif target_kind == 'symlink':
if source_minikind != 'l':
content_change = True
content_change = (link_or_sha1 != source_details[1])
elif target_kind == 'tree-reference':
if source_minikind != 't':
content_change = True
content_change = False
raise Exception("unknown kind %s" % path_info[2])
if source_minikind == 'd':
old_path = path = pathjoin(old_dirname, old_basename)
old_dirname_to_file_id[old_path] = file_id
# parent id is the entry for the path in the target tree
if old_dirname == last_source_parent[0]:
source_parent_id = last_source_parent[1]
source_parent_id = old_dirname_to_file_id[old_dirname]
source_parent_entry = state._get_entry(source_index,
path_utf8=old_dirname)
source_parent_id = source_parent_entry[0][2]
if source_parent_id == entry[0][2]:
# This is the root, so the parent is None
source_parent_id = None
last_source_parent[0] = old_dirname
last_source_parent[1] = source_parent_id
new_dirname = entry[0][0]
if new_dirname == last_target_parent[0]:
target_parent_id = last_target_parent[1]
target_parent_id = new_dirname_to_file_id[new_dirname]
# TODO: We don't always need to do the lookup, because the
# parent entry will be the same as the source entry.
target_parent_entry = state._get_entry(target_index,
path_utf8=new_dirname)
if target_parent_entry == (None, None):
raise AssertionError(
"Could not find target parent in wt: %s\nparent of: %s"
% (new_dirname, entry))
target_parent_id = target_parent_entry[0][2]
if target_parent_id == entry[0][2]:
# This is the root, so the parent is None
target_parent_id = None
last_target_parent[0] = new_dirname
last_target_parent[1] = target_parent_id
source_exec = source_details[3]
if (include_unchanged
or source_parent_id != target_parent_id
or old_basename != entry[0][1]
or source_exec != target_exec
if old_path is None:
old_path = path = pathjoin(old_dirname, old_basename)
old_path_u = utf8_decode(old_path)[0]
old_path_u = utf8_decode(old_path)[0]
if old_path == path:
path_u = utf8_decode(path)[0]
source_kind = _minikind_to_kind[source_minikind]
return (entry[0][2],
(old_path_u, path_u),
(source_parent_id, target_parent_id),
(utf8_decode(old_basename)[0], utf8_decode(entry[0][1])[0]),
(source_kind, target_kind),
(source_exec, target_exec))
return uninteresting
elif source_minikind in 'a' and target_minikind in 'fdlt':
# looks like a new file
if path_info is not None:
path = pathjoin(entry[0][0], entry[0][1])
# parent id is the entry for the path in the target tree
# TODO: these are the same for an entire directory: cache em.
parent_id = state._get_entry(target_index,
path_utf8=entry[0][0])[0][2]
if parent_id == entry[0][2]:
if use_filesystem_for_exec:
# We need S_ISREG here, because we aren't sure if this
# is a file or not.
target_exec = bool(
stat.S_ISREG(path_info[3].st_mode)
and stat.S_IEXEC & path_info[3].st_mode)
target_exec = target_details[3]
return (entry[0][2],
(None, utf8_decode(path)[0]),
(None, utf8_decode(entry[0][1])[0]),
(None, path_info[2]),
(None, target_exec))
# but it's not on disk: we deliberately treat this as just
# never-present. (Why ?! - RBC 20070224)
elif source_minikind in 'fdlt' and target_minikind in 'a':
# unversioned, possibly, or possibly not deleted: we don't care.
# if it's still on disk *and* there's no other entry at this
# path (we don't know this in this routine at the moment -
# perhaps we should change this), then it would be an unknown.
old_path = pathjoin(entry[0][0], entry[0][1])
# parent id is the entry for the path in the target tree
parent_id = state._get_entry(source_index, path_utf8=entry[0][0])[0][2]
if parent_id == entry[0][2]:
return (entry[0][2],
(utf8_decode(old_path)[0], None),
(utf8_decode(entry[0][1])[0], None),
(_minikind_to_kind[source_minikind], None),
(source_details[3], None))
elif source_minikind in 'fdlt' and target_minikind in 'r':
# a rename; could be a true rename, or a rename inherited from
# a renamed parent. TODO: handle this efficiently. It's not a
# common case to rename dirs though, so a correct but slow
# implementation will do.
if not osutils.is_inside_any(searched_specific_files, target_details[1]):
search_specific_files.add(target_details[1])
elif source_minikind in 'ra' and target_minikind in 'ra':
# neither of the selected trees contain this file,
# so skip over it. This is not currently directly tested, but
# is indirectly via test_too_much.TestCommands.test_conflicts.
raise AssertionError("don't know how to compare "
"source_minikind=%r, target_minikind=%r"
% (source_minikind, target_minikind))
## import pdb;pdb.set_trace()
while search_specific_files:
# TODO: the pending list should be lexically sorted? the
# interface doesn't require it.
current_root = search_specific_files.pop()
current_root_unicode = current_root.decode('utf8')
searched_specific_files.add(current_root)
# process the entries for this containing directory: the rest will be
# found by their parents recursively.
root_entries = _entries_for_path(current_root)
root_abspath = self.target.abspath(current_root_unicode)
root_stat = os.lstat(root_abspath)
if e.errno == errno.ENOENT:
# the path does not exist: let _process_entry know that.
root_dir_info = None
# some other random error: hand it up.
root_dir_info = ('', current_root,
osutils.file_kind_from_stat_mode(root_stat.st_mode), root_stat,
root_abspath)
if root_dir_info[2] == 'directory':
if self.target._directory_is_tree_reference(
current_root.decode('utf8')):
root_dir_info = root_dir_info[:2] + \
('tree-reference',) + root_dir_info[3:]
if not root_entries and not root_dir_info:
# this specified path is not present at all, skip it.
path_handled = False
for entry in root_entries:
result = _process_entry(entry, root_dir_info)
if result is not None:
if result is not uninteresting:
if want_unversioned and not path_handled and root_dir_info:
new_executable = bool(
stat.S_ISREG(root_dir_info[3].st_mode)
and stat.S_IEXEC & root_dir_info[3].st_mode)
(None, current_root_unicode),
(None, splitpath(current_root_unicode)[-1]),
(None, root_dir_info[2]),
(None, new_executable)
initial_key = (current_root, '', '')
block_index, _ = state._find_block_index_from_key(initial_key)
if block_index == 0:
# we have processed the total root already, but because the
# initial key matched it we should skip it here.
if root_dir_info and root_dir_info[2] == 'tree-reference':
current_dir_info = None
dir_iterator = osutils._walkdirs_utf8(root_abspath, prefix=current_root)
current_dir_info = dir_iterator.next()
# on win32, python2.4 has e.errno == ERROR_DIRECTORY, but
# python 2.5 has e.errno == EINVAL,
# and e.winerror == ERROR_DIRECTORY
e_winerror = getattr(e, 'winerror', None)
win_errors = (ERROR_DIRECTORY, ERROR_PATH_NOT_FOUND)
# there may be directories in the inventory even though
# this path is not a file on disk: so mark it as end of
# iterator.
if e.errno in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
current_dir_info = None
elif (sys.platform == 'win32'
and (e.errno in win_errors
or e_winerror in win_errors)):
current_dir_info = None
if current_dir_info[0][0] == '':
# remove .bzr from iteration
bzr_index = bisect_left(current_dir_info[1], ('.bzr',))
if current_dir_info[1][bzr_index][0] != '.bzr':
raise AssertionError()
del current_dir_info[1][bzr_index]
# walk until both the directory listing and the versioned metadata
# are exhausted.
if (block_index < len(state._dirblocks) and
osutils.is_inside(current_root, state._dirblocks[block_index][0])):
current_block = state._dirblocks[block_index]
current_block = None
while (current_dir_info is not None or
current_block is not None):
if (current_dir_info and current_block
and current_dir_info[0][0] != current_block[0]):
if cmp_by_dirs(current_dir_info[0][0], current_block[0]) < 0:
# filesystem data refers to paths not covered by the dirblock.
# this has two possibilities:
# A) it is versioned but empty, so there is no block for it
# B) it is not versioned.
# if (A) then we need to recurse into it to check for
# new unknown files or directories.
# if (B) then we should ignore it, because we don't
# recurse into unknown directories.
while path_index < len(current_dir_info[1]):
current_path_info = current_dir_info[1][path_index]
if want_unversioned:
if current_path_info[2] == 'directory':
if self.target._directory_is_tree_reference(
current_path_info[0].decode('utf8')):
current_path_info = current_path_info[:2] + \
('tree-reference',) + current_path_info[3:]
new_executable = bool(
stat.S_ISREG(current_path_info[3].st_mode)
and stat.S_IEXEC & current_path_info[3].st_mode)
(None, utf8_decode(current_path_info[0])[0]),
(None, utf8_decode(current_path_info[1])[0]),
(None, current_path_info[2]),
(None, new_executable))
# don't descend into this unversioned path if it is
# a directory
if current_path_info[2] in ('directory',
'tree-reference'):
del current_dir_info[1][path_index]
# This dir info has been handled, go to the next
current_dir_info = dir_iterator.next()
except StopIteration:
current_dir_info = None
# We have a dirblock entry for this location, but there
# is no filesystem path for this. This is most likely
# because a directory was removed from the disk.
# We don't have to report the missing directory,
# because that should have already been handled, but we
# need to handle all of the files that are contained
# within.
for current_entry in current_block[1]:
# entry referring to file not present on disk.
# advance the entry only, after processing.
result = _process_entry(current_entry, None)
if result is not None:
if result is not uninteresting:
if (block_index < len(state._dirblocks) and
osutils.is_inside(current_root,
state._dirblocks[block_index][0])):
current_block = state._dirblocks[block_index]
current_block = None
if current_block and entry_index < len(current_block[1]):
current_entry = current_block[1][entry_index]
current_entry = None
advance_entry = True
if current_dir_info and path_index < len(current_dir_info[1]):
current_path_info = current_dir_info[1][path_index]
if current_path_info[2] == 'directory':
if self.target._directory_is_tree_reference(
current_path_info[0].decode('utf8')):
current_path_info = current_path_info[:2] + \
('tree-reference',) + current_path_info[3:]
current_path_info = None
path_handled = False
while (current_entry is not None or
current_path_info is not None):
if current_entry is None:
# the check for path_handled when the path is advanced
# will yield this path if needed.
elif current_path_info is None:
# no path is fine: the per entry code will handle it.
result = _process_entry(current_entry, current_path_info)
if result is not None:
if result is not uninteresting:
elif (current_entry[0][1] != current_path_info[1]
or current_entry[1][target_index][0] in 'ar'):
# The current path on disk doesn't match the dirblock
# record. Either the dirblock is marked as absent, or
# the file on disk is not present at all in the
# dirblock. Either way, report about the dirblock
# entry, and let other code handle the filesystem one.
# Compare the basename for these files to determine
if current_path_info[1] < current_entry[0][1]:
# extra file on disk: pass for now, but only
# increment the path, not the entry
advance_entry = False
# entry referring to file not present on disk.
# advance the entry only, after processing.
result = _process_entry(current_entry, None)
if result is not None:
if result is not uninteresting:
advance_path = False
result = _process_entry(current_entry, current_path_info)
if result is not None:
if result is not uninteresting:
if advance_entry and current_entry is not None:
if entry_index < len(current_block[1]):
current_entry = current_block[1][entry_index]
current_entry = None
advance_entry = True # reset the advance flag
if advance_path and current_path_info is not None:
if not path_handled:
# unversioned in all regards
if want_unversioned:
new_executable = bool(
stat.S_ISREG(current_path_info[3].st_mode)
and stat.S_IEXEC & current_path_info[3].st_mode)
(None, utf8_decode(current_path_info[0])[0]),
(None, utf8_decode(current_path_info[1])[0]),
(None, current_path_info[2]),
(None, new_executable))
# don't descend into this unversioned path if it is
# a directory
if current_path_info[2] in ('directory',):
del current_dir_info[1][path_index]
# don't descend the disk iterator into any tree
# reference.
if current_path_info[2] == 'tree-reference':
del current_dir_info[1][path_index]
if path_index < len(current_dir_info[1]):
current_path_info = current_dir_info[1][path_index]
if current_path_info[2] == 'directory':
if self.target._directory_is_tree_reference(
current_path_info[0].decode('utf8')):
current_path_info = current_path_info[:2] + \
('tree-reference',) + current_path_info[3:]
current_path_info = None
path_handled = False
advance_path = True # reset the advance flag.
if current_block is not None:
if (block_index < len(state._dirblocks) and
osutils.is_inside(current_root, state._dirblocks[block_index][0])):
current_block = state._dirblocks[block_index]
current_block = None
if current_dir_info is not None:
current_dir_info = dir_iterator.next()
except StopIteration:
current_dir_info = None
@staticmethod
def is_compatible(source, target):
# the target must be a dirstate working tree
if not isinstance(target, WorkingTree4):
return False
# the source must be a revision tree or dirstate rev tree.
if not isinstance(source,
(revisiontree.RevisionTree, DirStateRevisionTree)):
return False
# the source revid must be in the target dirstate
if not (source._revision_id == NULL_REVISION or
source._revision_id in target.get_parent_ids()):
# TODO: what about ghosts? it may well need to
# check for them explicitly.
return False
return True

InterTree.register_optimiser(InterDirStateTree)
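
# Illustrative sketch (an assumption, not original code): the optimiser
# registered above is selected automatically when a dirstate working tree is
# compared with one of its parent trees, for example via changes_from().
# The helper name and path argument are placeholders.
def _example_show_changes(workingtree_path):
    """Return the TreeDelta between a working tree and its basis tree."""
    wt = WorkingTree.open(workingtree_path)
    wt.lock_read()
    try:
        basis = wt.basis_tree()
        basis.lock_read()
        try:
            # changes_from() goes through InterTree.get(), which picks
            # InterDirStateTree when is_compatible() accepts the pair.
            return wt.changes_from(basis)
        finally:
            basis.unlock()
    finally:
        wt.unlock()
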
class Converter3to4(object):
"""Perform an in-place upgrade of format 3 to format 4 trees."""

def __init__(self):
self.target_format = WorkingTreeFormat4()

def convert(self, tree):
# lock the control files not the tree, so that we don't get tree
# on-unlock behaviours, and so that no one else diddles with the
# tree during upgrade.
tree._control_files.lock_write()
tree.read_working_inventory()
self.create_dirstate_data(tree)
self.update_format(tree)
self.remove_xml_files(tree)
tree._control_files.unlock()

def create_dirstate_data(self, tree):
"""Create the dirstate based data for tree."""
local_path = tree.bzrdir.get_workingtree_transport(None
).local_abspath('dirstate')
state = dirstate.DirState.from_tree(tree, local_path)

def remove_xml_files(self, tree):
"""Remove the old format 3 data."""
transport = tree.bzrdir.get_workingtree_transport(None)
for path in ['basis-inventory-cache', 'inventory', 'last-revision',
'pending-merges', 'stat-cache']:
transport.delete(path)
except errors.NoSuchFile:
# some files are optional - just deal.

def update_format(self, tree):
"""Change the format marker."""
tree._transport.put_bytes('format',
self.target_format.get_format_string(),
mode=tree.bzrdir._get_file_mode())
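
# Illustrative sketch (an assumption, not original code): how an upgrade
# might drive the converter above.  `tree` is assumed to be an unlocked
# format 3 working tree; in practice the format upgrade machinery constructs
# and invokes Converter3to4 itself.
def _example_upgrade_tree_3_to_4(tree):
    """Convert a WorkingTree3 in place and reopen it as a WorkingTree4."""
    Converter3to4().convert(tree)
    return tree.bzrdir.open_workingtree()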