# Copyright (C) 2005, 2006 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

"""WorkingTree object and friends.

A WorkingTree represents the editable working copy of a branch.
Operations which represent the WorkingTree are also done here,
such as renaming or adding files. The WorkingTree has an inventory
which is updated by these operations. A commit produces a
new revision based on the workingtree and its inventory.

At the moment every WorkingTree has its own branch. Remote
WorkingTrees aren't supported.

To get a WorkingTree, call bzrdir.open_workingtree() or
WorkingTree.open(dir).
"""
32
# Format markers written as the first line of the respective control files,
# so that readers can detect (and reject) unknown future formats.
MERGE_MODIFIED_HEADER_1 = "BZR merge-modified list format 1"
CONFLICT_HEADER_1 = "BZR conflict list format 1"
35
# TODO: Give the workingtree sole responsibility for the working inventory;
# remove the variable and references to it from the branch. This may require
# updating the commit code so as to update the inventory within the working
# copy, and making sure there's only one WorkingTree for any directory on disk.
# At the moment they may alias the inventory and have old copies of it in
# memory. (Now done? -- mbp 20060309)
42
from binascii import hexlify
44
from copy import deepcopy
45
from cStringIO import StringIO
55
from bzrlib import bzrdir, errors, ignores, osutils, urlutils
56
from bzrlib.atomicfile import AtomicFile
58
from bzrlib.conflicts import Conflict, ConflictList, CONFLICT_SUFFIXES
59
from bzrlib.decorators import needs_read_lock, needs_write_lock
60
from bzrlib.errors import (BzrCheckError,
63
WeaveRevisionNotPresent,
67
MergeModifiedFormatError,
70
from bzrlib.inventory import InventoryEntry, Inventory
71
from bzrlib.lockable_files import LockableFiles, TransportLock
72
from bzrlib.lockdir import LockDir
73
from bzrlib.merge import merge_inner, transform_tree
74
import bzrlib.mutabletree
75
from bzrlib.mutabletree import needs_tree_write_lock
76
from bzrlib.osutils import (
93
from bzrlib.progress import DummyProgress, ProgressPhase
94
from bzrlib.revision import NULL_REVISION
95
import bzrlib.revisiontree
96
from bzrlib.rio import RioReader, rio_file, Stanza
97
from bzrlib.symbol_versioning import (deprecated_passed,
100
DEPRECATED_PARAMETER,
104
from bzrlib.trace import mutter, note
105
from bzrlib.transform import build_tree
106
from bzrlib.transport import get_transport
107
from bzrlib.transport.local import LocalTransport
108
from bzrlib.textui import show_status
113
# the regex removes any weird characters; we don't escape them
# but rather just pull them out
_gen_file_id_re = re.compile(r'[^\w.]')
# Shared state for _next_id_suffix(): the per-process random suffix and the
# serial number appended to it.  _gen_id_serial must exist at module level
# because _next_id_suffix() declares it with `global` and increments it.
_gen_id_suffix = None
_gen_id_serial = 0
120
def _next_id_suffix():
    """Create a new file id suffix that is reasonably unique.

    On the first call we combine the current time with 64 bits of randomness
    to give a highly probably globally unique number. Then each call in the same
    process adds 1 to a serial number we append to that unique value.
    """
    # XXX TODO: change bzrlib.add.smart_add to call workingtree.add() rather
    # than having to move the id randomness out of the inner loop like this.
    # XXX TODO: for the global randomness this uses we should add the thread-id
    # before the serial #.
    global _gen_id_suffix, _gen_id_serial
    if _gen_id_suffix is None:
        # First call in this process: seed the suffix with the current time
        # plus 16 random chars (~64 bits of randomness).
        _gen_id_suffix = "-%s-%s-" % (compact_date(time()), rand_chars(16))
    # Each subsequent call just bumps the serial, keeping ids unique within
    # this process without re-randomizing per call.
    _gen_id_serial += 1
    return _gen_id_suffix + str(_gen_id_serial)
138
def gen_file_id(name):
    """Return new file id for the basename 'name'.

    The uniqueness is supplied from _next_id_suffix.
    """
    # The real randomness is in the _next_id_suffix, the
    # rest of the identifier is just to be nice.
    # So we:
    # 1) Remove non-ascii word characters to keep the ids portable
    # 2) squash to lowercase, so the file id doesn't have to
    #    be escaped (case insensitive filesystems would bork for ids
    #    that only differred in case without escaping).
    # 3) truncate the filename to 20 chars. Long filenames also bork on some
    #    filesystems.
    # 4) Removing starting '.' characters to prevent the file ids from
    #    being considered hidden.
    ascii_word_only = _gen_file_id_re.sub('', name.lower())
    short_no_dots = ascii_word_only.lstrip('.')[:20]
    return short_no_dots + _next_id_suffix()
def gen_root_id():
    """Return a new tree-root file id."""
    # A tree root is just a regular generated id with a fixed basename.
    return gen_file_id('TREE_ROOT')
164
class TreeEntry(object):
    """An entry that implements the minimum interface used by commands.

    This needs further inspection, it may be better to have
    InventoryEntries without ids - though that seems wrong. For now,
    this is a parallel hierarchy to InventoryEntry, and needs to become
    one of several things: decorates to that hierarchy, children of, or
    parents of it.
    Another note is that these objects are currently only used when there is
    no InventoryEntry available - i.e. for unversioned objects.
    Perhaps they should be UnversionedEntry et al. ? - RBC 20051003
    """

    def __eq__(self, other):
        # yes, this us ugly, TODO: best practice __eq__ style.
        return (isinstance(other, TreeEntry)
                and other.__class__ == self.__class__)

    def kind_character(self):
        # Unknown kind: subclasses override with their display character.
        return "???"
186
class TreeDirectory(TreeEntry):
    """See TreeEntry. This is a directory in a working tree."""

    def __eq__(self, other):
        return (isinstance(other, TreeDirectory)
                and other.__class__ == self.__class__)

    def kind_character(self):
        # Directories display with a trailing slash, matching inventory style.
        return "/"
197
class TreeFile(TreeEntry):
    """See TreeEntry. This is a regular file in a working tree."""

    def __eq__(self, other):
        return (isinstance(other, TreeFile)
                and other.__class__ == self.__class__)

    def kind_character(self):
        # Plain files have no kind marker.
        return ''
208
class TreeLink(TreeEntry):
    """See TreeEntry. This is a symlink in a working tree."""

    def __eq__(self, other):
        return (isinstance(other, TreeLink)
                and other.__class__ == self.__class__)

    def kind_character(self):
        # Symlinks have no kind marker here.
        return ''
219
class WorkingTree(bzrlib.mutabletree.MutableTree):
220
"""Working copy tree.
222
The inventory is held in the `Branch` working-inventory, and the
223
files are in a directory on disk.
225
It is possible for a `WorkingTree` to have a filename which is
226
not listed in the Inventory and vice versa.
229
def __init__(self, basedir='.',
230
branch=DEPRECATED_PARAMETER,
236
"""Construct a WorkingTree for basedir.
238
If the branch is not supplied, it is opened automatically.
239
If the branch is supplied, it must be the branch for this basedir.
240
(branch.base is not cross checked, because for remote branches that
241
would be meaningless).
243
self._format = _format
244
self.bzrdir = _bzrdir
246
# not created via open etc.
247
warnings.warn("WorkingTree() is deprecated as of bzr version 0.8. "
248
"Please use bzrdir.open_workingtree or WorkingTree.open().",
251
wt = WorkingTree.open(basedir)
252
self._branch = wt.branch
253
self.basedir = wt.basedir
254
self._control_files = wt._control_files
255
self._hashcache = wt._hashcache
256
self._set_inventory(wt._inventory)
257
self._format = wt._format
258
self.bzrdir = wt.bzrdir
259
from bzrlib.hashcache import HashCache
260
from bzrlib.trace import note, mutter
261
assert isinstance(basedir, basestring), \
262
"base directory %r is not a string" % basedir
263
basedir = safe_unicode(basedir)
264
mutter("opening working tree %r", basedir)
265
if deprecated_passed(branch):
267
warnings.warn("WorkingTree(..., branch=XXX) is deprecated as of bzr 0.8."
268
" Please use bzrdir.open_workingtree() or"
269
" WorkingTree.open().",
273
self._branch = branch
275
self._branch = self.bzrdir.open_branch()
276
self.basedir = realpath(basedir)
277
# if branch is at our basedir and is a format 6 or less
278
if isinstance(self._format, WorkingTreeFormat2):
279
# share control object
280
self._control_files = self.branch.control_files
282
# assume all other formats have their own control files.
283
assert isinstance(_control_files, LockableFiles), \
284
"_control_files must be a LockableFiles, not %r" \
286
self._control_files = _control_files
287
# update the whole cache up front and write to disk if anything changed;
288
# in the future we might want to do this more selectively
289
# two possible ways offer themselves : in self._unlock, write the cache
290
# if needed, or, when the cache sees a change, append it to the hash
291
# cache file, and have the parser take the most recent entry for a
293
cache_filename = self.bzrdir.get_workingtree_transport(None).local_abspath('stat-cache')
294
hc = self._hashcache = HashCache(basedir, cache_filename, self._control_files._file_mode)
296
# is this scan needed ? it makes things kinda slow.
303
if _inventory is None:
304
self._set_inventory(self.read_working_inventory())
306
self._set_inventory(_inventory)
309
fget=lambda self: self._branch,
310
doc="""The branch this WorkingTree is connected to.
312
This cannot be set - it is reflective of the actual disk structure
313
the working tree has been constructed from.
316
def break_lock(self):
317
"""Break a lock if one is present from another instance.
319
Uses the ui factory to ask for confirmation if the lock may be from
322
This will probe the repository for its lock as well.
324
self._control_files.break_lock()
325
self.branch.break_lock()
327
def _set_inventory(self, inv):
328
assert inv.root is not None
329
self._inventory = inv
332
def open(path=None, _unsupported=False):
333
"""Open an existing working tree at path.
337
path = os.path.getcwdu()
338
control = bzrdir.BzrDir.open(path, _unsupported)
339
return control.open_workingtree(_unsupported)
342
def open_containing(path=None):
343
"""Open an existing working tree which has its root about path.
345
This probes for a working tree at path and searches upwards from there.
347
Basically we keep looking up until we find the control directory or
348
run into /. If there isn't one, raises NotBranchError.
349
TODO: give this a new exception.
350
If there is one, it is returned, along with the unused portion of path.
352
:return: The WorkingTree that contains 'path', and the rest of path
355
path = osutils.getcwd()
356
control, relpath = bzrdir.BzrDir.open_containing(path)
358
return control.open_workingtree(), relpath
361
def open_downlevel(path=None):
362
"""Open an unsupported working tree.
364
Only intended for advanced situations like upgrading part of a bzrdir.
366
return WorkingTree.open(path, _unsupported=True)
369
"""Iterate through file_ids for this tree.
371
file_ids are in a WorkingTree if they are in the working inventory
372
and the working file exists.
374
inv = self._inventory
375
for path, ie in inv.iter_entries():
376
if osutils.lexists(self.abspath(path)):
380
return "<%s of %s>" % (self.__class__.__name__,
381
getattr(self, 'basedir', None))
383
def abspath(self, filename):
384
return pathjoin(self.basedir, filename)
386
def basis_tree(self):
387
"""Return RevisionTree for the current last revision.
389
If the left most parent is a ghost then the returned tree will be an
390
empty tree - one obtained by calling repository.revision_tree(None).
393
revision_id = self.get_parent_ids()[0]
395
# no parents, return an empty revision tree.
396
# in the future this should return the tree for
397
# 'empty:' - the implicit root empty tree.
398
return self.branch.repository.revision_tree(None)
401
xml = self.read_basis_inventory()
402
inv = bzrlib.xml6.serializer_v6.read_inventory_from_string(xml)
403
if inv is not None and inv.revision_id == revision_id:
404
return bzrlib.tree.RevisionTree(self.branch.repository,
406
except (NoSuchFile, errors.BadInventoryFormat):
408
# No cached copy available, retrieve from the repository.
409
# FIXME? RBC 20060403 should we cache the inventory locally
412
return self.branch.repository.revision_tree(revision_id)
413
except errors.RevisionNotPresent:
414
# the basis tree *may* be a ghost or a low level error may have
415
# occured. If the revision is present, its a problem, if its not
417
if self.branch.repository.has_revision(revision_id):
419
# the basis tree is a ghost so return an empty tree.
420
return self.branch.repository.revision_tree(None)
423
@deprecated_method(zero_eight)
424
def create(branch, directory):
425
"""Create a workingtree for branch at directory.
427
If existing_directory already exists it must have a .bzr directory.
428
If it does not exist, it will be created.
430
This returns a new WorkingTree object for the new checkout.
432
TODO FIXME RBC 20060124 when we have checkout formats in place this
433
should accept an optional revisionid to checkout [and reject this if
434
checking out into the same dir as a pre-checkout-aware branch format.]
436
XXX: When BzrDir is present, these should be created through that
439
warnings.warn('delete WorkingTree.create', stacklevel=3)
440
transport = get_transport(directory)
441
if branch.bzrdir.root_transport.base == transport.base:
443
return branch.bzrdir.create_workingtree()
444
# different directory,
445
# create a branch reference
446
# and now a working tree.
447
raise NotImplementedError
450
@deprecated_method(zero_eight)
451
def create_standalone(directory):
452
"""Create a checkout and a branch and a repo at directory.
454
Directory must exist and be empty.
456
please use BzrDir.create_standalone_workingtree
458
return bzrdir.BzrDir.create_standalone_workingtree(directory)
460
def relpath(self, path):
461
"""Return the local path portion from a given path.
463
The path may be absolute or relative. If its a relative path it is
464
interpreted relative to the python current working directory.
466
return relpath(self.basedir, path)
468
def has_filename(self, filename):
469
return osutils.lexists(self.abspath(filename))
471
def get_file(self, file_id):
472
return self.get_file_byname(self.id2path(file_id))
474
def get_file_text(self, file_id):
475
return self.get_file(file_id).read()
477
def get_file_byname(self, filename):
478
return file(self.abspath(filename), 'rb')
480
def get_parent_ids(self):
481
"""See Tree.get_parent_ids.
483
This implementation reads the pending merges list and last_revision
484
value and uses that to decide what the parents list should be.
486
last_rev = self._last_revision()
492
merges_file = self._control_files.get_utf8('pending-merges')
496
for l in merges_file.readlines():
497
parents.append(l.rstrip('\n'))
500
def get_root_id(self):
501
"""Return the id of this trees root"""
502
inv = self.read_working_inventory()
503
return inv.root.file_id
505
def _get_store_filename(self, file_id):
506
## XXX: badly named; this is not in the store at all
507
return self.abspath(self.id2path(file_id))
510
def clone(self, to_bzrdir, revision_id=None, basis=None):
511
"""Duplicate this working tree into to_bzr, including all state.
513
Specifically modified files are kept as modified, but
514
ignored and unknown files are discarded.
516
If you want to make a new line of development, see bzrdir.sprout()
519
If not None, the cloned tree will have its last revision set to
520
revision, and and difference between the source trees last revision
521
and this one merged in.
524
If not None, a closer copy of a tree which may have some files in
525
common, and which file content should be preferentially copied from.
527
# assumes the target bzr dir format is compatible.
528
result = self._format.initialize(to_bzrdir)
529
self.copy_content_into(result, revision_id)
533
def copy_content_into(self, tree, revision_id=None):
534
"""Copy the current content and user files of this tree into tree."""
535
if revision_id is None:
536
transform_tree(tree, self)
538
# TODO now merge from tree.last_revision to revision (to preserve
539
# user local changes)
540
transform_tree(tree, self)
541
tree.set_parent_ids([revision_id])
543
def id2abspath(self, file_id):
544
return self.abspath(self.id2path(file_id))
546
def has_id(self, file_id):
547
# files that have been deleted are excluded
548
inv = self._inventory
549
if not inv.has_id(file_id):
551
path = inv.id2path(file_id)
552
return osutils.lexists(self.abspath(path))
554
def has_or_had_id(self, file_id):
555
if file_id == self.inventory.root.file_id:
557
return self.inventory.has_id(file_id)
559
__contains__ = has_id
561
def get_file_size(self, file_id):
562
return os.path.getsize(self.id2abspath(file_id))
565
def get_file_sha1(self, file_id, path=None):
567
path = self._inventory.id2path(file_id)
568
return self._hashcache.get_sha1(path)
570
def get_file_mtime(self, file_id, path=None):
572
path = self._inventory.id2path(file_id)
573
return os.lstat(self.abspath(path)).st_mtime
575
if not supports_executable():
576
def is_executable(self, file_id, path=None):
577
return self._inventory[file_id].executable
579
def is_executable(self, file_id, path=None):
581
path = self._inventory.id2path(file_id)
582
mode = os.lstat(self.abspath(path)).st_mode
583
return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)
586
def _add(self, files, ids, kinds):
587
"""See MutableTree._add."""
588
# TODO: Re-adding a file that is removed in the working copy
589
# should probably put it back with the previous ID.
590
# the read and write working inventory should not occur in this
591
# function - they should be part of lock_write and unlock.
592
inv = self.read_working_inventory()
593
for f, file_id, kind in zip(files, ids, kinds):
594
assert kind is not None
596
inv.add_path(f, kind=kind)
598
inv.add_path(f, kind=kind, file_id=file_id)
599
self._write_inventory(inv)
601
@needs_tree_write_lock
602
def _gather_kinds(self, files, kinds):
603
"""See MutableTree._gather_kinds."""
604
for pos, f in enumerate(files):
605
if kinds[pos] is None:
606
fullpath = normpath(self.abspath(f))
608
kinds[pos] = file_kind(fullpath)
610
if e.errno == errno.ENOENT:
611
raise NoSuchFile(fullpath)
614
def add_parent_tree_id(self, revision_id, allow_leftmost_as_ghost=False):
615
"""Add revision_id as a parent.
617
This is equivalent to retrieving the current list of parent ids
618
and setting the list to its value plus revision_id.
620
:param revision_id: The revision id to add to the parent list. It may
621
be a ghost revision as long as its not the first parent to be added,
622
or the allow_leftmost_as_ghost parameter is set True.
623
:param allow_leftmost_as_ghost: Allow the first parent to be a ghost.
625
parents = self.get_parent_ids() + [revision_id]
626
self.set_parent_ids(parents,
627
allow_leftmost_as_ghost=len(parents) > 1 or allow_leftmost_as_ghost)
629
@needs_tree_write_lock
630
def add_parent_tree(self, parent_tuple, allow_leftmost_as_ghost=False):
631
"""Add revision_id, tree tuple as a parent.
633
This is equivalent to retrieving the current list of parent trees
634
and setting the list to its value plus parent_tuple. See also
635
add_parent_tree_id - if you only have a parent id available it will be
636
simpler to use that api. If you have the parent already available, using
637
this api is preferred.
639
:param parent_tuple: The (revision id, tree) to add to the parent list.
640
If the revision_id is a ghost, pass None for the tree.
641
:param allow_leftmost_as_ghost: Allow the first parent to be a ghost.
643
parent_ids = self.get_parent_ids() + [parent_tuple[0]]
644
if len(parent_ids) > 1:
645
# the leftmost may have already been a ghost, preserve that if it
647
allow_leftmost_as_ghost = True
648
self.set_parent_ids(parent_ids,
649
allow_leftmost_as_ghost=allow_leftmost_as_ghost)
651
@needs_tree_write_lock
652
def add_pending_merge(self, *revision_ids):
653
# TODO: Perhaps should check at this point that the
654
# history of the revision is actually present?
655
parents = self.get_parent_ids()
657
for rev_id in revision_ids:
658
if rev_id in parents:
660
parents.append(rev_id)
663
self.set_parent_ids(parents, allow_leftmost_as_ghost=True)
665
@deprecated_method(zero_eleven)
667
def pending_merges(self):
668
"""Return a list of pending merges.
670
These are revisions that have been merged into the working
671
directory but not yet committed.
673
As of 0.11 this is deprecated. Please see WorkingTree.get_parent_ids()
674
instead - which is available on all tree objects.
676
return self.get_parent_ids()[1:]
678
@needs_tree_write_lock
679
def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
680
"""Set the parent ids to revision_ids.
682
See also set_parent_trees. This api will try to retrieve the tree data
683
for each element of revision_ids from the trees repository. If you have
684
tree data already available, it is more efficient to use
685
set_parent_trees rather than set_parent_ids. set_parent_ids is however
686
an easier API to use.
688
:param revision_ids: The revision_ids to set as the parent ids of this
689
working tree. Any of these may be ghosts.
691
if len(revision_ids) > 0:
692
leftmost_id = revision_ids[0]
693
if (not allow_leftmost_as_ghost and not
694
self.branch.repository.has_revision(leftmost_id)):
695
raise errors.GhostRevisionUnusableHere(leftmost_id)
696
self.set_last_revision(leftmost_id)
698
self.set_last_revision(None)
699
merges = revision_ids[1:]
700
self._control_files.put_utf8('pending-merges', '\n'.join(merges))
702
@needs_tree_write_lock
703
def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
704
"""See MutableTree.set_parent_trees."""
705
# parent trees are not used in current format trees, delegate to
707
self.set_parent_ids([rev for (rev, tree) in parents_list],
708
allow_leftmost_as_ghost=allow_leftmost_as_ghost)
710
@needs_tree_write_lock
711
def set_pending_merges(self, rev_list):
712
parents = self.get_parent_ids()
713
leftmost = parents[:1]
714
new_parents = leftmost + rev_list
715
self.set_parent_ids(new_parents)
717
@needs_tree_write_lock
718
def set_merge_modified(self, modified_hashes):
720
for file_id, hash in modified_hashes.iteritems():
721
yield Stanza(file_id=file_id, hash=hash)
722
self._put_rio('merge-hashes', iter_stanzas(), MERGE_MODIFIED_HEADER_1)
724
@needs_tree_write_lock
725
def _put_rio(self, filename, stanzas, header):
726
my_file = rio_file(stanzas, header)
727
self._control_files.put(filename, my_file)
729
@needs_write_lock # because merge pulls data into the branch.
730
def merge_from_branch(self, branch, to_revision=None):
731
"""Merge from a branch into this working tree.
733
:param branch: The branch to merge from.
734
:param to_revision: If non-None, the merge will merge to to_revision, but
735
not beyond it. to_revision does not need to be in the history of
736
the branch when it is supplied. If None, to_revision defaults to
737
branch.last_revision().
739
from bzrlib.merge import Merger, Merge3Merger
740
pb = bzrlib.ui.ui_factory.nested_progress_bar()
742
merger = Merger(self.branch, this_tree=self, pb=pb)
743
merger.pp = ProgressPhase("Merge phase", 5, pb)
744
merger.pp.next_phase()
745
# check that there are no
747
merger.check_basis(check_clean=True, require_commits=False)
748
if to_revision is None:
749
to_revision = branch.last_revision()
750
merger.other_rev_id = to_revision
751
if merger.other_rev_id is None:
752
raise error.NoCommits(branch)
753
self.branch.fetch(branch, last_revision=merger.other_rev_id)
754
merger.other_basis = merger.other_rev_id
755
merger.other_tree = self.branch.repository.revision_tree(
757
merger.pp.next_phase()
759
if merger.base_rev_id == merger.other_rev_id:
760
raise errors.PointlessMerge
761
merger.backup_files = False
762
merger.merge_type = Merge3Merger
763
merger.set_interesting_files(None)
764
merger.show_base = False
765
merger.reprocess = False
766
conflicts = merger.do_merge()
773
def merge_modified(self):
775
hashfile = self._control_files.get('merge-hashes')
780
if hashfile.next() != MERGE_MODIFIED_HEADER_1 + '\n':
781
raise MergeModifiedFormatError()
782
except StopIteration:
783
raise MergeModifiedFormatError()
784
for s in RioReader(hashfile):
785
file_id = s.get("file_id")
786
if file_id not in self.inventory:
789
if hash == self.get_file_sha1(file_id):
790
merge_hashes[file_id] = hash
794
def mkdir(self, path, file_id=None):
795
"""See MutableTree.mkdir()."""
797
file_id = gen_file_id(os.path.basename(path))
798
os.mkdir(self.abspath(path))
799
self.add(path, file_id, 'directory')
802
def get_symlink_target(self, file_id):
803
return os.readlink(self.id2abspath(file_id))
805
def file_class(self, filename):
806
if self.path2id(filename):
808
elif self.is_ignored(filename):
813
def list_files(self):
814
"""Recursively list all files as (path, class, kind, id, entry).
816
Lists, but does not descend into unversioned directories.
818
This does not include files that have been deleted in this
821
Skips the control directory.
823
inv = self._inventory
824
# Convert these into local objects to save lookup times
825
pathjoin = osutils.pathjoin
826
file_kind = osutils.file_kind
828
# transport.base ends in a slash, we want the piece
829
# between the last two slashes
830
transport_base_dir = self.bzrdir.transport.base.rsplit('/', 2)[1]
832
fk_entries = {'directory':TreeDirectory, 'file':TreeFile, 'symlink':TreeLink}
834
# directory file_id, relative path, absolute path, reverse sorted children
835
children = os.listdir(self.basedir)
837
# jam 20060527 The kernel sized tree seems equivalent whether we
838
# use a deque and popleft to keep them sorted, or if we use a plain
839
# list and just reverse() them.
840
children = collections.deque(children)
841
stack = [(inv.root.file_id, u'', self.basedir, children)]
843
from_dir_id, from_dir_relpath, from_dir_abspath, children = stack[-1]
846
f = children.popleft()
847
## TODO: If we find a subdirectory with its own .bzr
848
## directory, then that is a separate tree and we
849
## should exclude it.
851
# the bzrdir for this tree
852
if transport_base_dir == f:
855
# we know that from_dir_relpath and from_dir_abspath never end in a slash
856
# and 'f' doesn't begin with one, we can do a string op, rather
857
# than the checks of pathjoin(), all relative paths will have an extra slash
859
fp = from_dir_relpath + '/' + f
862
fap = from_dir_abspath + '/' + f
864
f_ie = inv.get_child(from_dir_id, f)
867
elif self.is_ignored(fp[1:]):
870
# we may not have found this file, because of a unicode issue
871
f_norm, can_access = osutils.normalized_filename(f)
872
if f == f_norm or not can_access:
873
# No change, so treat this file normally
876
# this file can be accessed by a normalized path
877
# check again if it is versioned
878
# these lines are repeated here for performance
880
fp = from_dir_relpath + '/' + f
881
fap = from_dir_abspath + '/' + f
882
f_ie = inv.get_child(from_dir_id, f)
885
elif self.is_ignored(fp[1:]):
894
raise BzrCheckError("file %r entered as kind %r id %r, "
896
% (fap, f_ie.kind, f_ie.file_id, fk))
898
# make a last minute entry
900
yield fp[1:], c, fk, f_ie.file_id, f_ie
903
yield fp[1:], c, fk, None, fk_entries[fk]()
905
yield fp[1:], c, fk, None, TreeEntry()
908
if fk != 'directory':
911
# But do this child first
912
new_children = os.listdir(fap)
914
new_children = collections.deque(new_children)
915
stack.append((f_ie.file_id, fp, fap, new_children))
916
# Break out of inner loop, so that we start outer loop with child
919
# if we finished all children, pop it off the stack
922
@needs_tree_write_lock
923
def move(self, from_paths, to_name):
926
to_name must exist in the inventory.
928
If to_name exists and is a directory, the files are moved into
929
it, keeping their old names.
931
Note that to_name is only the last component of the new name;
932
this doesn't change the directory.
934
This returns a list of (from_path, to_path) pairs for each
938
## TODO: Option to move IDs only
939
assert not isinstance(from_paths, basestring)
941
to_abs = self.abspath(to_name)
942
if not isdir(to_abs):
943
raise BzrError("destination %r is not a directory" % to_abs)
944
if not self.has_filename(to_name):
945
raise BzrError("destination %r not in working directory" % to_abs)
946
to_dir_id = inv.path2id(to_name)
947
if to_dir_id is None and to_name != '':
948
raise BzrError("destination %r is not a versioned directory" % to_name)
949
to_dir_ie = inv[to_dir_id]
950
if to_dir_ie.kind != 'directory':
951
raise BzrError("destination %r is not a directory" % to_abs)
953
to_idpath = inv.get_idpath(to_dir_id)
956
if not self.has_filename(f):
957
raise BzrError("%r does not exist in working tree" % f)
958
f_id = inv.path2id(f)
960
raise BzrError("%r is not versioned" % f)
961
name_tail = splitpath(f)[-1]
962
dest_path = pathjoin(to_name, name_tail)
963
if self.has_filename(dest_path):
964
raise BzrError("destination %r already exists" % dest_path)
965
if f_id in to_idpath:
966
raise BzrError("can't move %r to a subdirectory of itself" % f)
968
# OK, so there's a race here, it's possible that someone will
969
# create a file in this interval and then the rename might be
970
# left half-done. But we should have caught most problems.
971
orig_inv = deepcopy(self.inventory)
974
name_tail = splitpath(f)[-1]
975
dest_path = pathjoin(to_name, name_tail)
976
result.append((f, dest_path))
977
inv.rename(inv.path2id(f), to_dir_id, name_tail)
979
rename(self.abspath(f), self.abspath(dest_path))
981
raise BzrError("failed to rename %r to %r: %s" %
982
(f, dest_path, e[1]),
983
["rename rolled back"])
985
# restore the inventory on error
986
self._set_inventory(orig_inv)
988
self._write_inventory(inv)
991
@needs_tree_write_lock
992
def rename_one(self, from_rel, to_rel):
995
This can change the directory or the filename or both.
998
if not self.has_filename(from_rel):
999
raise BzrError("can't rename: old working file %r does not exist" % from_rel)
1000
if self.has_filename(to_rel):
1001
raise BzrError("can't rename: new working file %r already exists" % to_rel)
1003
file_id = inv.path2id(from_rel)
1005
raise BzrError("can't rename: old name %r is not versioned" % from_rel)
1007
entry = inv[file_id]
1008
from_parent = entry.parent_id
1009
from_name = entry.name
1011
if inv.path2id(to_rel):
1012
raise BzrError("can't rename: new name %r is already versioned" % to_rel)
1014
to_dir, to_tail = os.path.split(to_rel)
1015
to_dir_id = inv.path2id(to_dir)
1016
if to_dir_id is None and to_dir != '':
1017
raise BzrError("can't determine destination directory id for %r" % to_dir)
1019
mutter("rename_one:")
1020
mutter(" file_id {%s}" % file_id)
1021
mutter(" from_rel %r" % from_rel)
1022
mutter(" to_rel %r" % to_rel)
1023
mutter(" to_dir %r" % to_dir)
1024
mutter(" to_dir_id {%s}" % to_dir_id)
1026
inv.rename(file_id, to_dir_id, to_tail)
1028
from_abs = self.abspath(from_rel)
1029
to_abs = self.abspath(to_rel)
1031
rename(from_abs, to_abs)
1033
inv.rename(file_id, from_parent, from_name)
1034
raise BzrError("failed to rename %r to %r: %s"
1035
% (from_abs, to_abs, e[1]),
1036
["rename rolled back"])
1037
self._write_inventory(inv)
1041
"""Return all unknown files.
1043
These are files in the working directory that are not versioned or
1044
control files or ignored.
1046
for subp in self.extras():
1047
if not self.is_ignored(subp):
1050
@needs_tree_write_lock
1051
def unversion(self, file_ids):
1052
"""Remove the file ids in file_ids from the current versioned set.
1054
When a file_id is unversioned, all of its children are automatically
1057
:param file_ids: The file ids to stop versioning.
1058
:raises: NoSuchId if any fileid is not currently versioned.
1060
for file_id in file_ids:
1061
if self._inventory.has_id(file_id):
1062
self._inventory.remove_recursive_id(file_id)
1064
raise errors.NoSuchId(self, file_id)
1066
# in the future this should just set a dirty bit to wait for the
1067
# final unlock. However, until all methods of workingtree start
1068
# with the current in -memory inventory rather than triggering
1069
# a read, it is more complex - we need to teach read_inventory
1070
# to know when to read, and when to not read first... and possibly
1071
# to save first when the in memory one may be corrupted.
1072
# so for now, we just only write it if it is indeed dirty.
1074
self._write_inventory(self._inventory)
1076
@deprecated_method(zero_eight)
1077
def iter_conflicts(self):
1078
"""List all files in the tree that have text or content conflicts.
1079
DEPRECATED. Use conflicts instead."""
1080
return self._iter_conflicts()
1082
def _iter_conflicts(self):
1084
for info in self.list_files():
1086
stem = get_conflicted_stem(path)
1089
if stem not in conflicted:
1090
conflicted.add(stem)
1094
def pull(self, source, overwrite=False, stop_revision=None):
1095
top_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1098
pp = ProgressPhase("Pull phase", 2, top_pb)
1100
old_revision_history = self.branch.revision_history()
1101
basis_tree = self.basis_tree()
1102
count = self.branch.pull(source, overwrite, stop_revision)
1103
new_revision_history = self.branch.revision_history()
1104
if new_revision_history != old_revision_history:
1106
if len(old_revision_history):
1107
other_revision = old_revision_history[-1]
1109
other_revision = None
1110
repository = self.branch.repository
1111
pb = bzrlib.ui.ui_factory.nested_progress_bar()
1113
new_basis_tree = self.branch.basis_tree()
1114
merge_inner(self.branch,
1121
# TODO - dedup parents list with things merged by pull ?
1122
# reuse the revisiontree we merged against to set the new
1124
parent_trees = [(self.branch.last_revision(), new_basis_tree)]
1125
# we have to pull the merge trees out again, because
1126
# merge_inner has set the ids. - this corner is not yet
1127
# layered well enough to prevent double handling.
1128
merges = self.get_parent_ids()[1:]
1129
parent_trees.extend([
1130
(parent, repository.revision_tree(parent)) for
1132
self.set_parent_trees(parent_trees)
1139
def put_file_bytes_non_atomic(self, file_id, bytes):
1140
"""See MutableTree.put_file_bytes_non_atomic."""
1141
stream = file(self.id2abspath(file_id), 'wb')
1146
# TODO: update the hashcache here ?
1149
"""Yield all unknown files in this WorkingTree.
1151
If there are any unknown directories then only the directory is
1152
returned, not all its children. But if there are unknown files
1153
under a versioned subdirectory, they are returned.
1155
Currently returned depth-first, sorted by name within directories.
1157
## TODO: Work from given directory downwards
1158
for path, dir_entry in self.inventory.directories():
1159
# mutter("search for unknowns in %r", path)
1160
dirabs = self.abspath(path)
1161
if not isdir(dirabs):
1162
# e.g. directory deleted
1166
for subf in os.listdir(dirabs):
1169
if subf not in dir_entry.children:
1170
subf_norm, can_access = osutils.normalized_filename(subf)
1171
if subf_norm != subf and can_access:
1172
if subf_norm not in dir_entry.children:
1173
fl.append(subf_norm)
1179
subp = pathjoin(path, subf)
1182
def _translate_ignore_rule(self, rule):
1183
"""Translate a single ignore rule to a regex.
1185
There are two types of ignore rules. Those that do not contain a / are
1186
matched against the tail of the filename (that is, they do not care
1187
what directory the file is in.) Rules which do contain a slash must
1188
match the entire path. As a special case, './' at the start of the
1189
string counts as a slash in the string but is removed before matching
1190
(e.g. ./foo.c, ./src/foo.c)
1192
:return: The translated regex.
1194
if rule[:2] in ('./', '.\\'):
1196
result = fnmatch.translate(rule[2:])
1197
elif '/' in rule or '\\' in rule:
1199
result = fnmatch.translate(rule)
1201
# default rule style.
1202
result = "(?:.*/)?(?!.*/)" + fnmatch.translate(rule)
1203
assert result[-1] == '$', "fnmatch.translate did not add the expected $"
1204
return "(" + result + ")"
1206
def _combine_ignore_rules(self, rules):
1207
"""Combine a list of ignore rules into a single regex object.
1209
Each individual rule is combined with | to form a big regex, which then
1210
has $ added to it to form something like ()|()|()$. The group index for
1211
each subregex's outermost group is placed in a dictionary mapping back
1212
to the rule. This allows quick identification of the matching rule that
1214
:return: a list of the compiled regex and the matching-group index
1215
dictionaries. We return a list because python complains if you try to
1216
combine more than 100 regexes.
1221
translated_rules = []
1223
translated_rule = self._translate_ignore_rule(rule)
1224
compiled_rule = re.compile(translated_rule)
1225
groups[next_group] = rule
1226
next_group += compiled_rule.groups
1227
translated_rules.append(translated_rule)
1228
if next_group == 99:
1229
result.append((re.compile("|".join(translated_rules)), groups))
1232
translated_rules = []
1233
if len(translated_rules):
1234
result.append((re.compile("|".join(translated_rules)), groups))
1237
def ignored_files(self):
1238
"""Yield list of PATH, IGNORE_PATTERN"""
1239
for subp in self.extras():
1240
pat = self.is_ignored(subp)
1244
def get_ignore_list(self):
1245
"""Return list of ignore patterns.
1247
Cached in the Tree object after the first call.
1249
ignoreset = getattr(self, '_ignoreset', None)
1250
if ignoreset is not None:
1253
ignore_globs = set(bzrlib.DEFAULT_IGNORE)
1254
ignore_globs.update(ignores.get_runtime_ignores())
1256
ignore_globs.update(ignores.get_user_ignores())
1258
if self.has_filename(bzrlib.IGNORE_FILENAME):
1259
f = self.get_file_byname(bzrlib.IGNORE_FILENAME)
1261
ignore_globs.update(ignores.parse_ignore_file(f))
1265
self._ignoreset = ignore_globs
1266
self._ignore_regex = self._combine_ignore_rules(ignore_globs)
1269
def _get_ignore_rules_as_regex(self):
1270
"""Return a regex of the ignore rules and a mapping dict.
1272
:return: (ignore rules compiled regex, dictionary mapping rule group
1273
indices to original rule.)
1275
if getattr(self, '_ignoreset', None) is None:
1276
self.get_ignore_list()
1277
return self._ignore_regex
1279
def is_ignored(self, filename):
1280
r"""Check whether the filename matches an ignore pattern.
1282
Patterns containing '/' or '\' need to match the whole path;
1283
others match against only the last component.
1285
If the file is ignored, returns the pattern which caused it to
1286
be ignored, otherwise None. So this can simply be used as a
1287
boolean if desired."""
1289
# TODO: Use '**' to match directories, and other extended
1290
# globbing stuff from cvs/rsync.
1292
# XXX: fnmatch is actually not quite what we want: it's only
1293
# approximately the same as real Unix fnmatch, and doesn't
1294
# treat dotfiles correctly and allows * to match /.
1295
# Eventually it should be replaced with something more
1298
rules = self._get_ignore_rules_as_regex()
1299
for regex, mapping in rules:
1300
match = regex.match(filename)
1301
if match is not None:
1302
# one or more of the groups in mapping will have a non-None
1304
groups = match.groups()
1305
rules = [mapping[group] for group in
1306
mapping if groups[group] is not None]
1310
def kind(self, file_id):
    """Return the kind of the entry at file_id's path on disk.

    The kind is determined from the filesystem (via file_kind on the
    absolute path), not from the inventory.
    """
    return file_kind(self.id2abspath(file_id))
1313
def last_revision(self):
    """Return the last revision of the branch for this tree.

    This format tree does not support a separate marker for last-revision
    compared to the branch.

    See MutableTree.last_revision
    """
    # Delegates to _last_revision so subclasses (e.g. WorkingTree3) can
    # store their own last-revision independently of the branch.
    return self._last_revision()
1324
def _last_revision(self):
    """Helper for get_parent_ids: return the branch's tip revision id."""
    return self.branch.last_revision()
1328
def is_locked(self):
    """Return True if this tree's control files are currently locked."""
    return self._control_files.is_locked()
1331
def lock_read(self):
1332
"""See Branch.lock_read, and WorkingTree.unlock."""
1333
self.branch.lock_read()
1335
return self._control_files.lock_read()
1337
self.branch.unlock()
1340
def lock_tree_write(self):
1341
"""See MutableTree.lock_tree_write, and WorkingTree.unlock."""
1342
self.branch.lock_read()
1344
return self._control_files.lock_write()
1346
self.branch.unlock()
1349
def lock_write(self):
1350
"""See MutableTree.lock_write, and WorkingTree.unlock."""
1351
self.branch.lock_write()
1353
return self._control_files.lock_write()
1355
self.branch.unlock()
1358
def get_physical_lock_status(self):
    """Return the physical lock status of the tree's control files."""
    return self._control_files.get_physical_lock_status()
1361
def _basis_inventory_name(self):
    """Return the name of the control file caching the basis inventory."""
    return 'basis-inventory-cache'
1364
@needs_tree_write_lock
def set_last_revision(self, new_revision):
    """Change the last revision in the working tree.

    If the revision was actually changed (per _change_last_revision),
    the basis inventory cache is refreshed for the new revision.
    """
    if self._change_last_revision(new_revision):
        self._cache_basis_inventory(new_revision)
1370
def _change_last_revision(self, new_revision):
1371
"""Template method part of set_last_revision to perform the change.
1373
This is used to allow WorkingTree3 instances to not affect branch
1374
when their last revision is set.
1376
if new_revision is None:
1377
self.branch.set_revision_history([])
1380
self.branch.generate_revision_history(new_revision)
1381
except errors.NoSuchRevision:
1382
# not present in the repo - dont try to set it deeper than the tip
1383
self.branch.set_revision_history([new_revision])
1386
def _cache_basis_inventory(self, new_revision):
1387
"""Cache new_revision as the basis inventory."""
1388
# TODO: this should allow the ready-to-use inventory to be passed in,
1389
# as commit already has that ready-to-use [while the format is the
1392
# this double handles the inventory - unpack and repack -
1393
# but is easier to understand. We can/should put a conditional
1394
# in here based on whether the inventory is in the latest format
1395
# - perhaps we should repack all inventories on a repository
1397
# the fast path is to copy the raw xml from the repository. If the
1398
# xml contains 'revision_id="', then we assume the right
1399
# revision_id is set. We must check for this full string, because a
1400
# root node id can legitimately look like 'revision_id' but cannot
1402
xml = self.branch.repository.get_inventory_xml(new_revision)
1403
firstline = xml.split('\n', 1)[0]
1404
if (not 'revision_id="' in firstline or
1405
'format="6"' not in firstline):
1406
inv = self.branch.repository.deserialise_inventory(
1408
inv.revision_id = new_revision
1409
xml = bzrlib.xml6.serializer_v6.write_inventory_to_string(inv)
1410
assert isinstance(xml, str), 'serialised xml must be bytestring.'
1411
path = self._basis_inventory_name()
1413
self._control_files.put(path, sio)
1414
except (errors.NoSuchRevision, errors.RevisionNotPresent):
1417
def read_basis_inventory(self):
1418
"""Read the cached basis inventory."""
1419
path = self._basis_inventory_name()
1420
return self._control_files.get(path).read()
1423
def read_working_inventory(self):
1424
"""Read the working inventory."""
1425
# ElementTree does its own conversion from UTF-8, so open in
1427
result = bzrlib.xml5.serializer_v5.read_inventory(
1428
self._control_files.get('inventory'))
1429
self._set_inventory(result)
1432
@needs_tree_write_lock
1433
def remove(self, files, verbose=False, to_file=None):
1434
"""Remove nominated files from the working inventory..
1436
This does not remove their text. This does not run on XXX on what? RBC
1438
TODO: Refuse to remove modified files unless --force is given?
1440
TODO: Do something useful with directories.
1442
TODO: Should this remove the text or not? Tough call; not
1443
removing may be useful and the user can just use use rm, and
1444
is the opposite of add. Removing it is consistent with most
1445
other tools. Maybe an option.
1447
## TODO: Normalize names
1448
## TODO: Remove nested loops; better scalability
1449
if isinstance(files, basestring):
1452
inv = self.inventory
1454
# do this before any modifications
1456
fid = inv.path2id(f)
1458
# TODO: Perhaps make this just a warning, and continue?
1459
# This tends to happen when
1460
raise NotVersionedError(path=f)
1462
# having remove it, it must be either ignored or unknown
1463
if self.is_ignored(f):
1467
show_status(new_status, inv[fid].kind, f, to_file=to_file)
1470
self._write_inventory(inv)
1472
@needs_tree_write_lock
1473
def revert(self, filenames, old_tree=None, backups=True,
1474
pb=DummyProgress()):
1475
from transform import revert
1476
from conflicts import resolve
1477
if old_tree is None:
1478
old_tree = self.basis_tree()
1479
conflicts = revert(self, old_tree, filenames, backups, pb)
1480
if not len(filenames):
1481
self.set_parent_ids(self.get_parent_ids()[:1])
1484
resolve(self, filenames, ignore_misses=True)
1487
# XXX: This method should be deprecated in favour of taking in a proper
1488
# new Inventory object.
1489
@needs_tree_write_lock
1490
def set_inventory(self, new_inventory_list):
1491
from bzrlib.inventory import (Inventory,
1496
inv = Inventory(self.get_root_id())
1497
for path, file_id, parent, kind in new_inventory_list:
1498
name = os.path.basename(path)
1501
# fixme, there should be a factory function inv,add_??
1502
if kind == 'directory':
1503
inv.add(InventoryDirectory(file_id, name, parent))
1504
elif kind == 'file':
1505
inv.add(InventoryFile(file_id, name, parent))
1506
elif kind == 'symlink':
1507
inv.add(InventoryLink(file_id, name, parent))
1509
raise BzrError("unknown kind %r" % kind)
1510
self._write_inventory(inv)
1512
@needs_tree_write_lock
1513
def set_root_id(self, file_id):
1514
"""Set the root id for this tree."""
1515
inv = self.read_working_inventory()
1516
orig_root_id = inv.root.file_id
1517
del inv._byid[inv.root.file_id]
1518
inv.root.file_id = file_id
1519
inv._byid[inv.root.file_id] = inv.root
1522
if entry.parent_id == orig_root_id:
1523
entry.parent_id = inv.root.file_id
1524
self._write_inventory(inv)
1527
"""See Branch.unlock.
1529
WorkingTree locking just uses the Branch locking facilities.
1530
This is current because all working trees have an embedded branch
1531
within them. IF in the future, we were to make branch data shareable
1532
between multiple working trees, i.e. via shared storage, then we
1533
would probably want to lock both the local tree, and the branch.
1535
raise NotImplementedError(self.unlock)
1539
"""Update a working tree along its branch.
1541
This will update the branch if its bound too, which means we have multiple trees involved:
1542
The new basis tree of the master.
1543
The old basis tree of the branch.
1544
The old basis tree of the working tree.
1545
The current working tree state.
1546
pathologically all three may be different, and non ancestors of each other.
1547
Conceptually we want to:
1548
Preserve the wt.basis->wt.state changes
1549
Transform the wt.basis to the new master basis.
1550
Apply a merge of the old branch basis to get any 'local' changes from it into the tree.
1551
Restore the wt.basis->wt.state changes.
1553
There isn't a single operation at the moment to do that, so we:
1554
Merge current state -> basis tree of the master w.r.t. the old tree basis.
1555
Do a 'normal' merge of the old branch basis if it is relevant.
1557
old_tip = self.branch.update()
1558
# here if old_tip is not None, it is the old tip of the branch before
1559
# it was updated from the master branch. This should become a pending
1560
# merge in the working tree to preserve the user existing work. we
1561
# cant set that until we update the working trees last revision to be
1562
# one from the new branch, because it will just get absorbed by the
1563
# parent de-duplication logic.
1565
# We MUST save it even if an error occurs, because otherwise the users
1566
# local work is unreferenced and will appear to have been lost.
1570
last_rev = self.get_parent_ids()[0]
1573
if last_rev != self.branch.last_revision():
1574
# merge tree state up to new branch tip.
1575
basis = self.basis_tree()
1576
to_tree = self.branch.basis_tree()
1577
result += merge_inner(self.branch,
1581
# TODO - dedup parents list with things merged by pull ?
1582
# reuse the tree we've updated to to set the basis:
1583
parent_trees = [(self.branch.last_revision(), to_tree)]
1584
merges = self.get_parent_ids()[1:]
1585
# Ideally we ask the tree for the trees here, that way the working
1586
# tree can decide whether to give us teh entire tree or give us a
1587
# lazy initialised tree. dirstate for instance will have the trees
1588
# in ram already, whereas a last-revision + basis-inventory tree
1589
# will not, but also does not need them when setting parents.
1590
for parent in merges:
1591
parent_trees.append(
1592
(parent, self.branch.repository.revision_tree(parent)))
1593
if old_tip is not None:
1594
parent_trees.append(
1595
(old_tip, self.branch.repository.revision_tree(old_tip)))
1596
self.set_parent_trees(parent_trees)
1597
last_rev = parent_trees[0][0]
1599
# the working tree had the same last-revision as the master
1600
# branch did. We may still have pivot local work from the local
1601
# branch into old_tip:
1602
if old_tip is not None:
1603
self.add_parent_tree_id(old_tip)
1604
if old_tip and old_tip != last_rev:
1605
# our last revision was not the prior branch last revision
1606
# and we have converted that last revision to a pending merge.
1607
# base is somewhere between the branch tip now
1608
# and the now pending merge
1609
from bzrlib.revision import common_ancestor
1611
base_rev_id = common_ancestor(self.branch.last_revision(),
1613
self.branch.repository)
1614
except errors.NoCommonAncestor:
1616
base_tree = self.branch.repository.revision_tree(base_rev_id)
1617
other_tree = self.branch.repository.revision_tree(old_tip)
1618
result += merge_inner(self.branch,
1624
@needs_tree_write_lock
1625
def _write_inventory(self, inv):
1626
"""Write inventory as the current inventory."""
1628
bzrlib.xml5.serializer_v5.write_inventory(inv, sio)
1630
self._control_files.put('inventory', sio)
1631
self._set_inventory(inv)
1632
mutter('wrote working inventory')
1634
def set_conflicts(self, arg):
    """Base WorkingTree cannot persist conflicts; always raises.

    :raises UnsupportedOperation: subclasses such as WorkingTree3
        override this with a real implementation.
    """
    raise UnsupportedOperation(self.set_conflicts, self)
1637
def add_conflicts(self, arg):
    """Base WorkingTree cannot persist conflicts; always raises.

    :raises UnsupportedOperation: subclasses such as WorkingTree3
        override this with a real implementation.
    """
    raise UnsupportedOperation(self.add_conflicts, self)
1641
def conflicts(self):
1642
conflicts = ConflictList()
1643
for conflicted in self._iter_conflicts():
1646
if file_kind(self.abspath(conflicted)) != "file":
1648
except errors.NoSuchFile:
1651
for suffix in ('.THIS', '.OTHER'):
1653
kind = file_kind(self.abspath(conflicted+suffix))
1656
except errors.NoSuchFile:
1660
ctype = {True: 'text conflict', False: 'contents conflict'}[text]
1661
conflicts.append(Conflict.factory(ctype, path=conflicted,
1662
file_id=self.path2id(conflicted)))
1666
class WorkingTree2(WorkingTree):
1667
"""This is the Format 2 working tree.
1669
This was the first weave based working tree.
1670
- uses os locks for locking.
1671
- uses the branch last-revision.
1674
def lock_tree_write(self):
1675
"""See WorkingTree.lock_tree_write().
1677
In Format2 WorkingTrees we have a single lock for the branch and tree
1678
so lock_tree_write() degrades to lock_write().
1680
self.branch.lock_write()
1682
return self._control_files.lock_write()
1684
self.branch.unlock()
1688
# we share control files:
1689
if self._hashcache.needs_write and self._control_files._lock_count==3:
1690
self._hashcache.write()
1691
# reverse order of locking.
1693
return self._control_files.unlock()
1695
self.branch.unlock()
1698
class WorkingTree3(WorkingTree):
1699
"""This is the Format 3 working tree.
1701
This differs from the base WorkingTree by:
1702
- having its own file lock
1703
- having its own last-revision property.
1705
This is new in bzr 0.8
1709
def _last_revision(self):
1710
"""See Mutable.last_revision."""
1712
return self._control_files.get_utf8('last-revision').read()
1716
def _change_last_revision(self, revision_id):
1717
"""See WorkingTree._change_last_revision."""
1718
if revision_id is None or revision_id == NULL_REVISION:
1720
self._control_files._transport.delete('last-revision')
1721
except errors.NoSuchFile:
1725
self._control_files.put_utf8('last-revision', revision_id)
1728
@needs_tree_write_lock
1729
def set_conflicts(self, conflicts):
1730
self._put_rio('conflicts', conflicts.to_stanzas(),
1733
@needs_tree_write_lock
1734
def add_conflicts(self, new_conflicts):
1735
conflict_set = set(self.conflicts())
1736
conflict_set.update(set(list(new_conflicts)))
1737
self.set_conflicts(ConflictList(sorted(conflict_set,
1738
key=Conflict.sort_key)))
1741
def conflicts(self):
1743
confile = self._control_files.get('conflicts')
1745
return ConflictList()
1747
if confile.next() != CONFLICT_HEADER_1 + '\n':
1748
raise ConflictFormatError()
1749
except StopIteration:
1750
raise ConflictFormatError()
1751
return ConflictList.from_stanzas(RioReader(confile))
1754
if self._hashcache.needs_write and self._control_files._lock_count==1:
1755
self._hashcache.write()
1756
# reverse order of locking.
1758
return self._control_files.unlock()
1760
self.branch.unlock()
1763
def get_conflicted_stem(path):
    """Return path with its conflict suffix stripped, if it has one.

    Checks each suffix in CONFLICT_SUFFIXES (e.g. '.THIS', '.OTHER');
    returns None implicitly when no suffix matches.
    """
    for suffix in CONFLICT_SUFFIXES:
        if path.endswith(suffix):
            return path[:-len(suffix)]
1768
@deprecated_function(zero_eight)
1769
def is_control_file(filename):
1770
"""See WorkingTree.is_control_filename(filename)."""
1771
## FIXME: better check
1772
filename = normpath(filename)
1773
while filename != '':
1774
head, tail = os.path.split(filename)
1775
## mutter('check %r for control file' % ((head, tail),))
1778
if filename == head:
1784
class WorkingTreeFormat(object):
1785
"""An encapsulation of the initialization and open routines for a format.
1787
Formats provide three things:
1788
* An initialization routine,
1792
Formats are placed in an dict by their format string for reference
1793
during workingtree opening. Its not required that these be instances, they
1794
can be classes themselves with class methods - it simply depends on
1795
whether state is needed for a given format or not.
1797
Once a format is deprecated, just deprecate the initialize and open
1798
methods on the format class. Do not deprecate the object, as the
1799
object will be created every time regardless.
1802
_default_format = None
1803
"""The default format used for new trees."""
1806
"""The known formats."""
1809
def find_format(klass, a_bzrdir):
1810
"""Return the format for the working tree object in a_bzrdir."""
1812
transport = a_bzrdir.get_workingtree_transport(None)
1813
format_string = transport.get("format").read()
1814
return klass._formats[format_string]
1816
raise errors.NoWorkingTree(base=transport.base)
1818
raise errors.UnknownFormatError(format=format_string)
1821
@classmethod
def get_default_format(klass):
    """Return the current default format."""
    return klass._default_format
1825
def get_format_string(self):
    """Return the ASCII format string that identifies this format.

    Abstract; concrete formats (e.g. WorkingTreeFormat3) override this.
    """
    raise NotImplementedError(self.get_format_string)
1829
def get_format_description(self):
    """Return the short description for this format.

    Abstract; concrete formats override this.
    """
    raise NotImplementedError(self.get_format_description)
1833
def is_supported(self):
1834
"""Is this format supported?
1836
Supported formats can be initialized and opened.
1837
Unsupported formats may not support initialization or committing or
1838
some other features depending on the reason for not being supported.
1843
@classmethod
def register_format(klass, format):
    """Register format in the class registry, keyed by its format string."""
    klass._formats[format.get_format_string()] = format
1847
@classmethod
def set_default_format(klass, format):
    """Set the format used for new trees when none is specified."""
    klass._default_format = format
1851
@classmethod
def unregister_format(klass, format):
    """Remove format from the registry.

    The registered instance must be the very object passed in; the
    assert guards against unregistering a different instance that
    happens to share a format string.
    """
    assert klass._formats[format.get_format_string()] is format
    del klass._formats[format.get_format_string()]
1857
class WorkingTreeFormat2(WorkingTreeFormat):
1858
"""The second working tree format.
1860
This format modified the hash cache from the format 1 hash cache.
1863
def get_format_description(self):
    """See WorkingTreeFormat.get_format_description()."""
    return "Working tree format 2"
1867
def stub_initialize_remote(self, control_files):
1868
"""As a special workaround create critical control files for a remote working tree
1870
This ensures that it can later be updated and dealt with locally,
1871
since BzrDirFormat6 and BzrDirFormat5 cannot represent dirs with
1872
no working tree. (See bug #43064).
1876
bzrlib.xml5.serializer_v5.write_inventory(inv, sio)
1878
control_files.put('inventory', sio)
1880
control_files.put_utf8('pending-merges', '')
1883
def initialize(self, a_bzrdir, revision_id=None):
1884
"""See WorkingTreeFormat.initialize()."""
1885
if not isinstance(a_bzrdir.transport, LocalTransport):
1886
raise errors.NotLocalUrl(a_bzrdir.transport.base)
1887
branch = a_bzrdir.open_branch()
1888
if revision_id is not None:
1891
revision_history = branch.revision_history()
1893
position = revision_history.index(revision_id)
1895
raise errors.NoSuchRevision(branch, revision_id)
1896
branch.set_revision_history(revision_history[:position + 1])
1899
revision = branch.last_revision()
1901
wt = WorkingTree2(a_bzrdir.root_transport.local_abspath('.'),
1907
wt._write_inventory(inv)
1908
wt.set_root_id(inv.root.file_id)
1909
basis_tree = branch.repository.revision_tree(revision)
1910
wt.set_parent_trees([(revision, basis_tree)])
1911
build_tree(basis_tree, wt)
1915
super(WorkingTreeFormat2, self).__init__()
1916
self._matchingbzrdir = bzrdir.BzrDirFormat6()
1918
def open(self, a_bzrdir, _found=False):
1919
"""Return the WorkingTree object for a_bzrdir
1921
_found is a private parameter, do not use it. It is used to indicate
1922
if format probing has already been done.
1925
# we are being called directly and must probe.
1926
raise NotImplementedError
1927
if not isinstance(a_bzrdir.transport, LocalTransport):
1928
raise errors.NotLocalUrl(a_bzrdir.transport.base)
1929
return WorkingTree2(a_bzrdir.root_transport.local_abspath('.'),
1935
class WorkingTreeFormat3(WorkingTreeFormat):
1936
"""The second working tree format updated to record a format marker.
1939
- exists within a metadir controlling .bzr
1940
- includes an explicit version marker for the workingtree control
1941
files, separate from the BzrDir format
1942
- modifies the hash cache format
1944
- uses a LockDir to guard access for writes.
1947
def get_format_string(self):
    """See WorkingTreeFormat.get_format_string()."""
    return "Bazaar-NG Working Tree format 3"
1951
def get_format_description(self):
    """See WorkingTreeFormat.get_format_description()."""
    return "Working tree format 3"
1955
_lock_file_name = 'lock'
1956
_lock_class = LockDir
1958
def _open_control_files(self, a_bzrdir):
1959
transport = a_bzrdir.get_workingtree_transport(None)
1960
return LockableFiles(transport, self._lock_file_name,
1963
def initialize(self, a_bzrdir, revision_id=None):
1964
"""See WorkingTreeFormat.initialize().
1966
revision_id allows creating a working tree at a different
1967
revision than the branch is at.
1969
if not isinstance(a_bzrdir.transport, LocalTransport):
1970
raise errors.NotLocalUrl(a_bzrdir.transport.base)
1971
transport = a_bzrdir.get_workingtree_transport(self)
1972
control_files = self._open_control_files(a_bzrdir)
1973
control_files.create_lock()
1974
control_files.lock_write()
1975
control_files.put_utf8('format', self.get_format_string())
1976
branch = a_bzrdir.open_branch()
1977
if revision_id is None:
1978
revision_id = branch.last_revision()
1980
wt = WorkingTree3(a_bzrdir.root_transport.local_abspath('.'),
1986
_control_files=control_files)
1987
wt.lock_tree_write()
1989
wt._write_inventory(inv)
1990
wt.set_root_id(inv.root.file_id)
1991
basis_tree = branch.repository.revision_tree(revision_id)
1992
if revision_id == bzrlib.revision.NULL_REVISION:
1993
wt.set_parent_trees([])
1995
wt.set_parent_trees([(revision_id, basis_tree)])
1996
build_tree(basis_tree, wt)
1999
control_files.unlock()
2003
super(WorkingTreeFormat3, self).__init__()
2004
self._matchingbzrdir = bzrdir.BzrDirMetaFormat1()
2006
def open(self, a_bzrdir, _found=False):
2007
"""Return the WorkingTree object for a_bzrdir
2009
_found is a private parameter, do not use it. It is used to indicate
2010
if format probing has already been done.
2013
# we are being called directly and must probe.
2014
raise NotImplementedError
2015
if not isinstance(a_bzrdir.transport, LocalTransport):
2016
raise errors.NotLocalUrl(a_bzrdir.transport.base)
2017
return self._open(a_bzrdir, self._open_control_files(a_bzrdir))
2019
def _open(self, a_bzrdir, control_files):
2020
"""Open the tree itself.
2022
:param a_bzrdir: the dir for the tree.
2023
:param control_files: the control files for the tree.
2025
return WorkingTree3(a_bzrdir.root_transport.local_abspath('.'),
2029
_control_files=control_files)
2032
return self.get_format_string()
2035
# formats which have no format string are not discoverable
2036
# and not independently creatable, so are not registered.
2037
__default_format = WorkingTreeFormat3()
2038
WorkingTreeFormat.register_format(__default_format)
2039
WorkingTreeFormat.set_default_format(__default_format)
2040
_legacy_formats = [WorkingTreeFormat2(),
2044
class WorkingTreeTestProviderAdapter(object):
2045
"""A tool to generate a suite testing multiple workingtree formats at once.
2047
This is done by copying the test once for each transport and injecting
2048
the transport_server, transport_readonly_server, and workingtree_format
2049
classes into each copy. Each copy is also given a new id() to make it
2053
def __init__(self, transport_server, transport_readonly_server, formats):
    """Store the server factories and format pairs used to adapt tests.

    :param transport_server: server factory injected into each cloned test.
    :param transport_readonly_server: read-only server factory, likewise.
    :param formats: sequence of (workingtree_format, bzrdir_format) pairs
        (consumed by adapt()).
    """
    self._transport_server = transport_server
    self._transport_readonly_server = transport_readonly_server
    self._formats = formats
2058
def _clone_test(self, test, bzrdir_format, workingtree_format, variation):
    """Clone test for adaption.

    Deep-copies the test, injects the transport servers and formats, and
    gives the copy a new id derived from variation.

    :return: the configured clone (adapt() relies on this return value).
    """
    new_test = deepcopy(test)
    new_test.transport_server = self._transport_server
    new_test.transport_readonly_server = self._transport_readonly_server
    new_test.bzrdir_format = bzrdir_format
    new_test.workingtree_format = workingtree_format
    def make_new_test_id():
        # Capture the id string now so the lambda is not affected by any
        # later mutation of test or variation.
        new_id = "%s(%s)" % (test.id(), variation)
        return lambda: new_id
    new_test.id = make_new_test_id()
    return new_test
2071
def adapt(self, test):
2072
from bzrlib.tests import TestSuite
2073
result = TestSuite()
2074
for workingtree_format, bzrdir_format in self._formats:
2075
new_test = self._clone_test(
2078
workingtree_format, workingtree_format.__class__.__name__)
2079
result.addTest(new_test)