conflicts as _mod_conflicts,
revision as _mod_revision,
from bzrlib.workingtree_4 import (
from bzrlib.transport import get_transport
from bzrlib.workingtree_4 import WorkingTreeFormat4
from bzrlib import symbol_versioning
from bzrlib.decorators import needs_read_lock, needs_write_lock
from bzrlib.lock import LogicalLockResult
from bzrlib.lockable_files import LockableFiles
from bzrlib.inventory import InventoryEntry, Inventory, ROOT_ID, TreeReference
from bzrlib.lockable_files import LockableFiles, TransportLock
from bzrlib.lockdir import LockDir
import bzrlib.mutabletree
from bzrlib.mutabletree import needs_tree_write_lock
from bzrlib import osutils
from bzrlib.osutils import (
supports_executable,
from bzrlib.filters import filtered_input_file
from bzrlib.trace import mutter, note
from bzrlib.transport.local import LocalTransport
from bzrlib.revision import CURRENT_REVISION
from bzrlib.progress import DummyProgress, ProgressPhase
from bzrlib.revision import NULL_REVISION, CURRENT_REVISION
from bzrlib.rio import RioReader, rio_file, Stanza
from bzrlib.symbol_versioning import (
DEPRECATED_PARAMETER,
from bzrlib.symbol_versioning import (deprecated_passed,
DEPRECATED_PARAMETER,
MERGE_MODIFIED_HEADER_1 = "BZR merge-modified list format 1"
# TODO: Modifying the conflict objects or their type is currently nearly
# impossible as there is no clear relationship between the working tree format
# and the conflict list file format.
CONFLICT_HEADER_1 = "BZR conflict list format 1"
ERROR_PATH_NOT_FOUND = 3 # WindowsError errno code, equivalent to ENOENT
@deprecated_function(zero_thirteen)
def gen_file_id(name):
"""Return new file id for the basename 'name'.
Use bzrlib.generate_ids.gen_file_id() instead
return generate_ids.gen_file_id(name)
@deprecated_function(zero_thirteen)
"""Return a new tree-root file id.
This has been deprecated in favor of bzrlib.generate_ids.gen_root_id()
return generate_ids.gen_root_id()
class TreeEntry(object):
"""An entry that implements the minimum interface used by commands.
This needs further inspection, it may be better to have
InventoryEntries without ids - though that seems wrong. For now,
this is a parallel hierarchy to InventoryEntry, and needs to become
one of several things: decorates to that hierarchy, children of, or
path = osutils.getcwd()
control, relpath = bzrdir.BzrDir.open_containing(path)
return control.open_workingtree(), relpath
def open_containing_paths(file_list, default_directory=None,
canonicalize=True, apply_view=True):
"""Open the WorkingTree that contains a set of paths.
Fail if the paths given are not all in a single tree.
This is used for the many command-line interfaces that take a list of
any number of files and that require they all be in the same tree.
if default_directory is None:
default_directory = u'.'
# recommended replacement for builtins.internal_tree_files
if file_list is None or len(file_list) == 0:
tree = WorkingTree.open_containing(default_directory)[0]
# XXX: doesn't really belong here, and seems to have the strange
# side effect of making it return a bunch of files, not the whole
# tree -- mbp 20100716
if tree.supports_views() and apply_view:
view_files = tree.views.lookup_view()
file_list = view_files
view_str = views.view_display_str(view_files)
note("Ignoring files outside view. View is %s" % view_str)
return tree, file_list
if default_directory == u'.':
seed = default_directory
file_list = [osutils.pathjoin(default_directory, f)
tree = WorkingTree.open_containing(seed)[0]
return tree, tree.safe_relpath_files(file_list, canonicalize,
apply_view=apply_view)
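# Note (added commentary, not part of the original source): callers of
# open_containing_paths() typically unpack the result as, illustratively,
#   tree, relpaths = WorkingTree.open_containing_paths(['dir/a', 'dir/b'])
# where every returned path is relative to tree.basedir; a PathNotChild
# error is the usual sign that the arguments span more than one tree.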
def safe_relpath_files(self, file_list, canonicalize=True, apply_view=True):
"""Convert file_list into a list of relpaths in tree.
:param self: A tree to operate on.
:param file_list: A list of user provided paths or None.
:param apply_view: if True and a view is set, apply it or check that
specified files are within it
:return: A list of relative paths.
:raises errors.PathNotChild: When a provided path is in a different self
if file_list is None:
if self.supports_views() and apply_view:
view_files = self.views.lookup_view()
# self.relpath exists as a "thunk" to osutils, but canonical_relpath
# doesn't - fix that up here before we enter the loop.
fixer = lambda p: osutils.canonical_relpath(self.basedir, p)
for filename in file_list:
relpath = fixer(osutils.dereference_path(filename))
if view_files and not osutils.is_inside_any(view_files, relpath):
raise errors.FileOutsideView(filename, view_files)
new_list.append(relpath)
340
def open_downlevel(path=None):
417
341
"""Open an unsupported working tree.
485
387
# at this point ?
487
389
return self.branch.repository.revision_tree(revision_id)
488
except (errors.RevisionNotPresent, errors.NoSuchRevision):
390
except errors.RevisionNotPresent:
489
391
# the basis tree *may* be a ghost or a low level error may have
490
# occurred. If the revision is present, its a problem, if its not
392
# occured. If the revision is present, its a problem, if its not
492
394
if self.branch.repository.has_revision(revision_id):
494
396
# the basis tree is a ghost so return an empty tree.
495
return self.branch.repository.revision_tree(
496
_mod_revision.NULL_REVISION)
499
self._flush_ignore_list_cache()
397
return self.branch.repository.revision_tree(None)
@deprecated_method(zero_eight)
def create(branch, directory):
"""Create a workingtree for branch at directory.
If existing_directory already exists it must have a .bzr directory.
If it does not exist, it will be created.
This returns a new WorkingTree object for the new checkout.
TODO FIXME RBC 20060124 when we have checkout formats in place this
should accept an optional revisionid to checkout [and reject this if
checking out into the same dir as a pre-checkout-aware branch format.]
XXX: When BzrDir is present, these should be created through that
warnings.warn('delete WorkingTree.create', stacklevel=3)
transport = get_transport(directory)
if branch.bzrdir.root_transport.base == transport.base:
return branch.bzrdir.create_workingtree()
# different directory,
# create a branch reference
# and now a working tree.
raise NotImplementedError
@deprecated_method(zero_eight)
def create_standalone(directory):
"""Create a checkout and a branch and a repo at directory.
Directory must exist and be empty.
please use BzrDir.create_standalone_workingtree
return bzrdir.BzrDir.create_standalone_workingtree(directory)
def relpath(self, path):
"""Return the local path portion from a given path.
The path may be absolute or relative. If it's a relative path it is
interpreted relative to the python current working directory.
return osutils.relpath(self.basedir, path)
def has_filename(self, filename):
return osutils.lexists(self.abspath(filename))
def get_file(self, file_id, path=None, filtered=True):
return self.get_file_with_stat(file_id, path, filtered=filtered)[0]
def get_file_with_stat(self, file_id, path=None, filtered=True,
"""See Tree.get_file_with_stat."""
path = self.id2path(file_id)
file_obj = self.get_file_byname(path, filtered=False)
stat_value = _fstat(file_obj.fileno())
if filtered and self.supports_content_filtering():
filters = self._content_filter_stack(path)
file_obj = filtered_input_file(file_obj, filters)
return (file_obj, stat_value)
def get_file_text(self, file_id, path=None, filtered=True):
my_file = self.get_file(file_id, path=path, filtered=filtered)
return my_file.read()
def get_file_byname(self, filename, filtered=True):
path = self.abspath(filename)
if filtered and self.supports_content_filtering():
filters = self._content_filter_stack(filename)
return filtered_input_file(f, filters)
def get_file_lines(self, file_id, path=None, filtered=True):
"""See Tree.get_file_lines()"""
file = self.get_file(file_id, path, filtered=filtered)
return file.readlines()
def get_file(self, file_id):
file_id = osutils.safe_file_id(file_id)
return self.get_file_byname(self.id2path(file_id))
def get_file_text(self, file_id):
file_id = osutils.safe_file_id(file_id)
return self.get_file(file_id).read()
def get_file_byname(self, filename):
return file(self.abspath(filename), 'rb')
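# Note (added commentary, not part of the original source): the newer
# get_file()/get_file_with_stat()/get_file_byname() variants route reads
# through the content filter stack whenever supports_content_filtering() is
# true, so callers see the canonical form of the file; the older variants
# kept alongside them return the raw on-disk bytes.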
def annotate_iter(self, file_id, default_revision=CURRENT_REVISION):
def annotate_iter(self, file_id):
"""See Tree.annotate_iter
This implementation will use the basis tree implementation if possible.
incorrectly attributed to CURRENT_REVISION (but after committing, the
attribution will be correct).
maybe_file_parent_keys = []
for parent_id in self.get_parent_ids():
parent_tree = self.revision_tree(parent_id)
except errors.NoSuchRevisionInTree:
parent_tree = self.branch.repository.revision_tree(parent_id)
parent_tree.lock_read()
if file_id not in parent_tree:
ie = parent_tree.inventory[file_id]
if ie.kind != 'file':
# Note: this is slightly unnecessary, because symlinks and
# directories have a "text" which is the empty text, and we
# know that won't mess up annotations. But it seems cleaner
parent_text_key = (file_id, ie.revision)
if parent_text_key not in maybe_file_parent_keys:
maybe_file_parent_keys.append(parent_text_key)
graph = _mod_graph.Graph(self.branch.repository.texts)
heads = graph.heads(maybe_file_parent_keys)
file_parent_keys = []
for key in maybe_file_parent_keys:
file_parent_keys.append(key)
# Now we have the parents of this content
annotator = self.branch.repository.texts.get_annotator()
text = self.get_file_text(file_id)
this_key = (file_id, default_revision)
annotator.add_special_text(this_key, file_parent_keys, text)
annotations = [(key[-1], line)
for key, line in annotator.annotate_flat(this_key)]
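# Note (added commentary, not part of the original source): annotate_iter()
# builds a (file_id, revision) text key for each parent tree that contains
# the file, keeps only the graph heads among those keys, then registers the
# working copy's current text under (file_id, CURRENT_REVISION) so that
# uncommitted lines are attributed to the pending revision rather than to an
# existing one.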
def _get_ancestors(self, default_revision):
ancestors = set([default_revision])
for parent_id in self.get_parent_ids():
ancestors.update(self.branch.repository.get_ancestry(
parent_id, topo_sorted=False))
file_id = osutils.safe_file_id(file_id)
basis = self.basis_tree()
changes = self._iter_changes(basis, True, [self.id2path(file_id)],
require_versioned=True).next()
changed_content, kind = changes[2], changes[6]
if not changed_content:
return basis.annotate_iter(file_id)
if kind[0] != 'file':
old_lines = list(basis.annotate_iter(file_id))
for tree in self.branch.repository.revision_trees(
self.get_parent_ids()[1:]):
if file_id not in tree:
old.append(list(tree.annotate_iter(file_id)))
return annotate.reannotate(old, self.get_file(file_id).readlines(),
def get_parent_ids(self):
"""See Tree.get_parent_ids.
This implementation reads the pending merges list and last_revision
value and uses that to decide what the parents list should be.
last_rev = _mod_revision.ensure_null(self._last_revision())
if _mod_revision.NULL_REVISION == last_rev:
last_rev = self._last_revision()
parents = [last_rev]
merges_bytes = self._transport.get_bytes('pending-merges')
merges_file = self._control_files.get('pending-merges')
except errors.NoSuchFile:
for l in osutils.split_lines(merges_bytes):
revision_id = l.rstrip('\n')
for l in merges_file.readlines():
revision_id = osutils.safe_revision_id(l.rstrip('\n'))
parents.append(revision_id)
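# Note (added commentary, not part of the original source): the parents list
# starts with the tree's last revision (omitted when it is NULL_REVISION) and
# is extended with the ids read from the 'pending-merges' control file, one
# revision id per line; a missing file simply means there are no pending
# merges.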
__contains__ = has_id
def get_file_size(self, file_id):
"""See Tree.get_file_size"""
# XXX: this returns the on-disk size; it should probably return the
return os.path.getsize(self.id2abspath(file_id))
if e.errno != errno.ENOENT:
file_id = osutils.safe_file_id(file_id)
return os.path.getsize(self.id2abspath(file_id))
def get_file_sha1(self, file_id, path=None, stat_value=None):
file_id = osutils.safe_file_id(file_id)
path = self._inventory.id2path(file_id)
return self._hashcache.get_sha1(path, stat_value)
def get_file_mtime(self, file_id, path=None):
file_id = osutils.safe_file_id(file_id)
path = self.inventory.id2path(file_id)
return os.lstat(self.abspath(path)).st_mtime
def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
file_id = self.path2id(path)
# For unversioned files on win32, we just assume they are not
return self._inventory[file_id].executable
def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
mode = stat_result.st_mode
return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)
if not supports_executable():
def is_executable(self, file_id, path=None):
file_id = osutils.safe_file_id(file_id)
return self._inventory[file_id].executable
_is_executable_from_path_and_stat = \
_is_executable_from_path_and_stat_from_basis
def is_executable(self, file_id, path=None):
file_id = osutils.safe_file_id(file_id)
path = self.id2path(file_id)
mode = os.lstat(self.abspath(path)).st_mode
return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)
_is_executable_from_path_and_stat = \
_is_executable_from_path_and_stat_from_stat
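# Note (added commentary, not part of the original source): on platforms
# where supports_executable() is false (e.g. Windows) the executable bit
# cannot be read from the filesystem, so is_executable() falls back to the
# value recorded in the inventory; everywhere else it is taken from the
# file's st_mode.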
@needs_tree_write_lock
def _add(self, files, ids, kinds):
"""See MutableTree._add."""
# TODO: Re-adding a file that is removed in the working copy
# should probably put it back with the previous ID.
# the read and write working inventory should not occur in this
# function - they should be part of lock_write and unlock.
inv = self.read_working_inventory()
for f, file_id, kind in zip(files, ids, kinds):
assert kind is not None
if file_id is None:
inv.add_path(f, kind=kind)
file_id = osutils.safe_file_id(file_id)
inv.add_path(f, kind=kind, file_id=file_id)
self._inventory_is_modified = True
self._write_inventory(inv)
@needs_tree_write_lock
def _gather_kinds(self, files, kinds):
self.set_parent_ids(parents, allow_leftmost_as_ghost=True)
def path_content_summary(self, path, _lstat=os.lstat,
_mapper=osutils.file_kind_from_stat_mode):
"""See Tree.path_content_summary."""
abspath = self.abspath(path)
stat_result = _lstat(abspath)
if getattr(e, 'errno', None) == errno.ENOENT:
return ('missing', None, None, None)
# propagate other errors
kind = _mapper(stat_result.st_mode)
return self._file_content_summary(path, stat_result)
elif kind == 'directory':
# perhaps it looks like a plain directory, but it's really a
if self._directory_is_tree_reference(path):
kind = 'tree-reference'
return kind, None, None, None
elif kind == 'symlink':
target = osutils.readlink(abspath)
return ('symlink', None, None, target)
return (kind, None, None, None)
def _file_content_summary(self, path, stat_result):
size = stat_result.st_size
executable = self._is_executable_from_path_and_stat(path, stat_result)
# try for a stat cache lookup
return ('file', size, executable, self._sha_from_stat(
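# Note (added commentary, not part of the original source):
# path_content_summary() returns a 4-tuple (kind, size, executable,
# sha1_or_link_target). Only regular files populate the size/executable
# slots, symlinks carry their target in the last slot, and missing paths
# come back as ('missing', None, None, None).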
@deprecated_method(zero_eleven)
def pending_merges(self):
"""Return a list of pending merges.
These are revisions that have been merged into the working
directory but not yet committed.
As of 0.11 this is deprecated. Please see WorkingTree.get_parent_ids()
instead - which is available on all tree objects.
return self.get_parent_ids()[1:]
def _check_parents_for_ghosts(self, revision_ids, allow_leftmost_as_ghost):
"""Common ghost checking functionality from set_parent_*.
def _set_merges_from_parent_ids(self, parent_ids):
merges = parent_ids[1:]
self._transport.put_bytes('pending-merges', '\n'.join(merges),
mode=self.bzrdir._get_file_mode())
def _filter_parent_ids_by_ancestry(self, revision_ids):
"""Check that all merged revisions are proper 'heads'.
This will always return the first revision_id, and any merged revisions
if len(revision_ids) == 0:
graph = self.branch.repository.get_graph()
heads = graph.heads(revision_ids)
new_revision_ids = revision_ids[:1]
for revision_id in revision_ids[1:]:
if revision_id in heads and revision_id not in new_revision_ids:
new_revision_ids.append(revision_id)
if new_revision_ids != revision_ids:
trace.mutter('requested to set revision_ids = %s,'
' but filtered to %s', revision_ids, new_revision_ids)
return new_revision_ids
self._control_files.put_bytes('pending-merges', '\n'.join(merges))
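# Note (added commentary, not part of the original source):
# _filter_parent_ids_by_ancestry() drops any merged revision that is already
# an ancestor of another parent: only graph heads survive, the first
# (leftmost) id is always kept, and the filtering is logged via trace.mutter
# whenever it changes the list.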
@needs_tree_write_lock
def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
"""Set the parent ids to revision_ids.
See also set_parent_trees. This api will try to retrieve the tree data
for each element of revision_ids from the trees repository. If you have
tree data already available, it is more efficient to use
:param revision_ids: The revision_ids to set as the parent ids of this
working tree. Any of these may be ghosts.
revision_ids = [osutils.safe_revision_id(r) for r in revision_ids]
self._check_parents_for_ghosts(revision_ids,
allow_leftmost_as_ghost=allow_leftmost_as_ghost)
for revision_id in revision_ids:
_mod_revision.check_not_reserved_id(revision_id)
revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)
if len(revision_ids) > 0:
self.set_last_revision(revision_ids[0])
self.set_last_revision(_mod_revision.NULL_REVISION)
self.set_last_revision(None)
self._set_merges_from_parent_ids(revision_ids)
@needs_tree_write_lock
def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
"""See MutableTree.set_parent_trees."""
parent_ids = [rev for (rev, tree) in parents_list]
for revision_id in parent_ids:
_mod_revision.check_not_reserved_id(revision_id)
parent_ids = [osutils.safe_revision_id(rev) for (rev, tree) in parents_list]
self._check_parents_for_ghosts(parent_ids,
allow_leftmost_as_ghost=allow_leftmost_as_ghost)
parent_ids = self._filter_parent_ids_by_ancestry(parent_ids)
if len(parent_ids) == 0:
leftmost_parent_id = _mod_revision.NULL_REVISION
leftmost_parent_id = None
leftmost_parent_tree = None
leftmost_parent_id, leftmost_parent_tree = parents_list[0]
branch.last_revision().
from bzrlib.merge import Merger, Merge3Merger
merger = Merger(self.branch, this_tree=self)
# check that there are no local alterations
if not force and self.has_changes():
raise errors.UncommittedChanges(self)
if to_revision is None:
to_revision = _mod_revision.ensure_null(branch.last_revision())
merger.other_rev_id = to_revision
if _mod_revision.is_null(merger.other_rev_id):
raise errors.NoCommits(branch)
self.branch.fetch(branch, last_revision=merger.other_rev_id)
merger.other_basis = merger.other_rev_id
merger.other_tree = self.branch.repository.revision_tree(
merger.other_branch = branch
if from_revision is None:
pb = bzrlib.ui.ui_factory.nested_progress_bar()
merger = Merger(self.branch, this_tree=self, pb=pb)
merger.pp = ProgressPhase("Merge phase", 5, pb)
merger.pp.next_phase()
# check that there are no
merger.check_basis(check_clean=True, require_commits=False)
if to_revision is None:
to_revision = branch.last_revision()
to_revision = osutils.safe_revision_id(to_revision)
merger.other_rev_id = to_revision
if merger.other_rev_id is None:
raise errors.NoCommits(branch)
self.branch.fetch(branch, last_revision=merger.other_rev_id)
merger.other_basis = merger.other_rev_id
merger.other_tree = self.branch.repository.revision_tree(
merger.other_branch = branch
merger.pp.next_phase()
merger.find_base()
merger.set_base_revision(from_revision, branch)
if merger.base_rev_id == merger.other_rev_id:
raise errors.PointlessMerge
merger.backup_files = False
if merge_type is None:
if merger.base_rev_id == merger.other_rev_id:
raise errors.PointlessMerge
merger.backup_files = False
merger.merge_type = Merge3Merger
merger.merge_type = merge_type
merger.set_interesting_files(None)
merger.show_base = False
merger.reprocess = False
conflicts = merger.do_merge()
merger.set_pending()
merger.set_interesting_files(None)
merger.show_base = False
merger.reprocess = False
conflicts = merger.do_merge()
return conflicts
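# Note (added commentary, not part of the original source): an illustrative
# call for the merge helper above -- the enclosing def is outside this hunk,
# so the name merge_from_branch() is an assumption based on the attributes
# it sets:
#   conflicts = tree.merge_from_branch(other_branch)
# The newer path refuses to merge over uncommitted changes unless force is
# given, and raises PointlessMerge when base and other revisions coincide.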
@needs_read_lock
def merge_modified(self):
"""Return a dictionary of files modified by a merge.
The list is initialized by WorkingTree.set_merge_modified, which is
typically called after we make some automatic updates to the tree
because of a merge.
sio = StringIO()
self._serialize(self._inventory, sio)
self._transport.put_file('inventory', sio,
mode=self.bzrdir._get_file_mode())
self._control_files.put('inventory', sio)
self._inventory_is_modified = False
def _kind(self, relpath):
return osutils.file_kind(self.abspath(relpath))
def list_files(self, include_root=False, from_dir=None, recursive=True):
"""List all files as (path, class, kind, id, entry).
def list_files(self, include_root=False):
"""Recursively list all files as (path, class, kind, id, entry).
Lists, but does not descend into unversioned directories.
This does not include files that have been deleted in this
tree. Skips the control directory.
:param include_root: if True, return an entry for the root
:param from_dir: start from this directory or None for the root
:param recursive: whether to recurse into subdirectories or not
Skips the control directory.
# list_files is an iterator, so @needs_read_lock doesn't work properly
# with it. So callers should be careful to always read_lock the tree.
except KeyError:
yield fp[1:], c, fk, None, TreeEntry()
if fk != 'directory':
# But do this child first if recursing down
new_children = os.listdir(fap)
new_children = collections.deque(new_children)
stack.append((f_ie.file_id, fp, fap, new_children))
# Break out of inner loop,
# so that we start outer loop with child
# But do this child first
new_children = os.listdir(fap)
new_children = collections.deque(new_children)
stack.append((f_ie.file_id, fp, fap, new_children))
# Break out of inner loop,
# so that we start outer loop with child
# if we finished all children, pop it off the stack
@needs_tree_write_lock
def move(self, from_paths, to_dir=None, after=False):
def move(self, from_paths, to_dir=None, after=False, **kwargs):
"""Rename files.
to_dir must exist in the inventory.
If to_dir exists and is a directory, the files are moved into
it, keeping their old names.
Note that to_dir is only the last component of the new name;
this doesn't change the directory.
def read_basis_inventory(self):
"""Read the cached basis inventory."""
path = self._basis_inventory_name()
return self._transport.get_bytes(path)
return self._control_files.get(path).read()
@needs_read_lock
def read_working_inventory(self):
"""Read the working inventory.
:raises errors.InventoryModified: read_working_inventory will fail
when the current in memory inventory has been modified.
# conceptually this should be an implementation detail of the tree.
# XXX: Deprecate this.
# ElementTree does its own conversion from UTF-8, so open in
if self._inventory_is_modified:
raise errors.InventoryModified(self)
f = self._transport.get('inventory')
result = self._deserialize(f)
result = self._deserialize(self._control_files.get('inventory'))
self._set_inventory(result, dirty=False)
@needs_tree_write_lock
def remove(self, files, verbose=False, to_file=None, keep_files=True,
"""Remove nominated files from the working inventory.
:files: File paths relative to the basedir.
:keep_files: If true, the files will also be kept.
:force: Delete files and directories, even if they are changed and
even if the directories are not empty.
def remove(self, files, verbose=False, to_file=None):
"""Remove nominated files from the working inventory.
This does not remove their text. This does not run on XXX on what? RBC
TODO: Refuse to remove modified files unless --force is given?
TODO: Do something useful with directories.
TODO: Should this remove the text or not? Tough call; not
removing may be useful and the user can just use rm, and
is the opposite of add. Removing it is consistent with most
other tools. Maybe an option.
## TODO: Normalize names
## TODO: Remove nested loops; better scalability
if isinstance(files, basestring):
files = [files]
all_files = set() # specified and nested files
unknown_nested_files = set()
to_file = sys.stdout
files_to_backup = []
def recurse_directory_to_add_files(directory):
# Recurse directory and add all files
# so we can check if they have changed.
for parent_info, file_infos in self.walkdirs(directory):
for relpath, basename, kind, lstat, fileid, kind in file_infos:
# Is it versioned or ignored?
if self.path2id(relpath):
# Add nested content for deletion.
all_files.add(relpath)
# Files which are not versioned
# should be treated as unknown.
files_to_backup.append(relpath)
for filename in files:
# Get file name into canonical form.
abspath = self.abspath(filename)
filename = self.relpath(abspath)
if len(filename) > 0:
all_files.add(filename)
recurse_directory_to_add_files(filename)
files = list(all_files)
return # nothing to do
# Sort needed to first handle directory content before the directory
files.sort(reverse=True)
# Bail out if we are going to delete files we shouldn't
if not keep_files and not force:
for (file_id, path, content_change, versioned, parent_id, name,
kind, executable) in self.iter_changes(self.basis_tree(),
include_unchanged=True, require_versioned=False,
want_unversioned=True, specific_files=files):
if versioned[0] == False:
# The record is unknown or newly added
files_to_backup.append(path[1])
elif (content_change and (kind[1] is not None) and
osutils.is_inside_any(files, path[1])):
# Versioned and changed, but not deleted, and still
# in one of the dirs to be deleted.
files_to_backup.append(path[1])
def backup(file_to_backup):
backup_name = self.bzrdir._available_backup_name(file_to_backup)
osutils.rename(abs_path, self.abspath(backup_name))
return "removed %s (but kept a copy: %s)" % (file_to_backup,
# Build inv_delta and delete files where applicable,
# do this before any modifications to inventory.
inv = self.inventory
# do this before any modifications
for f in files:
fid = self.path2id(f)
fid = inv.path2id(f)
message = "%s is not versioned." % (f,)
note("%s is not versioned." % f)
# having removed it, it must be either ignored or unknown
if self.is_ignored(f):
new_status = 'I'
new_status = '?'
# XXX: Really should be a more abstract reporter interface
kind_ch = osutils.kind_marker(self.kind(fid))
to_file.write(new_status + ' ' + f + kind_ch + '\n')
inv_delta.append((f, None, fid, None))
message = "removed %s" % (f,)
abs_path = self.abspath(f)
if osutils.lexists(abs_path):
if (osutils.isdir(abs_path) and
len(os.listdir(abs_path)) > 0):
osutils.rmtree(abs_path)
message = "deleted %s" % (f,)
if f in files_to_backup:
osutils.delete_any(abs_path)
message = "deleted %s" % (f,)
elif message is not None:
# Only care if we haven't done anything yet.
message = "%s does not exist." % (f,)
# Print only one message (if any) per file.
if message is not None:
self.apply_inventory_delta(inv_delta)
textui.show_status(new_status, inv[fid].kind, f,
self._write_inventory(inv)
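# Note (added commentary, not part of the original source): in the newer
# remove() shown above, keep_files=True only deversions the paths and leaves
# them on disk; with keep_files=False the files are deleted, and anything
# that is unknown or still carries uncommitted changes is backed up (renamed)
# rather than deleted, unless force is set.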
@needs_tree_write_lock
def revert(self, filenames=None, old_tree=None, backups=True,
pb=None, report_changes=False):
def revert(self, filenames, old_tree=None, backups=True,
pb=DummyProgress(), report_changes=False):
from bzrlib.conflicts import resolve
symbol_versioning.warn('Using [] to revert all files is deprecated'
' as of bzr 0.91. Please use None (the default) instead.',
DeprecationWarning, stacklevel=2)
if old_tree is None:
basis_tree = self.basis_tree()
basis_tree.lock_read()
old_tree = basis_tree
old_tree = self.basis_tree()
conflicts = transform.revert(self, old_tree, filenames, backups, pb,
if not len(filenames):
self.set_parent_ids(self.get_parent_ids()[:1])
conflicts = transform.revert(self, old_tree, filenames, backups, pb,
if filenames is None and len(self.get_parent_ids()) > 1:
last_revision = self.last_revision()
if last_revision != _mod_revision.NULL_REVISION:
if basis_tree is None:
basis_tree = self.basis_tree()
basis_tree.lock_read()
parent_trees.append((last_revision, basis_tree))
self.set_parent_trees(parent_trees)
resolve(self, filenames, ignore_misses=True, recursive=True)
if basis_tree is not None:
resolve(self, filenames, ignore_misses=True)
return conflicts
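# Note (added commentary, not part of the original source): when revert() is
# given no filenames (None, or the deprecated []) it also trims the parent
# list back to a single parent, i.e. a full revert abandons pending merges;
# reverting specific files leaves the pending-merge state alone.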
def revision_tree(self, revision_id):
# can't set that until we update the working tree's last revision to be
# one from the new branch, because it will just get absorbed by the
# parent de-duplication logic.
# We MUST save it even if an error occurs, because otherwise the user's
# local work is unreferenced and will appear to have been lost.
last_rev = self.get_parent_ids()[0]
except IndexError:
last_rev = _mod_revision.NULL_REVISION
if revision is None:
revision = self.branch.last_revision()
old_tip = old_tip or _mod_revision.NULL_REVISION
if not _mod_revision.is_null(old_tip) and old_tip != last_rev:
# the branch we are bound to was updated
# merge those changes in first
base_tree = self.basis_tree()
other_tree = self.branch.repository.revision_tree(old_tip)
nb_conflicts = merge.merge_inner(self.branch, other_tree,
base_tree, this_tree=self,
change_reporter=change_reporter,
show_base=show_base)
self.add_parent_tree((old_tip, other_tree))
trace.note('Rerun update after fixing the conflicts.')
if last_rev != _mod_revision.ensure_null(revision):
# the working tree is up to date with the branch
# we can merge the specified revision from master
to_tree = self.branch.repository.revision_tree(revision)
to_root_id = to_tree.get_root_id()
if last_rev != self.branch.last_revision():
# merge tree state up to new branch tip.
basis = self.basis_tree()
basis.lock_read()
if (basis.inventory.root is None
or basis.inventory.root.file_id != to_root_id):
self.set_root_id(to_root_id)
to_tree = self.branch.basis_tree()
if basis.inventory.root is None:
self.set_root_id(to_tree.inventory.root.file_id)
result += merge.merge_inner(
# determine the branch point
graph = self.branch.repository.get_graph()
base_rev_id = graph.find_unique_lca(self.branch.last_revision(),
base_tree = self.branch.repository.revision_tree(base_rev_id)
nb_conflicts = merge.merge_inner(self.branch, to_tree, base_tree,
change_reporter=change_reporter,
show_base=show_base)
self.set_last_revision(revision)
# TODO - dedup parents list with things merged by pull ?
# reuse the tree we've updated to, to set the basis:
parent_trees = [(revision, to_tree)]
parent_trees = [(self.branch.last_revision(), to_tree)]
merges = self.get_parent_ids()[1:]
# Ideally we ask the tree for the trees here, that way the working
# tree can decide whether to give us the entire tree or give us a
# lazy initialised tree. dirstate for instance will have the trees
# in ram already, whereas a last-revision + basis-inventory tree
# will not, but also does not need them when setting parents.
for parent in merges:
parent_trees.append(
(parent, self.branch.repository.revision_tree(parent)))
if not _mod_revision.is_null(old_tip):
if old_tip is not None:
parent_trees.append(
(old_tip, self.branch.repository.revision_tree(old_tip)))
self.set_parent_trees(parent_trees)
last_rev = parent_trees[0][0]
# the working tree had the same last-revision as the master
# branch did. We may still have pivot local work from the local
# branch into old_tip:
if old_tip is not None:
self.add_parent_tree_id(old_tip)
if old_tip and old_tip != last_rev:
# our last revision was not the prior branch last revision
# and we have converted that last revision to a pending merge.
# base is somewhere between the branch tip now
# and the now pending merge
# Since we just modified the working tree and inventory, flush out
# the current state, before we modify it again.
# TODO: jam 20070214 WorkingTree3 doesn't require this, dirstate
# requires it only because TreeTransform directly munges the
# inventory and calls tree._write_inventory(). Ultimately we
# should be able to remove this extra flush.
from bzrlib.revision import common_ancestor
base_rev_id = common_ancestor(self.branch.last_revision(),
self.branch.repository)
except errors.NoCommonAncestor:
base_tree = self.branch.repository.revision_tree(base_rev_id)
other_tree = self.branch.repository.revision_tree(old_tip)
result += merge.merge_inner(
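# Note (added commentary, not part of the original source): the update logic
# above proceeds in two merges: first the old branch tip (old_tip) is merged
# into the tree if the bound branch moved underneath it, then the tree is
# merged up to the requested revision, after which set_parent_trees() records
# the new basis plus any still-pending merges.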
def _write_hashcache_if_dirty(self):
"""Write out the hashcache if it is dirty."""
def check_state(self):
"""Check that the working state is/isn't valid."""
check_refs = self._get_check_refs()
for ref in check_refs:
refs[ref] = self.branch.repository.revision_tree(value)
@needs_tree_write_lock
def reset_state(self, revision_ids=None):
"""Reset the state of the working tree.
This does a hard-reset to a last-known-good state. This is a way to
fix if something got corrupted (like the .bzr/checkout/dirstate file)
if revision_ids is None:
revision_ids = self.get_parent_ids()
if not revision_ids:
rt = self.branch.repository.revision_tree(
_mod_revision.NULL_REVISION)
rt = self.branch.repository.revision_tree(revision_ids[0])
self._write_inventory(rt.inventory)
self.set_parent_ids(revision_ids)
def _get_rules_searcher(self, default_searcher):
"""See Tree._get_rules_searcher."""
if self._rules_searcher is None:
self._rules_searcher = super(WorkingTree,
self)._get_rules_searcher(default_searcher)
return self._rules_searcher
def get_shelf_manager(self):
"""Return the ShelfManager for this WorkingTree."""
from bzrlib.shelf import ShelfManager
return ShelfManager(self, self._transport)
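# Note (added commentary, not part of the original source): reset_state() is
# the recovery hammer: it rewrites the working inventory from the first
# parent's revision tree (or an empty tree when there are no parents) and
# reinstates the parent ids, which is how a corrupted checkout/dirstate file
# can be brought back to a known-good state.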
class WorkingTree2(WorkingTree):
"""This is the Format 2 working tree.
This was the first weave based working tree.
- uses os locks for locking.
- uses the branch last-revision.
"""Is this format supported?
Supported formats can be initialized and opened.
Unsupported formats may not support initialization or committing or
some other features depending on the reason for not being supported.
def supports_content_filtering(self):
"""True if this format supports content filtering."""
def supports_views(self):
"""True if this format supports stored views."""
def register_format(klass, format):
klass._formats[format.get_format_string()] = format
def register_extra_format(klass, format):
klass._extra_formats.append(format)
def unregister_extra_format(klass, format):
klass._extra_formats.remove(format)
def get_formats(klass):
return klass._formats.values() + klass._extra_formats
def set_default_format(klass, format):
klass._default_format = format
def unregister_format(klass, format):
assert klass._formats[format.get_format_string()] is format
del klass._formats[format.get_format_string()]
class WorkingTreeFormat2(WorkingTreeFormat):
"""The second working tree format.
This format modified the hash cache from the format 1 hash cache.
upgrade_recommended = True
requires_normalized_unicode_filenames = True
case_sensitive_filename = "Branch-FoRMaT"
missing_parent_conflicts = False
def get_format_description(self):
"""See WorkingTreeFormat.get_format_description()."""
return "Working tree format 2"
def _stub_initialize_on_transport(self, transport, file_mode):
"""Workaround: create control files for a remote working tree.
def stub_initialize_remote(self, control_files):
"""As a special workaround create critical control files for a remote working tree
This ensures that it can later be updated and dealt with locally,
since BzrDirFormat6 and BzrDirFormat5 cannot represent dirs with
no working tree. (See bug #43064).
sio = StringIO()
inv = inventory.Inventory()
xml5.serializer_v5.write_inventory(inv, sio, working=True)
xml5.serializer_v5.write_inventory(inv, sio)
transport.put_file('inventory', sio, file_mode)
transport.put_bytes('pending-merges', '', file_mode)
def initialize(self, a_bzrdir, revision_id=None, from_branch=None,
accelerator_tree=None, hardlink=False):
control_files.put('inventory', sio)
control_files.put_bytes('pending-merges', '')
def initialize(self, a_bzrdir, revision_id=None):
"""See WorkingTreeFormat.initialize()."""
if not isinstance(a_bzrdir.transport, LocalTransport):
raise errors.NotLocalUrl(a_bzrdir.transport.base)
if from_branch is not None:
branch = from_branch
branch = a_bzrdir.open_branch()
if revision_id is None:
revision_id = _mod_revision.ensure_null(branch.last_revision())
branch.generate_revision_history(revision_id)
inv = inventory.Inventory()
branch = a_bzrdir.open_branch()
if revision_id is not None:
revision_id = osutils.safe_revision_id(revision_id)
revision_history = branch.revision_history()
position = revision_history.index(revision_id)
raise errors.NoSuchRevision(branch, revision_id)
branch.set_revision_history(revision_history[:position + 1])
revision = branch.last_revision()
wt = WorkingTree2(a_bzrdir.root_transport.local_abspath('.'),
_internal=True,
_control_files=branch.control_files)
basis_tree = branch.repository.revision_tree(revision_id)
basis_tree = branch.repository.revision_tree(revision)
if basis_tree.inventory.root is not None:
wt.set_root_id(basis_tree.get_root_id())
wt.set_root_id(basis_tree.inventory.root.file_id)
# set the parent list and cache the basis tree.
if _mod_revision.is_null(revision_id):
parent_trees = [(revision_id, basis_tree)]
wt.set_parent_trees(parent_trees)
wt.set_parent_trees([(revision, basis_tree)])
transform.build_tree(basis_tree, wt)
return self.get_format_string()
__default_format = WorkingTreeFormat6()
__default_format = WorkingTreeFormat4()
WorkingTreeFormat.register_format(__default_format)
WorkingTreeFormat.register_format(WorkingTreeFormat5())
WorkingTreeFormat.register_format(WorkingTreeFormat4())
WorkingTreeFormat.register_format(WorkingTreeFormat3())
WorkingTreeFormat.set_default_format(__default_format)
# Register extra formats, which have no format string, are not discoverable,
# and are not independently creatable. They are implicitly created as part of
# e.g. older Bazaar formats or foreign formats.
WorkingTreeFormat.register_extra_format(WorkingTreeFormat2())
# formats which have no format string are not discoverable
# and not independently creatable, so are not registered.
_legacy_formats = [WorkingTreeFormat2(),
class WorkingTreeTestProviderAdapter(object):
"""A tool to generate a suite testing multiple workingtree formats at once.
This is done by copying the test once for each transport and injecting
the transport_server, transport_readonly_server, and workingtree_format
classes into each copy. Each copy is also given a new id() to make it
def __init__(self, transport_server, transport_readonly_server, formats):
self._transport_server = transport_server
self._transport_readonly_server = transport_readonly_server
self._formats = formats
def _clone_test(self, test, bzrdir_format, workingtree_format, variation):
"""Clone test for adaptation."""
new_test = deepcopy(test)
new_test.transport_server = self._transport_server
new_test.transport_readonly_server = self._transport_readonly_server
new_test.bzrdir_format = bzrdir_format
new_test.workingtree_format = workingtree_format
def make_new_test_id():
new_id = "%s(%s)" % (test.id(), variation)
return lambda: new_id
new_test.id = make_new_test_id()
def adapt(self, test):
from bzrlib.tests import TestSuite
result = TestSuite()
for workingtree_format, bzrdir_format in self._formats:
new_test = self._clone_test(
workingtree_format, workingtree_format.__class__.__name__)
result.addTest(new_test)
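# Note (added commentary, not part of the original source): adapt() fans a
# single test case out across every registered (workingtree_format,
# bzrdir_format) pair, giving each clone its own id() such as
# "test_foo(WorkingTreeFormat3)" so failures identify the format under test.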