# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""WorkingTree object and friends.

A WorkingTree represents the editable working copy of a branch.
Operations which modify the WorkingTree are also done here,
such as renaming or adding files.  The WorkingTree has an inventory
which is updated by these operations.  A commit produces a
new revision based on the workingtree and its inventory.

At the moment every WorkingTree has its own branch.  Remote
WorkingTrees are not supported.
"""
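
# Illustrative usage sketch (not part of the original module): assuming a
# branch with a working tree exists at the current directory, a tree can be
# opened and inspected roughly like this:
#
#   from bzrlib.workingtree import WorkingTree
#   tree = WorkingTree.open('.')          # or WorkingTree.open_containing(path)[0]
#   tree.lock_read()
#   try:
#       parents = tree.get_parent_ids()   # basis revision plus pending merges
#   finally:
#       tree.unlock()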
from bzrlib import (
    conflicts as _mod_conflicts,
    revision as _mod_revision,
    )
import bzrlib.branch
from bzrlib.transport import get_transport
from bzrlib.workingtree_4 import (
    WorkingTreeFormat4,
    )

from bzrlib import symbol_versioning
from bzrlib.decorators import needs_read_lock, needs_write_lock
from bzrlib.inventory import InventoryEntry, Inventory, ROOT_ID, TreeReference
from bzrlib.lockable_files import LockableFiles
from bzrlib.lockdir import LockDir
import bzrlib.mutabletree
from bzrlib.mutabletree import needs_tree_write_lock
from bzrlib import osutils
from bzrlib.osutils import (
    supports_executable,
    )
from bzrlib.filters import filtered_input_file
from bzrlib.trace import mutter, note
from bzrlib.transport.local import LocalTransport
from bzrlib.progress import ProgressPhase
from bzrlib.revision import CURRENT_REVISION
from bzrlib.rio import RioReader, rio_file, Stanza
from bzrlib.symbol_versioning import (
    deprecated_passed,
    DEPRECATED_PARAMETER,
    )

MERGE_MODIFIED_HEADER_1 = "BZR merge-modified list format 1"
# TODO: Modifying the conflict objects or their type is currently nearly
# impossible as there is no clear relationship between the working tree format
# and the conflict list file format.
CONFLICT_HEADER_1 = "BZR conflict list format 1"

ERROR_PATH_NOT_FOUND = 3    # WindowsError errno code, equivalent to ENOENT

@deprecated_function(zero_thirteen)
def gen_file_id(name):
    """Return a new file id for the basename 'name'.

    Use bzrlib.generate_ids.gen_file_id() instead.
    """
    return generate_ids.gen_file_id(name)


@deprecated_function(zero_thirteen)
def gen_root_id():
    """Return a new tree-root file id.

    This has been deprecated in favor of bzrlib.generate_ids.gen_root_id().
    """
    return generate_ids.gen_root_id()

class TreeEntry(object):
    """An entry that implements the minimum interface used by commands.

    This needs further inspection, it may be better to have
    InventoryEntries without ids - though that seems wrong. For now,
    this is a parallel hierarchy to InventoryEntry, and needs to become
    one of several things: decorates to that hierarchy, children of, or
    parents of it.
    """
        self._control_files.break_lock()
        self.branch.break_lock()

    def _get_check_refs(self):
        """Return the references needed to perform a check of this tree.

        The default implementation returns no refs, and is only suitable for
        trees that have no local caching and can commit on ghosts at any time.

        :seealso: bzrlib.check for details about check_refs.
        """
        return []

    def requires_rich_root(self):
        return self._format.requires_rich_root

    def supports_tree_reference(self):
        return False

    def supports_content_filtering(self):
        return self._format.supports_content_filtering()

    def supports_views(self):
        return self.views.supports_views()

    def _set_inventory(self, inv, dirty):
        """Set the internal cached inventory."""
        # at this point ?
        try:
            return self.branch.repository.revision_tree(revision_id)
        except (errors.RevisionNotPresent, errors.NoSuchRevision):
            # the basis tree *may* be a ghost or a low level error may have
            # occurred. If the revision is present, it's a problem, if it's not
            # then it is a ghost.
            if self.branch.repository.has_revision(revision_id):
                raise
            # the basis tree is a ghost so return an empty tree.
            return self.branch.repository.revision_tree(
                       _mod_revision.NULL_REVISION)

    def _cleanup(self):
        self._flush_ignore_list_cache()
    @staticmethod
    @deprecated_method(zero_eight)
    def create(branch, directory):
        """Create a workingtree for branch at directory.

        If directory already exists it must have a .bzr directory.
        If it does not exist, it will be created.

        This returns a new WorkingTree object for the new checkout.

        TODO FIXME RBC 20060124 when we have checkout formats in place this
        should accept an optional revisionid to checkout [and reject this if
        checking out into the same dir as a pre-checkout-aware branch format.]

        XXX: When BzrDir is present, these should be created through that
        interface instead.
        """
        warnings.warn('delete WorkingTree.create', stacklevel=3)
        transport = get_transport(directory)
        if branch.bzrdir.root_transport.base == transport.base:
            # same dir
            return branch.bzrdir.create_workingtree()
        # different directory,
        # create a branch reference
        # and now a working tree.
        raise NotImplementedError

    @staticmethod
    @deprecated_method(zero_eight)
    def create_standalone(directory):
        """Create a checkout and a branch and a repo at directory.

        Directory must exist and be empty.

        Please use BzrDir.create_standalone_workingtree instead.
        """
        return bzrdir.BzrDir.create_standalone_workingtree(directory)
    def relpath(self, path):
        """Return the local path portion from a given path.

        The path may be absolute or relative. If it is a relative path it is
        interpreted relative to the python current working directory.
        """
        return osutils.relpath(self.basedir, path)

    def has_filename(self, filename):
        return osutils.lexists(self.abspath(filename))
    def get_file(self, file_id, path=None, filtered=True):
        return self.get_file_with_stat(file_id, path, filtered=filtered)[0]

    def get_file_with_stat(self, file_id, path=None, filtered=True,
        _fstat=os.fstat):
        """See Tree.get_file_with_stat."""
        if path is None:
            path = self.id2path(file_id)
        file_obj = self.get_file_byname(path, filtered=False)
        stat_value = _fstat(file_obj.fileno())
        if filtered and self.supports_content_filtering():
            filters = self._content_filter_stack(path)
            file_obj = filtered_input_file(file_obj, filters)
        return (file_obj, stat_value)

    def get_file_text(self, file_id, path=None, filtered=True):
        return self.get_file(file_id, path=path, filtered=filtered).read()

    def get_file_byname(self, filename, filtered=True):
        path = self.abspath(filename)
        f = file(path, 'rb')
        if filtered and self.supports_content_filtering():
            filters = self._content_filter_stack(filename)
            return filtered_input_file(f, filters)
        else:
            return f

    def get_file_lines(self, file_id, path=None, filtered=True):
        """See Tree.get_file_lines()"""
        file = self.get_file(file_id, path, filtered=filtered)
        try:
            return file.readlines()
        finally:
            file.close()
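
    # Content-filtering sketch (illustrative, not from the original source):
    # when the tree format supports content filtering, readers see the
    # canonical form of a file while the raw bytes stay on disk, e.g.
    #
    #   file_obj, stat_value = tree.get_file_with_stat(file_id)
    #   raw = tree.get_file_byname(path, filtered=False)
    #
    # Here 'tree', 'file_id' and 'path' are assumed to be an open, read-locked
    # WorkingTree, a versioned file id and its tree-relative path.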
    def annotate_iter(self, file_id, default_revision=CURRENT_REVISION):
        """See Tree.annotate_iter

        This implementation will use the basis tree implementation if possible.
        Lines not in the basis are attributed to CURRENT_REVISION

        If there are pending merges, lines added by those merges will be
        incorrectly attributed to CURRENT_REVISION (but after committing, the
        attribution will be correct).
        """
        maybe_file_parent_keys = []
        for parent_id in self.get_parent_ids():
            try:
                parent_tree = self.revision_tree(parent_id)
            except errors.NoSuchRevisionInTree:
                parent_tree = self.branch.repository.revision_tree(parent_id)
            parent_tree.lock_read()
            try:
                if file_id not in parent_tree:
                    continue
                ie = parent_tree.inventory[file_id]
                if ie.kind != 'file':
                    # Note: this is slightly unnecessary, because symlinks and
                    # directories have a "text" which is the empty text, and we
                    # know that won't mess up annotations. But it seems cleaner
                    continue
                parent_text_key = (file_id, ie.revision)
                if parent_text_key not in maybe_file_parent_keys:
                    maybe_file_parent_keys.append(parent_text_key)
            finally:
                parent_tree.unlock()
        graph = _mod_graph.Graph(self.branch.repository.texts)
        heads = graph.heads(maybe_file_parent_keys)
        file_parent_keys = []
        for key in maybe_file_parent_keys:
            if key in heads:
                file_parent_keys.append(key)

        # Now we have the parents of this content
        annotator = self.branch.repository.texts.get_annotator()
        text = self.get_file(file_id).read()
        this_key = (file_id, default_revision)
        annotator.add_special_text(this_key, file_parent_keys, text)
        annotations = [(key[-1], line)
                       for key, line in annotator.annotate_flat(this_key)]
        return annotations
    def _get_ancestors(self, default_revision):
        ancestors = set([default_revision])
    def clone(self, to_bzrdir, revision_id=None):
        """Duplicate this working tree into to_bzrdir, including all state.

        Specifically modified files are kept as modified, but
        ignored and unknown files are discarded.

        If you want to make a new line of development, see bzrdir.sprout()

        revision
            If not None, the cloned tree will have its last revision set to
            revision, and difference between the source tree's last revision
            and this one merged in.
        """
        # assumes the target bzr dir format is compatible.
        result = to_bzrdir.create_workingtree()
        self.copy_content_into(result, revision_id)
                kind = 'tree-reference'
            return kind, None, None, None
        elif kind == 'symlink':
            target = osutils.readlink(abspath)
            return ('symlink', None, None, target)
        else:
            return (kind, None, None, None)
    @deprecated_method(zero_eleven)
    def pending_merges(self):
        """Return a list of pending merges.

        These are revisions that have been merged into the working
        directory but not yet committed.

        As of 0.11 this is deprecated. Please see WorkingTree.get_parent_ids()
        instead - which is available on all tree objects.
        """
        return self.get_parent_ids()[1:]
    def _file_content_summary(self, path, stat_result):
        size = stat_result.st_size
        executable = self._is_executable_from_path_and_stat(path, stat_result)
        # try for a stat cache lookup
        return ('file', size, executable, self._sha_from_stat(
            path, stat_result))
    def _check_parents_for_ghosts(self, revision_ids, allow_leftmost_as_ghost):
        """Common ghost checking functionality from set_parent_*."""
    def _set_merges_from_parent_ids(self, parent_ids):
        merges = parent_ids[1:]
        self._transport.put_bytes('pending-merges', '\n'.join(merges),
            mode=self.bzrdir._get_file_mode())
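
    # Note (inferred from the code above, not original commentary): the
    # 'pending-merges' control file is simply the merged revision ids, one
    # per line, excluding the first (basis) parent.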
    def _filter_parent_ids_by_ancestry(self, revision_ids):
        """Check that all merged revisions are proper 'heads'.

        This will always return the first revision_id, and any merged revisions
        which are not ancestors of another requested revision.
        """
        if len(revision_ids) == 0:
            return revision_ids
        graph = self.branch.repository.get_graph()
        heads = graph.heads(revision_ids)
        new_revision_ids = revision_ids[:1]
        for revision_id in revision_ids[1:]:
            if revision_id in heads and revision_id not in new_revision_ids:
                new_revision_ids.append(revision_id)
        if new_revision_ids != revision_ids:
            trace.mutter('requested to set revision_ids = %s,'
                ' but filtered to %s', revision_ids, new_revision_ids)
        return new_revision_ids
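
    # Worked example (hypothetical revision ids): with
    # revision_ids = ['rev-c', 'rev-a', 'rev-d'] where 'rev-a' is an ancestor
    # of 'rev-c', graph.heads() omits 'rev-a', so the result is
    # ['rev-c', 'rev-d'].  The leftmost id is always kept, even if it is not
    # a head itself.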
    @needs_tree_write_lock
    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This api will try to retrieve the tree data
        for each element of revision_ids from the trees repository. If you have
        tree data already available, it is more efficient to use the
        set_parent_trees api.
            branch.last_revision().
        """
        from bzrlib.merge import Merger, Merge3Merger
        merger = Merger(self.branch, this_tree=self)
        # check that there are no local alterations
        if not force and self.has_changes():
            raise errors.UncommittedChanges(self)
        if to_revision is None:
            to_revision = _mod_revision.ensure_null(branch.last_revision())
        merger.other_rev_id = to_revision
        if _mod_revision.is_null(merger.other_rev_id):
            raise errors.NoCommits(branch)
        self.branch.fetch(branch, last_revision=merger.other_rev_id)
        merger.other_basis = merger.other_rev_id
        merger.other_tree = self.branch.repository.revision_tree(
            merger.other_rev_id)
        merger.other_branch = branch
        if from_revision is None:
            merger.find_base()
        else:
            merger.set_base_revision(from_revision, branch)
        if merger.base_rev_id == merger.other_rev_id:
            raise errors.PointlessMerge
        merger.backup_files = False
        if merge_type is None:
            merger.merge_type = Merge3Merger
        else:
            merger.merge_type = merge_type
        merger.set_interesting_files(None)
        merger.show_base = False
        merger.reprocess = False
        conflicts = merger.do_merge()
    def merge_modified(self):
        """Return a dictionary of files modified by a merge.

        The list is initialized by WorkingTree.set_merge_modified, which is
        typically called after we make some automatic updates to the tree
        because of a merge.

        This returns a map of file_id->sha1, containing only files which are
        still in the working inventory and have that text hash.
        """
        try:
            hashfile = self._transport.get('merge-hashes')
        except errors.NoSuchFile:
            return {}
        merge_hashes = {}
        try:
            if hashfile.next() != MERGE_MODIFIED_HEADER_1 + '\n':
                raise errors.MergeModifiedFormatError()
        except StopIteration:
            raise errors.MergeModifiedFormatError()
        for s in RioReader(hashfile):
            # RioReader reads in Unicode, so convert file_ids back to utf8
            file_id = osutils.safe_file_id(s.get("file_id"), warn=False)
            if file_id not in self.inventory:
                continue
            text_hash = s.get("hash")
            if text_hash == self.get_file_sha1(file_id):
                merge_hashes[file_id] = text_hash
        return merge_hashes
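
    # Shape of the 'merge-hashes' file implied by the reader above
    # (hypothetical values): a header line followed by Rio stanzas pairing a
    # file_id with the SHA-1 the file had just after the merge.
    #
    #   BZR merge-modified list format 1
    #   file_id: foo-20090101093000-abcdefabcdef-1
    #   hash: 0123456789abcdef0123456789abcdef01234567
    #
    # Entries whose file is gone from the inventory, or whose current text
    # hash differs, are dropped when read back.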
    @needs_write_lock
    def mkdir(self, path, file_id=None):
        sio = StringIO()
        self._serialize(self._inventory, sio)
        sio.seek(0)
        self._transport.put_file('inventory', sio,
            mode=self.bzrdir._get_file_mode())
        self._inventory_is_modified = False

    def _kind(self, relpath):
        return osutils.file_kind(self.abspath(relpath))
    def list_files(self, include_root=False, from_dir=None, recursive=True):
        """List all files as (path, class, kind, id, entry).

        Lists, but does not descend into unversioned directories.
        This does not include files that have been deleted in this
        tree. Skips the control directory.

        :param include_root: if True, return an entry for the root
        :param from_dir: start from this directory or None for the root
        :param recursive: whether to recurse into subdirectories or not
        """
        # list_files is an iterator, so @needs_read_lock doesn't work properly
        # with it. So callers should be careful to always read_lock the tree.

        fk_entries = {'directory': TreeDirectory, 'file': TreeFile,
                      'symlink': TreeLink}

        # directory file_id, relative path, absolute path, reverse sorted children
        if from_dir is not None:
            from_dir_id = inv.path2id(from_dir)
            if from_dir_id is None:
                # Directory not versioned
                return
            from_dir_abspath = pathjoin(self.basedir, from_dir)
        else:
            from_dir_id = inv.root.file_id
            from_dir_abspath = self.basedir
        children = os.listdir(from_dir_abspath)
        children.sort()
        # jam 20060527 The kernel sized tree seems equivalent whether we
        # use a deque and popleft to keep them sorted, or if we use a plain
        # list and just reverse() them.
        children = collections.deque(children)
        stack = [(from_dir_id, u'', from_dir_abspath, children)]
        while stack:
            from_dir_id, from_dir_relpath, from_dir_abspath, children = stack[-1]

                    except KeyError:
                        yield fp[1:], c, fk, None, TreeEntry()
                    continue

                if fk != 'directory':
                    continue

                # But do this child first if recursing down
                if recursive:
                    new_children = os.listdir(fap)
                    new_children.sort()
                    new_children = collections.deque(new_children)
                    stack.append((f_ie.file_id, fp, fap, new_children))
                    # Break out of inner loop,
                    # so that we start outer loop with child
                    break
            else:
                # if we finished all children, pop it off the stack
                stack.pop()
        from_tail = splitpath(from_rel)[-1]
        from_id = inv.path2id(from_rel)
        if from_id is None:
            # if file is missing in the inventory maybe it's in the basis_tree
            basis_tree = self.branch.basis_tree()
            from_id = basis_tree.path2id(from_rel)
            if from_id is None:
                raise errors.BzrRenameFailedError(from_rel, to_rel,
                    errors.NotVersionedError(path=str(from_rel)))
            # put entry back in the inventory so we can rename it
            from_entry = basis_tree.inventory[from_id].copy()
            inv.add(from_entry)
        else:
            from_entry = inv[from_id]
        from_parent_id = from_entry.parent_id
        to_dir, to_tail = os.path.split(to_rel)
        to_dir_id = inv.path2id(to_dir)
        :raises: NoSuchId if any fileid is not currently versioned.
        """
        for file_id in file_ids:
            if file_id not in self._inventory:
                raise errors.NoSuchId(self, file_id)
        for file_id in file_ids:
            if self._inventory.has_id(file_id):
                self._inventory.remove_recursive_id(file_id)
        if len(file_ids):
            # in the future this should just set a dirty bit to wait for the
            # final unlock. However, until all methods of workingtree start
            # with the current in-memory inventory rather than triggering
            # a read, it is more complex - we need to teach read_inventory
            # to know when to read, and when to not read first... and possibly
            # to save first when the in memory one may be corrupted.
            # so for now, we just only write it if it is indeed dirty.
            # - RBC 20060907
            self._write_inventory(self._inventory)
    @deprecated_method(zero_eight)
    def iter_conflicts(self):
        """List all files in the tree that have text or content conflicts.
        DEPRECATED. Use conflicts instead."""
        return self._iter_conflicts()

    def _iter_conflicts(self):
        conflicted = set()
    @needs_write_lock
    def pull(self, source, overwrite=False, stop_revision=None,
             change_reporter=None, possible_transports=None, local=False):
        source.lock_read()
        try:
            old_revision_info = self.branch.last_revision_info()
            basis_tree = self.basis_tree()
            count = self.branch.pull(source, overwrite, stop_revision,
                                     possible_transports=possible_transports,
                                     local=local)
            new_revision_info = self.branch.last_revision_info()
            if new_revision_info != old_revision_info:
                repository = self.branch.repository
                basis_tree.lock_read()
                try:
                    new_basis_tree = self.branch.basis_tree()
                    merge.merge_inner(
                                self.branch,
                                new_basis_tree,
                                basis_tree,
                                this_tree=self,
                                change_reporter=change_reporter)
                    basis_root_id = basis_tree.get_root_id()
                    new_root_id = new_basis_tree.get_root_id()
                    if basis_root_id != new_root_id:
                        self.set_root_id(new_root_id)
                finally:
                    basis_tree.unlock()
                # TODO - dedup parents list with things merged by pull ?
                # reuse the revisiontree we merged against to set the new
                # tree data.
                parent_trees = [(self.branch.last_revision(), new_basis_tree)]
                # we have to pull the merge trees out again, because
                # merge_inner has set the ids. - this corner is not yet
                # layered well enough to prevent double handling.
                # XXX TODO: Fix the double handling: telling the tree about
                # the already known parent data is wasteful.
    def is_ignored(self, filename):
        r"""Check whether the filename matches an ignore pattern.

        Patterns containing '/' or '\' need to match the whole path;
        others match against only the last component.  Patterns starting
        with '!' are ignore exceptions.  Exceptions take precedence
        over regular patterns and cause the filename to not be ignored.

        If the file is ignored, returns the pattern which caused it to
        be ignored, otherwise None.  So this can simply be used as a
        boolean if desired."""
        if getattr(self, '_ignoreglobster', None) is None:
            self._ignoreglobster = globbing.ExceptionGlobster(self.get_ignore_list())
        return self._ignoreglobster.match(filename)
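
    # Illustrative example (ignore list values are hypothetical): with an
    # ignore list of ['*.o', '!important.o', 'debug/*.log'],
    # is_ignored('lib/foo.o') returns '*.o' (matched against the basename),
    # is_ignored('important.o') returns None because the '!' exception takes
    # precedence, and 'debug/*.log' only ignores paths it matches in full,
    # such as 'debug/x.log'.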
    def kind(self, file_id):
        return file_kind(self.id2abspath(file_id))

    def stored_kind(self, file_id):
        """See Tree.stored_kind"""
        return self.inventory[file_id].kind

    def _comparison_data(self, entry, path):
        abspath = self.abspath(path)
        # as commit already has that ready-to-use [while the format is the
        # same, that is].
        try:
            # this double handles the inventory - unpack and repack -
            # but is easier to understand. We can/should put a conditional
            # in here based on whether the inventory is in the latest format
            # - perhaps we should repack all inventories on a repository
            # upgrade ?
            # the fast path is to copy the raw xml from the repository. If the
            # xml contains 'revision_id="', then we assume the right
            # revision_id is set. We must check for this full string, because a
            # root node id can legitimately look like 'revision_id' but cannot
            # contain a '"'.
            xml = self.branch.repository._get_inventory_xml(new_revision)
            firstline = xml.split('\n', 1)[0]
            if (not 'revision_id="' in firstline or
                'format="7"' not in firstline):
                inv = self.branch.repository._serializer.read_inventory_from_string(
                    xml, new_revision)
                xml = self._create_basis_xml_from_inventory(new_revision, inv)
            self._write_basis_inventory(xml)
        except (errors.NoSuchRevision, errors.RevisionNotPresent):
            pass
    def read_basis_inventory(self):
        """Read the cached basis inventory."""
        path = self._basis_inventory_name()
        return self._transport.get_bytes(path)
    @needs_read_lock
    def read_working_inventory(self):
        """Read the working inventory.

        :raises errors.InventoryModified: read_working_inventory will fail
            when the current in memory inventory has been modified.
        """
        # conceptually this should be an implementation detail of the tree.
        # XXX: Deprecate this.
        # ElementTree does its own conversion from UTF-8, so open in
        # binary.
        if self._inventory_is_modified:
            raise errors.InventoryModified(self)
        f = self._transport.get('inventory')
        try:
            result = self._deserialize(f)
        finally:
            f.close()
        self._set_inventory(result, dirty=False)
        new_files = set()
        unknown_nested_files = set()
        if to_file is None:
            to_file = sys.stdout

        def recurse_directory_to_add_files(directory):
            # Recurse directory and add all files
            # so we can check if they have changed.
            for parent_info, file_infos in self.walkdirs(directory):
                for relpath, basename, kind, lstat, fileid, kind in file_infos:
                    # Is it versioned or ignored?
                    if self.path2id(relpath) or self.is_ignored(relpath):
                        # Add nested content for deletion.
    @needs_tree_write_lock
    def set_root_id(self, file_id):
        """Set the root id for this tree."""
        if file_id is None:
            raise ValueError(
                'WorkingTree.set_root_id with fileid=None')
        file_id = osutils.safe_file_id(file_id)
        self._set_root_id(file_id)
    def _set_root_id(self, file_id):
        """Set the root id for this tree, in a format specific manner.

        :param file_id: The file id to assign to the root. It must not be
            present in the current inventory or an error will occur. It must
            not be None, but rather a valid file id.
        """
    def unlock(self):
        """See Branch.unlock.

        WorkingTree locking just uses the Branch locking facilities.
        This is current because all working trees have an embedded branch
        within them. If in the future, we were to make branch data shareable
        between multiple working trees, i.e. via shared storage, then we
        would probably want to lock both the local tree, and the branch.
        """
        raise NotImplementedError(self.unlock)
    def update(self, change_reporter=None, possible_transports=None,
               revision=None, old_tip=_marker):
        """Update a working tree along its branch.

        This will update the branch if it's bound too, which means we have
        multiple trees involved.
        """

        # can't set that until we update the working tree's last revision to be
        # one from the new branch, because it will just get absorbed by the
        # parent de-duplication logic.
        #
        # We MUST save it even if an error occurs, because otherwise the user's
        # local work is unreferenced and will appear to have been lost.
        #
        nb_conflicts = 0
        try:
            last_rev = self.get_parent_ids()[0]
        except IndexError:
            last_rev = _mod_revision.NULL_REVISION
        if revision is None:
            revision = self.branch.last_revision()
        else:
            if revision not in self.branch.revision_history():
                raise errors.NoSuchRevision(self.branch, revision)

        old_tip = old_tip or _mod_revision.NULL_REVISION

        if not _mod_revision.is_null(old_tip) and old_tip != last_rev:
            # the branch we are bound to was updated
            # merge those changes in first
            base_tree = self.basis_tree()
            other_tree = self.branch.repository.revision_tree(old_tip)
            nb_conflicts = merge.merge_inner(self.branch, other_tree,
                                             base_tree, this_tree=self,
                                             change_reporter=change_reporter)
            if nb_conflicts:
                self.add_parent_tree((old_tip, other_tree))
                trace.note('Rerun update after fixing the conflicts.')
                return nb_conflicts

        if last_rev != _mod_revision.ensure_null(revision):
            # the working tree is up to date with the branch
            # we can merge the specified revision from master
            to_tree = self.branch.repository.revision_tree(revision)
            to_root_id = to_tree.get_root_id()

            basis = self.basis_tree()
            basis.lock_read()
            try:
                if (basis.inventory.root is None
                    or basis.inventory.root.file_id != to_root_id):
                    self.set_root_id(to_root_id)
                    self.flush()
            finally:
                basis.unlock()

            # determine the branch point
            graph = self.branch.repository.get_graph()
            base_rev_id = graph.find_unique_lca(self.branch.last_revision(),
                                                old_tip)
            base_tree = self.branch.repository.revision_tree(base_rev_id)

            nb_conflicts = merge.merge_inner(self.branch, to_tree, base_tree,
                                             this_tree=self,
                                             change_reporter=change_reporter)
            self.set_last_revision(revision)
            # TODO - dedup parents list with things merged by pull ?
            # reuse the tree we've updated to to set the basis:
            parent_trees = [(revision, to_tree)]
            merges = self.get_parent_ids()[1:]
            # Ideally we ask the tree for the trees here, that way the working
            # tree can decide whether to give us the entire tree or give us a
            # lazy initialised tree. dirstate for instance will have the trees
            # in ram already, whereas a last-revision + basis-inventory tree
            # will not, but also does not need them when setting parents.
            for parent in merges:
                parent_trees.append(
                    (parent, self.branch.repository.revision_tree(parent)))
            if not _mod_revision.is_null(old_tip):
                parent_trees.append(
                    (old_tip, self.branch.repository.revision_tree(old_tip)))
            self.set_parent_trees(parent_trees)
            last_rev = parent_trees[0][0]
        return nb_conflicts
    def _write_hashcache_if_dirty(self):
        """Write out the hashcache if it is dirty."""
            # FIXME: stash the node in pending
            entry = inv[top_id]
            if entry.kind == 'directory':
                for name, child in entry.sorted_children():
                    dirblock.append((relroot + name, name, child.kind, None,
                        child.file_id, child.kind
                        ))
            yield (currentdir[0], entry.file_id), dirblock
            # push the user specified dirs from dirblock
            for dir in reversed(dirblock):
    def _get_rules_searcher(self, default_searcher):
        """See Tree._get_rules_searcher."""
        if self._rules_searcher is None:
            self._rules_searcher = super(WorkingTree,
                self)._get_rules_searcher(default_searcher)
        return self._rules_searcher

    def get_shelf_manager(self):
        """Return the ShelfManager for this WorkingTree."""
        from bzrlib.shelf import ShelfManager
        return ShelfManager(self, self._transport)

class WorkingTree2(WorkingTree):
    """This is the Format 2 working tree.

    This was the first weave based working tree.
     - uses os locks for locking.
     - uses the branch last-revision.
    """
    def _last_revision(self):
        """See Mutable.last_revision."""
        try:
            return self._transport.get_bytes('last-revision')
        except errors.NoSuchFile:
            return _mod_revision.NULL_REVISION

    def _change_last_revision(self, revision_id):
        """See WorkingTree._change_last_revision."""
        if revision_id is None or revision_id == _mod_revision.NULL_REVISION:
            try:
                self._transport.delete('last-revision')
            except errors.NoSuchFile:
                pass
            return False
        else:
            self._transport.put_bytes('last-revision', revision_id,
                mode=self.bzrdir._get_file_mode())
            return True

    def _get_check_refs(self):
        """Return the references needed to perform a check of this tree."""
        return [('trees', self.last_revision())]
    @needs_tree_write_lock
    def set_conflicts(self, conflicts):
        self._put_rio('conflicts', conflicts.to_stanzas(),
                      CONFLICT_HEADER_1)

    @needs_tree_write_lock
        """See WorkingTreeFormat.get_format_description()."""
        return "Working tree format 2"

    def _stub_initialize_on_transport(self, transport, file_mode):
        """Workaround: create control files for a remote working tree.

        This ensures that it can later be updated and dealt with locally,
        since BzrDirFormat6 and BzrDirFormat5 cannot represent dirs with
        no working tree.  (See bug #43064).
        """
        sio = StringIO()
        inv = inventory.Inventory()
        xml5.serializer_v5.write_inventory(inv, sio, working=True)
        sio.seek(0)
        transport.put_file('inventory', sio, file_mode)
        transport.put_bytes('pending-merges', '', file_mode)
    def initialize(self, a_bzrdir, revision_id=None, from_branch=None,
                   accelerator_tree=None, hardlink=False):
        """See WorkingTreeFormat.initialize()."""
        if not isinstance(a_bzrdir.transport, LocalTransport):
            raise errors.NotLocalUrl(a_bzrdir.transport.base)
    def _open_control_files(self, a_bzrdir):
        transport = a_bzrdir.get_workingtree_transport(None)
        return LockableFiles(transport, self._lock_file_name,
                             self._lock_class)
    def initialize(self, a_bzrdir, revision_id=None, from_branch=None,
                   accelerator_tree=None, hardlink=False):
        """See WorkingTreeFormat.initialize().

        :param revision_id: if supplied, create a working tree at a different
            revision than the branch is at.
        :param accelerator_tree: A tree which can be used for retrieving file
            contents more quickly than the revision tree, i.e. a workingtree.
            The revision tree will be used for cases where accelerator_tree's
            content is different.
        :param hardlink: If true, hard-link files from accelerator_tree,
            where possible.
        """
        if not isinstance(a_bzrdir.transport, LocalTransport):
            raise errors.NotLocalUrl(a_bzrdir.transport.base)