-# Copyright (C) 2005, 2006, 2007 Canonical Ltd
+# Copyright (C) 2005, 2006, 2007, 2008 Canonical Ltd

 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by

 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 """WorkingTree object and friends.

 A WorkingTree represents the editable working copy of a branch.
 Operations which represent the WorkingTree are also done here,
 such as renaming or adding files. The WorkingTree has an inventory
 which is updated by these operations. A commit produces a
 new revision based on the workingtree and its inventory.

 At the moment every WorkingTree has its own branch. Remote
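
# Editor's sketch (not part of the diff): a minimal use of the public
# WorkingTree API the module docstring describes. The path, file name and
# commit message are illustrative, and bzrlib must be importable.
from bzrlib.workingtree import WorkingTree

wt = WorkingTree.open('/path/to/branch')   # open the tree at its root
wt.add(['hello.txt'])                      # version a file that already exists on disk
wt.commit(message='add hello.txt')         # new revision built from the tree and its inventory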
 import bzrlib.branch
 from bzrlib.transport import get_transport
-from bzrlib.workingtree_4 import WorkingTreeFormat4
+from bzrlib.workingtree_4 import (

 from bzrlib import symbol_versioning
 from bzrlib.decorators import needs_read_lock, needs_write_lock
 from bzrlib.inventory import InventoryEntry, Inventory, ROOT_ID, TreeReference
-from bzrlib.lockable_files import LockableFiles, TransportLock
+from bzrlib.lockable_files import LockableFiles
 from bzrlib.lockdir import LockDir
 import bzrlib.mutabletree
 from bzrlib.mutabletree import needs_tree_write_lock
 class TreeEntry(object):
     """An entry that implements the minimum interface used by commands.

     This needs further inspection, it may be better to have
     InventoryEntries without ids - though that seems wrong. For now,
     this is a parallel hierarchy to InventoryEntry, and needs to become
     one of several things: decorates to that hierarchy, children of, or
     no InventoryEntry available - i.e. for unversioned objects.
     Perhaps they should be UnversionedEntry et al. ? - RBC 20051003
     def __eq__(self, other):
         # yes, this is ugly, TODO: best practice __eq__ style.
         return (isinstance(other, TreeEntry)
             and other.__class__ == self.__class__)

     def kind_character(self):
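
# Editor's sketch (not part of the diff): the "best practice __eq__ style"
# the TODO above alludes to usually returns NotImplemented for unrelated
# types and keeps __ne__ and __hash__ consistent with __eq__.
class Entry(object):
    def __eq__(self, other):
        if not isinstance(other, Entry):
            return NotImplemented
        return self.__class__ == other.__class__

    def __ne__(self, other):
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result

    def __hash__(self):
        return hash(self.__class__)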
     not listed in the Inventory and vice versa.

+    # override this to set the strategy for storing views
+    def _make_views(self):
+        return views.DisabledViews(self)

     def __init__(self, basedir='.',
                  branch=DEPRECATED_PARAMETER,
         # assume all other formats have their own control files.
         self._control_files = _control_files
+        self._transport = self._control_files._transport
         # update the whole cache up front and write to disk if anything changed;
         # in the future we might want to do this more selectively
         # two possible ways offer themselves : in self._unlock, write the cache

         wt_trans = self.bzrdir.get_workingtree_transport(None)
         cache_filename = wt_trans.local_abspath('stat-cache')
         self._hashcache = hashcache.HashCache(basedir, cache_filename,
-            self._control_files._file_mode)
+            self.bzrdir._get_file_mode(),
+            self._content_filter_stack_provider())
         hc = self._hashcache

         # is this scan needed ? it makes things kinda slow.

         # permitted to do this.
         self._set_inventory(_inventory, dirty=False)
         self._detect_case_handling()
+        self._rules_searcher = None
+        self.views = self._make_views()

     def _detect_case_handling(self):
         wt_trans = self.bzrdir.get_workingtree_transport(None)
     def supports_tree_reference(self):

+    def supports_content_filtering(self):
+        return self._format.supports_content_filtering()

+    def supports_views(self):
+        return self.views.supports_views()

     def _set_inventory(self, inv, dirty):
         """Set the internal cached inventory.

-        path = os.path.getcwdu()
+        path = osutils.getcwd()
         control = bzrdir.BzrDir.open(path, _unsupported)
         return control.open_workingtree(_unsupported)

     def open_containing(path=None):
         """Open an existing working tree which has its root about path.

         This probes for a working tree at path and searches upwards from there.

         Basically we keep looking up until we find the control directory or
     def basis_tree(self):
         """Return RevisionTree for the current last revision.

         If the left most parent is a ghost then the returned tree will be an
-        empty tree - one obtained by calling repository.revision_tree(None).
+        empty tree - one obtained by calling
+        repository.revision_tree(NULL_REVISION).

             revision_id = self.get_parent_ids()[0]

             # no parents, return an empty revision tree.
             # in the future this should return the tree for
             # 'empty:' - the implicit root empty tree.
-            return self.branch.repository.revision_tree(None)
+            return self.branch.repository.revision_tree(
+                _mod_revision.NULL_REVISION)

             return self.revision_tree(revision_id)
         except errors.NoSuchRevision:

         # at this point ?

             return self.branch.repository.revision_tree(revision_id)
-        except errors.RevisionNotPresent:
+        except (errors.RevisionNotPresent, errors.NoSuchRevision):
             # the basis tree *may* be a ghost or a low level error may have
-            # occured. If the revision is present, its a problem, if its not
+            # occurred. If the revision is present, its a problem, if its not

             if self.branch.repository.has_revision(revision_id):

             # the basis tree is a ghost so return an empty tree.
-            return self.branch.repository.revision_tree(None)
+            return self.branch.repository.revision_tree(
+                _mod_revision.NULL_REVISION)
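
# Editor's sketch (not part of the diff): with the change above, callers of
# basis_tree() always receive a real RevisionTree, even when the first parent
# is a ghost. Illustrative comparison of a tree against its basis; the path
# is hypothetical.
from bzrlib.workingtree import WorkingTree

wt = WorkingTree.open('.')
wt.lock_read()
try:
    basis = wt.basis_tree()          # empty revision tree if the parent is a ghost
    basis.lock_read()
    try:
        for change in wt.iter_changes(basis):
            print change             # working-tree changes relative to the basis
    finally:
        basis.unlock()
finally:
    wt.unlock()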
     def _cleanup(self):
         self._flush_ignore_list_cache()
     def relpath(self, path):
         """Return the local path portion from a given path.

         The path may be absolute or relative. If its a relative path it is
         interpreted relative to the python current working directory.

         return osutils.relpath(self.basedir, path)

     def has_filename(self, filename):
         return osutils.lexists(self.abspath(filename))
-    def get_file(self, file_id, path=None):
+    def get_file(self, file_id, path=None, filtered=True):
+        return self.get_file_with_stat(file_id, path, filtered=filtered)[0]

+    def get_file_with_stat(self, file_id, path=None, filtered=True,
+        """See Tree.get_file_with_stat."""
             path = self.id2path(file_id)
-        return self.get_file_byname(path)

-    def get_file_text(self, file_id):
-        return self.get_file(file_id).read()

-    def get_file_byname(self, filename):
-        return file(self.abspath(filename), 'rb')
+        file_obj = self.get_file_byname(path, filtered=False)
+        stat_value = _fstat(file_obj.fileno())
+        if filtered and self.supports_content_filtering():
+            filters = self._content_filter_stack(path)
+            file_obj = filtered_input_file(file_obj, filters)
+        return (file_obj, stat_value)

+    def get_file_text(self, file_id, path=None, filtered=True):
+        return self.get_file(file_id, path=path, filtered=filtered).read()

+    def get_file_byname(self, filename, filtered=True):
+        path = self.abspath(filename)
+        if filtered and self.supports_content_filtering():
+            filters = self._content_filter_stack(filename)
+            return filtered_input_file(f, filters)

+    def get_file_lines(self, file_id, path=None, filtered=True):
+        """See Tree.get_file_lines()"""
+        file = self.get_file(file_id, path, filtered=filtered)
+        return file.readlines()
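
# Editor's sketch (not bzrlib API): the get_file* methods above optionally
# layer a "content filter stack" over the raw file object before callers see
# it. Reduced to plain Python with a made-up convert-on-read filter:
from StringIO import StringIO

def crlf_to_lf(data):
    # illustrative filter: normalise line endings on read
    return data.replace('\r\n', '\n')

def filtered(file_obj, filters):
    # run the raw bytes through each filter and hand back a file-like object
    data = file_obj.read()
    for f in filters:
        data = f(data)
    return StringIO(data)

raw = StringIO('line one\r\nline two\r\n')    # stands in for the on-disk bytes
readable = filtered(raw, [crlf_to_lf])        # what a filtered get_file() returns
print readable.read()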
     def annotate_iter(self, file_id, default_revision=CURRENT_REVISION):

     def clone(self, to_bzrdir, revision_id=None):
         """Duplicate this working tree into to_bzr, including all state.

         Specifically modified files are kept as modified, but
         ignored and unknown files are discarded.

         If you want to make a new line of development, see bzrdir.sprout()

             If not None, the cloned tree will have its last revision set to
-            revision, and and difference between the source trees last revision
+            revision, and difference between the source trees last revision
             and this one merged in.

         # assumes the target bzr dir format is compatible.
-        result = self._format.initialize(to_bzrdir)
+        result = to_bzrdir.create_workingtree()
         self.copy_content_into(result, revision_id)
         """See MutableTree._add."""
         # TODO: Re-adding a file that is removed in the working copy
         # should probably put it back with the previous ID.
         # the read and write working inventory should not occur in this
         # function - they should be part of lock_write and unlock.
         inv = self.inventory
         for f, file_id, kind in zip(files, ids, kinds):

                 kind = 'tree-reference'
             return kind, None, None, None
         elif kind == 'symlink':
-            return ('symlink', None, None, os.readlink(abspath))
+            target = osutils.readlink(abspath)
+            return ('symlink', None, None, target)

             return (kind, None, None, None)
     def _set_merges_from_parent_ids(self, parent_ids):
         merges = parent_ids[1:]
-        self._control_files.put_bytes('pending-merges', '\n'.join(merges))
+        self._transport.put_bytes('pending-merges', '\n'.join(merges),
+            mode=self.bzrdir._get_file_mode())

+    def _filter_parent_ids_by_ancestry(self, revision_ids):
+        """Check that all merged revisions are proper 'heads'.
+
+        This will always return the first revision_id, and any merged revisions

+        if len(revision_ids) == 0:

+        graph = self.branch.repository.get_graph()
+        heads = graph.heads(revision_ids)
+        new_revision_ids = revision_ids[:1]
+        for revision_id in revision_ids[1:]:
+            if revision_id in heads and revision_id not in new_revision_ids:
+                new_revision_ids.append(revision_id)
+        if new_revision_ids != revision_ids:
+            trace.mutter('requested to set revision_ids = %s,'
+                ' but filtered to %s', revision_ids, new_revision_ids)
+        return new_revision_ids
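
# Editor's sketch (toy data, not bzrlib's Graph API): the filtering above keeps
# the first parent plus any merged revision that is still a "head", i.e. not
# already an ancestor of another candidate. With a hand-written ancestry map:
ancestry = {
    'rev-a': [],             # rev-a is an ancestor of rev-b
    'rev-b': ['rev-a'],
    'rev-c': [],
}

def ancestors(rev):
    seen, todo = set(), list(ancestry[rev])
    while todo:
        r = todo.pop()
        if r not in seen:
            seen.add(r)
            todo.extend(ancestry[r])
    return seen

def heads(revision_ids):
    # a revision is a head if no other candidate lists it among its ancestors
    return set(r for r in revision_ids
               if not any(r in ancestors(other)
                          for other in revision_ids if other != r))

print heads(['rev-b', 'rev-a', 'rev-c'])   # rev-a is dropped: it is merged into rev-b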
     @needs_tree_write_lock
     def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
         """Set the parent ids to revision_ids.

         See also set_parent_trees. This api will try to retrieve the tree data
         for each element of revision_ids from the trees repository. If you have
         tree data already available, it is more efficient to use

     def _put_rio(self, filename, stanzas, header):
         self._must_be_locked()
         my_file = rio_file(stanzas, header)
-        self._control_files.put(filename, my_file)
+        self._transport.put_file(filename, my_file,
+            mode=self.bzrdir._get_file_mode())
     @needs_write_lock # because merge pulls data into the branch.
     def merge_from_branch(self, branch, to_revision=None, from_revision=None,

     def merge_modified(self):
         """Return a dictionary of files modified by a merge.

         The list is initialized by WorkingTree.set_merge_modified, which is
         typically called after we make some automatic updates to the tree
         because of a merge.

         still in the working inventory and have that text hash.

-            hashfile = self._control_files.get('merge-hashes')
+            hashfile = self._transport.get('merge-hashes')
         except errors.NoSuchFile:

             if hashfile.next() != MERGE_MODIFIED_HEADER_1 + '\n':
                 raise errors.MergeModifiedFormatError()
         except StopIteration:
             raise errors.MergeModifiedFormatError()
         for s in RioReader(hashfile):
             # RioReader reads in Unicode, so convert file_ids back to utf8
             file_id = osutils.safe_file_id(s.get("file_id"), warn=False)
             if file_id not in self.inventory:

             text_hash = s.get("hash")
             if text_hash == self.get_file_sha1(file_id):
                 merge_hashes[file_id] = text_hash
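
# Editor's sketch (illustrative values): merge_modified() returns a dict of
# {file_id: text-hash-at-merge-time}; an entry survives only while the working
# file still has exactly that hash.
from bzrlib.workingtree import WorkingTree

wt = WorkingTree.open('.')
for file_id, sha1 in wt.merge_modified().items():
    print '%s was auto-modified by the last merge (hash %s)' % (file_id, sha1)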
     @needs_write_lock
     def mkdir(self, path, file_id=None):

     def get_symlink_target(self, file_id):
-        return os.readlink(self.id2abspath(file_id))
+        abspath = self.id2abspath(file_id)
+        target = osutils.readlink(abspath)

     @needs_write_lock
     def subsume(self, other_tree):

     def _directory_may_be_tree_reference(self, relpath):
         # as a special case, if a directory contains control files then
         # it's a tree reference, except that the root of the tree is not
         return relpath and osutils.isdir(self.abspath(relpath) + u"/.bzr")
         # TODO: We could ask all the control formats whether they
         sio = StringIO()
         self._serialize(self._inventory, sio)
-        self._control_files.put('inventory', sio)
+        self._transport.put_file('inventory', sio,
+            mode=self.bzrdir._get_file_mode())
         self._inventory_is_modified = False

     def _kind(self, relpath):
         return osutils.file_kind(self.abspath(relpath))
-    def list_files(self, include_root=False):
-        """Recursively list all files as (path, class, kind, id, entry).
+    def list_files(self, include_root=False, from_dir=None, recursive=True):
+        """List all files as (path, class, kind, id, entry).

         Lists, but does not descend into unversioned directories.

         This does not include files that have been deleted in this
-        Skips the control directory.
+        tree. Skips the control directory.

+        :param include_root: if True, do not return an entry for the root
+        :param from_dir: start from this directory or None for the root
+        :param recursive: whether to recurse into subdirectories or not

         # list_files is an iterator, so @needs_read_lock doesn't work properly
         # with it. So callers should be careful to always read_lock the tree.

             raise errors.ObjectNotLocked(self)

         inv = self.inventory
-        if include_root is True:
+        if from_dir is None and include_root is True:
             yield ('', 'V', 'directory', inv.root.file_id, inv.root)
         # Convert these into local objects to save lookup times
         pathjoin = osutils.pathjoin

         fk_entries = {'directory':TreeDirectory, 'file':TreeFile, 'symlink':TreeLink}

         # directory file_id, relative path, absolute path, reverse sorted children
-        children = os.listdir(self.basedir)
+        if from_dir is not None:
+            from_dir_id = inv.path2id(from_dir)
+            if from_dir_id is None:
+                # Directory not versioned
+            from_dir_abspath = pathjoin(self.basedir, from_dir)
+            from_dir_id = inv.root.file_id
+            from_dir_abspath = self.basedir
+        children = os.listdir(from_dir_abspath)
         children.sort()
         # jam 20060527 The kernel sized tree seems equivalent whether we
         # use a deque and popleft to keep them sorted, or if we use a plain
         # list and just reverse() them.
         children = collections.deque(children)
-        stack = [(inv.root.file_id, u'', self.basedir, children)]
+        stack = [(from_dir_id, u'', from_dir_abspath, children)]

             from_dir_id, from_dir_relpath, from_dir_abspath, children = stack[-1]

                 except KeyError:
                     yield fp[1:], c, fk, None, TreeEntry()

                 if fk != 'directory':

-                    # But do this child first
+                    # But do this child first if recursing down
                     new_children = os.listdir(fap)
                     new_children = collections.deque(new_children)
                     stack.append((f_ie.file_id, fp, fap, new_children))
                     # Break out of inner loop,
                     # so that we start outer loop with child

             # if we finished all children, pop it off the stack
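
# Editor's sketch (not part of the diff): list_files() yields 5-tuples of
# (path, class, kind, file_id, entry), where class is 'V' (versioned),
# '?' (unknown) or 'I' (ignored). The new from_dir/recursive arguments limit
# the walk. Illustrative use; the path is hypothetical.
from bzrlib.workingtree import WorkingTree

wt = WorkingTree.open('.')
wt.lock_read()                      # list_files is an iterator, so lock explicitly
try:
    for path, klass, kind, file_id, entry in wt.list_files(recursive=False):
        print klass, kind, path
finally:
    wt.unlock()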
         to_dir must exist in the inventory.

         If to_dir exists and is a directory, the files are moved into
         it, keeping their old names.

         Note that to_dir is only the last component of the new name;
         this doesn't change the directory.

                 only_change_inv = True
             elif self.has_filename(from_rel) and not self.has_filename(to_rel):
                 only_change_inv = False
-            elif (sys.platform == 'win32'
-                  and from_rel.lower() == to_rel.lower()
-                  and self.has_filename(from_rel)):
+            elif (not self.case_sensitive
+                  and from_rel.lower() == to_rel.lower()
+                  and self.has_filename(from_rel)):
                 only_change_inv = False
         # something is wrong, so lets determine what exactly

         These are files in the working directory that are not versioned or
         control files or ignored.

         # force the extras method to be fully executed before returning, to
         # prevent race conditions with the lock

             [subp for subp in self.extras() if not self.is_ignored(subp)])

         :raises: NoSuchId if any fileid is not currently versioned.

         for file_id in file_ids:
+            if file_id not in self._inventory:
+                raise errors.NoSuchId(self, file_id)
+        for file_id in file_ids:
             if self._inventory.has_id(file_id):
                 self._inventory.remove_recursive_id(file_id)
-                raise errors.NoSuchId(self, file_id)

         if len(file_ids):
             # in the future this should just set a dirty bit to wait for the
             # final unlock. However, until all methods of workingtree start
             # with the current in-memory inventory rather than triggering
             # a read, it is more complex - we need to teach read_inventory
             # to know when to read, and when to not read first... and possibly
             # to save first when the in memory one may be corrupted.
             # so for now, we just only write it if it is indeed dirty.
             # - RBC 20060907
             self._write_inventory(self._inventory)

     def _iter_conflicts(self):
         conflicted = set()
         for info in self.list_files():
     @needs_write_lock
     def pull(self, source, overwrite=False, stop_revision=None,
-             change_reporter=None, possible_transports=None):
+             change_reporter=None, possible_transports=None, local=False):
         top_pb = bzrlib.ui.ui_factory.nested_progress_bar()
         source.lock_read()

             old_revision_info = self.branch.last_revision_info()
             basis_tree = self.basis_tree()
             count = self.branch.pull(source, overwrite, stop_revision,
-                                     possible_transports=possible_transports)
+                                     possible_transports=possible_transports,
             new_revision_info = self.branch.last_revision_info()
             if new_revision_info != old_revision_info:
                 pp.next_phase()

                 # reuse the revisiontree we merged against to set the new

                 parent_trees = [(self.branch.last_revision(), new_basis_tree)]
                 # we have to pull the merge trees out again, because
                 # merge_inner has set the ids. - this corner is not yet
                 # layered well enough to prevent double handling.
                 # XXX TODO: Fix the double handling: telling the tree about
                 # the already known parent data is wasteful.
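
# Editor's sketch (not part of the diff): WorkingTree.pull() delegates to
# Branch.pull() and then merges the new basis into the working files, updating
# the parent list as the comments above describe. Paths are illustrative.
from bzrlib.branch import Branch
from bzrlib.workingtree import WorkingTree

wt = WorkingTree.open('/path/to/checkout')
source = Branch.open('/path/to/upstream')
count = wt.pull(source)            # returns whatever Branch.pull() reported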
             for subf in os.listdir(dirabs):
+                if self.bzrdir.is_control_filename(subf):

                 if subf not in dir_entry.children:
-                    subf_norm, can_access = osutils.normalized_filename(subf)
+                         can_access) = osutils.normalized_filename(subf)
+                    except UnicodeDecodeError:
+                        path_os_enc = path.encode(osutils._fs_enc)
+                        relpath = path_os_enc + '/' + subf
+                        raise errors.BadFilenameEncoding(relpath,
                     if subf_norm != subf and can_access:
                         if subf_norm not in dir_entry.children:
                             fl.append(subf_norm)

                         fl.append(subf)

             for subf in fl:
                 subp = pathjoin(path, subf)
     def _reset_data(self):
         """Reset transient data that cannot be revalidated."""
         self._inventory_is_modified = False
-        result = self._deserialize(self._control_files.get('inventory'))
+        result = self._deserialize(self._transport.get('inventory'))
         self._set_inventory(result, dirty=False)
     @needs_tree_write_lock

         """Write the basis inventory XML to the basis-inventory file"""
         path = self._basis_inventory_name()
         sio = StringIO(xml)
-        self._control_files.put(path, sio)
+        self._transport.put_file(path, sio,
+            mode=self.bzrdir._get_file_mode())

     def _create_basis_xml_from_inventory(self, revision_id, inventory):
         """Create the text that will be saved in basis-inventory"""

         # as commit already has that ready-to-use [while the format is the
         # same, that is].

             # this double handles the inventory - unpack and repack -
             # but is easier to understand. We can/should put a conditional
             # in here based on whether the inventory is in the latest format
             # - perhaps we should repack all inventories on a repository

             # the fast path is to copy the raw xml from the repository. If the
             # xml contains 'revision_id="', then we assume the right
             # revision_id is set. We must check for this full string, because a
             # root node id can legitimately look like 'revision_id' but cannot
             # contain a '"'.
             xml = self.branch.repository.get_inventory_xml(new_revision)
             firstline = xml.split('\n', 1)[0]
             if (not 'revision_id="' in firstline or
                 'format="7"' not in firstline):
                 inv = self.branch.repository.deserialise_inventory(
                     new_revision, xml)
     def read_basis_inventory(self):
         """Read the cached basis inventory."""
         path = self._basis_inventory_name()
-        return self._control_files.get(path).read()
+        return self._transport.get_bytes(path)

     @needs_read_lock
     def read_working_inventory(self):
         """Read the working inventory.

         :raises errors.InventoryModified: read_working_inventory will fail
             when the current in memory inventory has been modified.

         # conceptually this should be an implementation detail of the tree.
         # XXX: Deprecate this.
         # ElementTree does its own conversion from UTF-8, so open in

         if self._inventory_is_modified:
             raise errors.InventoryModified(self)
-        result = self._deserialize(self._control_files.get('inventory'))
+        result = self._deserialize(self._transport.get('inventory'))
         self._set_inventory(result, dirty=False)
             # Recurse directory and add all files
             # so we can check if they have changed.
             for parent_info, file_infos in\
-                osutils.walkdirs(self.abspath(directory),
-                for relpath, basename, kind, lstat, abspath in file_infos:
+                self.walkdirs(directory):
+                for relpath, basename, kind, lstat, fileid, kind in file_infos:
                     # Is it versioned or ignored?
                     if self.path2id(relpath) or self.is_ignored(relpath):
                         # Add nested content for deletion.

             tree_delta.unversioned.extend((unknown_file,))
             raise errors.BzrRemoveChangedFilesError(tree_delta)

-        # Build inv_delta and delete files where applicaple,
+        # Build inv_delta and delete files where applicable,
         # do this before any modifications to inventory.
         for f in files:
             fid = self.path2id(f)
         name = os.path.basename(path)

         # fixme, there should be a factory function inv,add_??
         if kind == 'directory':
             inv.add(InventoryDirectory(file_id, name, parent))
         elif kind == 'file':

     def _set_root_id(self, file_id):
         """Set the root id for this tree, in a format specific manner.

         :param file_id: The file id to assign to the root. It must not be
             present in the current inventory or an error will occur. It must
             not be None, but rather a valid file id.
     def unlock(self):
         """See Branch.unlock.

         WorkingTree locking just uses the Branch locking facilities.
         This is current because all working trees have an embedded branch
         within them. IF in the future, we were to make branch data shareable
         between multiple working trees, i.e. via shared storage, then we
         would probably want to lock both the local tree, and the branch.

         raise NotImplementedError(self.unlock)

         # cant set that until we update the working trees last revision to be
         # one from the new branch, because it will just get absorbed by the
         # parent de-duplication logic.

         # We MUST save it even if an error occurs, because otherwise the users
         # local work is unreferenced and will appear to have been lost.

         last_rev = self.get_parent_ids()[0]

             parent_trees = [(self.branch.last_revision(), to_tree)]
             merges = self.get_parent_ids()[1:]
             # Ideally we ask the tree for the trees here, that way the working
-            # tree can decide whether to give us teh entire tree or give us a
+            # tree can decide whether to give us the entire tree or give us a
             # lazy initialised tree. dirstate for instance will have the trees
             # in ram already, whereas a last-revision + basis-inventory tree
             # will not, but also does not need them when setting parents.
                     bzrdir_loc = bisect_left(cur_disk_dir_content,
                         ('.bzr', '.bzr'))
-                    if cur_disk_dir_content[bzrdir_loc][0] == '.bzr':
+                    if (bzrdir_loc < len(cur_disk_dir_content)
+                        and self.bzrdir.is_control_filename(
+                            cur_disk_dir_content[bzrdir_loc][0])):
                         # we dont yield the contents of, or, .bzr itself.
                         del cur_disk_dir_content[bzrdir_loc]
             if inv_finished:

             # FIXME: stash the node in pending
             entry = inv[top_id]
-            for name, child in entry.sorted_children():
-                dirblock.append((relroot + name, name, child.kind, None,
-                    child.file_id, child.kind
+            if entry.kind == 'directory':
+                for name, child in entry.sorted_children():
+                    dirblock.append((relroot + name, name, child.kind, None,
+                        child.file_id, child.kind
             yield (currentdir[0], entry.file_id), dirblock
             # push the user specified dirs from dirblock
             for dir in reversed(dirblock):

             self.set_conflicts(un_resolved)
             return un_resolved, resolved
+        tree_basis = self.basis_tree()
+        tree_basis.lock_read()

+            repo_basis = self.branch.repository.revision_tree(
+                self.last_revision())
+            if len(list(repo_basis.iter_changes(tree_basis))) > 0:
+                raise errors.BzrCheckError(
+                    "Mismatched basis inventory content.")

     def _validate(self):
         """Validate internal structures.

+    def _get_rules_searcher(self, default_searcher):
+        """See Tree._get_rules_searcher."""
+        if self._rules_searcher is None:
+            self._rules_searcher = super(WorkingTree,
+                self)._get_rules_searcher(default_searcher)
+        return self._rules_searcher

+    def get_shelf_manager(self):
+        """Return the ShelfManager for this WorkingTree."""
+        from bzrlib.shelf import ShelfManager
+        return ShelfManager(self, self._transport)
 class WorkingTree2(WorkingTree):
     """This is the Format 2 working tree.

     This was the first weave based working tree.
      - uses os locks for locking.
      - uses the branch last-revision.
     def _last_revision(self):
         """See Mutable.last_revision."""

-            return self._control_files.get('last-revision').read()
+            return self._transport.get_bytes('last-revision')
         except errors.NoSuchFile:
             return _mod_revision.NULL_REVISION

         """See WorkingTree._change_last_revision."""
         if revision_id is None or revision_id == NULL_REVISION:

-                self._control_files._transport.delete('last-revision')
+                self._transport.delete('last-revision')
             except errors.NoSuchFile:

-            self._control_files.put_bytes('last-revision', revision_id)
+            self._transport.put_bytes('last-revision', revision_id,
+                mode=self.bzrdir._get_file_mode())
     @needs_tree_write_lock
     def set_conflicts(self, conflicts):
         self._put_rio('conflicts', conflicts.to_stanzas(),
                       CONFLICT_HEADER_1)

     @needs_tree_write_lock

     @needs_read_lock
     def conflicts(self):

-            confile = self._control_files.get('conflicts')
+            confile = self._transport.get('conflicts')
         except errors.NoSuchFile:
             return _mod_conflicts.ConflictList()

             if confile.next() != CONFLICT_HEADER_1 + '\n':
                 raise errors.ConflictFormatError()
         except StopIteration:
             raise errors.ConflictFormatError()
         return _mod_conflicts.ConflictList.from_stanzas(RioReader(confile))

     def unlock(self):
         # do non-implementation specific cleanup
      * a format string,
      * an open routine.

     Formats are placed in a dict by their format string for reference
     during workingtree opening. Its not required that these be instances, they
     can be classes themselves with class methods - it simply depends on
     whether state is needed for a given format or not.

     Once a format is deprecated, just deprecate the initialize and open
     methods on the format class. Do not deprecate the object, as the
     object will be created every time regardless.
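
# Editor's sketch (simplified, not the real classes): the registry described
# above is a dict keyed by each format's format string, consulted when a
# working tree is opened.
_formats = {}

class DemoFormat(object):
    @classmethod
    def get_format_string(cls):
        return "Demo Working Tree Format 1\n"

def register_format(format):
    _formats[format.get_format_string()] = format

def find_format(format_string):
    # lookup by the bytes read from the tree's 'format' file
    return _formats[format_string]

register_format(DemoFormat())
print find_format("Demo Working Tree Format 1\n")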
         """Is this format supported?

         Supported formats can be initialized and opened.
         Unsupported formats may not support initialization or committing or
         some other features depending on the reason for not being supported.

+    def supports_content_filtering(self):
+        """True if this format supports content filtering."""

+    def supports_views(self):
+        """True if this format supports stored views."""

     def register_format(klass, format):
         klass._formats[format.get_format_string()] = format
         """See WorkingTreeFormat.get_format_description()."""
         return "Working tree format 2"

-    def stub_initialize_remote(self, control_files):
-        """As a special workaround create critical control files for a remote working tree
+    def _stub_initialize_on_transport(self, transport, file_mode):
+        """Workaround: create control files for a remote working tree.

         This ensures that it can later be updated and dealt with locally,
         since BzrDirFormat6 and BzrDirFormat5 cannot represent dirs with
         no working tree. (See bug #43064).

         sio = StringIO()
         inv = Inventory()
         xml5.serializer_v5.write_inventory(inv, sio, working=True)

-        control_files.put('inventory', sio)
-        control_files.put_bytes('pending-merges', '')
+        transport.put_file('inventory', sio, file_mode)
+        transport.put_bytes('pending-merges', '', file_mode)

     def initialize(self, a_bzrdir, revision_id=None, from_branch=None,
                    accelerator_tree=None, hardlink=False):
     def _open_control_files(self, a_bzrdir):
         transport = a_bzrdir.get_workingtree_transport(None)
         return LockableFiles(transport, self._lock_file_name,
                              self._lock_class)

     def initialize(self, a_bzrdir, revision_id=None, from_branch=None,
                    accelerator_tree=None, hardlink=False):
         """See WorkingTreeFormat.initialize().

         :param revision_id: if supplied, create a working tree at a different
             revision than the branch is at.
         :param accelerator_tree: A tree which can be used for retrieving file

         control_files = self._open_control_files(a_bzrdir)
         control_files.create_lock()
         control_files.lock_write()
-        control_files.put_utf8('format', self.get_format_string())
+        transport.put_bytes('format', self.get_format_string(),
+            mode=a_bzrdir._get_file_mode())
         if from_branch is not None:
             branch = from_branch
 __default_format = WorkingTreeFormat4()
 WorkingTreeFormat.register_format(__default_format)
+WorkingTreeFormat.register_format(WorkingTreeFormat6())
+WorkingTreeFormat.register_format(WorkingTreeFormat5())
 WorkingTreeFormat.register_format(WorkingTreeFormat3())
 WorkingTreeFormat.set_default_format(__default_format)
 # formats which have no format string are not discoverable