 WorkingTree.open(dir).

 # TODO: Give the workingtree sole responsibility for the working inventory;
 # remove the variable and references to it from the branch. This may require
 # updating the commit code so as to update the inventory within the working
 # copy, and making sure there's only one WorkingTree for any directory on disk.
 # At the moment they may alias the inventory and have old copies of it in
 # memory. (Now done? -- mbp 20060309)

 from cStringIO import StringIO
 from bzrlib.filters import filtered_input_file
 from bzrlib.trace import mutter, note
 from bzrlib.transport.local import LocalTransport
-from bzrlib.progress import DummyProgress, ProgressPhase
 from bzrlib.revision import CURRENT_REVISION
 from bzrlib.rio import RioReader, rio_file, Stanza
 from bzrlib.symbol_versioning import (

         self._control_files.break_lock()
         self.branch.break_lock()
+    def _get_check_refs(self):
+        """Return the references needed to perform a check of this tree.
+
+        The default implementation returns no refs, and is only suitable for
+        trees that have no local caching and can commit on ghosts at any time.
+
+        :seealso: bzrlib.check for details about check_refs.

     def requires_rich_root(self):
         return self._format.requires_rich_root
             path = osutils.getcwd()
         control, relpath = bzrdir.BzrDir.open_containing(path)
         return control.open_workingtree(), relpath

+    def open_containing_paths(file_list, default_directory='.',
+                              canonicalize=True, apply_view=True):
+        """Open the WorkingTree that contains a set of paths.
+
+        Fail if the paths given are not all in a single tree.
+
+        This is used for the many command-line interfaces that take a list of
+        any number of files and that require they all be in the same tree.
+        """
+        # recommended replacement for builtins.internal_tree_files
+        if file_list is None or len(file_list) == 0:
+            tree = WorkingTree.open_containing(default_directory)[0]
+            # XXX: doesn't really belong here, and seems to have the strange
+            # side effect of making it return a bunch of files, not the whole
+            # tree -- mbp 20100716
+            if tree.supports_views() and apply_view:
+                view_files = tree.views.lookup_view()
+                    file_list = view_files
+                    view_str = views.view_display_str(view_files)
+                    note("Ignoring files outside view. View is %s" % view_str)
+            return tree, file_list
+        tree = WorkingTree.open_containing(file_list[0])[0]
+        return tree, tree.safe_relpath_files(file_list, canonicalize,
+            apply_view=apply_view)
+
+    def safe_relpath_files(self, file_list, canonicalize=True, apply_view=True):
+        """Convert file_list into a list of relpaths in tree.
+
+        :param self: A tree to operate on.
+        :param file_list: A list of user provided paths or None.
+        :param apply_view: if True and a view is set, apply it or check that
+            specified files are within it
+        :return: A list of relative paths.
+        :raises errors.PathNotChild: When a provided path is in a different self
+        if file_list is None:
+        if self.supports_views() and apply_view:
+            view_files = self.views.lookup_view()
+        # self.relpath exists as a "thunk" to osutils, but canonical_relpath
+        # doesn't - fix that up here before we enter the loop.
+            fixer = lambda p: osutils.canonical_relpath(self.basedir, p)
+        for filename in file_list:
+            relpath = fixer(osutils.dereference_path(filename))
+            if view_files and not osutils.is_inside_any(view_files, relpath):
+                raise errors.FileOutsideView(filename, view_files)
+            new_list.append(relpath)
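The new open_containing_paths() staticmethod is the advertised replacement for builtins.internal_tree_files: it opens the tree containing the first path, applies any active view, and hands the remaining paths to safe_relpath_files(). A minimal usage sketch (the paths are illustrative and assume a working tree exists above them):

    from bzrlib.workingtree import WorkingTree

    # Raises errors.PathNotChild if the paths span more than one tree, and
    # errors.FileOutsideView if a view is set and a path falls outside it.
    tree, relpaths = WorkingTree.open_containing_paths(
        ['work/README', 'work/doc/index.txt'])
    print tree.basedir, relpaths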
     def open_downlevel(path=None):
         """Open an unsupported working tree.

                 return True, None
                 return True, tree
+        t = transport.get_transport(location)
+        iterator = bzrdir.BzrDir.find_bzrdirs(t, evaluate=evaluate,
-        transport = get_transport(location)
-        iterator = bzrdir.BzrDir.find_bzrdirs(transport, evaluate=evaluate,
                                               list_current=list_current)
+        return [tr for tr in iterator if tr is not None]
-        return [t for t in iterator if t is not None]

     # should be deprecated - this is slow and in any case treating them as a
     # container is (we now know) bad style -- mbp 20070302
         return (file_obj, stat_value)

     def get_file_text(self, file_id, path=None, filtered=True):
+        my_file = self.get_file(file_id, path=path, filtered=filtered)
+            return my_file.read()
-        return self.get_file(file_id, path=path, filtered=filtered).read()

     def get_file_byname(self, filename, filtered=True):
         path = self.abspath(filename)
             # Now we have the parents of this content
             annotator = self.branch.repository.texts.get_annotator()
+            text = self.get_file_text(file_id)
-            text = self.get_file(file_id).read()
             this_key =(file_id, default_revision)
             annotator.add_special_text(this_key, file_parent_keys, text)
             annotations = [(key[-1], line)
     def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
         file_id = self.path2id(path)
+            # For unversioned files on win32, we just assume they are not
         return self._inventory[file_id].executable

     def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
         kind = _mapper(stat_result.st_mode)
         if kind == 'file':
+            return self._file_content_summary(path, stat_result)
-            size = stat_result.st_size
-            # try for a stat cache lookup
-            executable = self._is_executable_from_path_and_stat(path, stat_result)
-            return (kind, size, executable, self._sha_from_stat(
         elif kind == 'directory':
             # perhaps it looks like a plain directory, but it's really a
             return (kind, None, None, None)

+    def _file_content_summary(self, path, stat_result):
+        size = stat_result.st_size
+        executable = self._is_executable_from_path_and_stat(path, stat_result)
+        # try for a stat cache lookup
+        return ('file', size, executable, self._sha_from_stat(
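The regular-file branch of path_content_summary() now lives in the dedicated _file_content_summary() helper; both return the same 4-tuple. A short sketch of reading that summary, assuming the script runs inside a working tree containing a README file (both assumptions are illustrative):

    from bzrlib.workingtree import WorkingTree

    tree = WorkingTree.open_containing('.')[0]
    tree.lock_read()
    try:
        # Regular files yield ('file', size, executable, sha1-or-None);
        # directories yield ('directory', None, None, None).
        kind, size, executable, sha1 = tree.path_content_summary('README')
        print kind, size, executable
    finally:
        tree.unlock()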
     def _check_parents_for_ghosts(self, revision_ids, allow_leftmost_as_ghost):
         """Common ghost checking functionality from set_parent_*.

             branch.last_revision().
         """
         from bzrlib.merge import Merger, Merge3Merger
+        merger = Merger(self.branch, this_tree=self)
+        # check that there are no local alterations
+        if not force and self.has_changes():
+            raise errors.UncommittedChanges(self)
+        if to_revision is None:
+            to_revision = _mod_revision.ensure_null(branch.last_revision())
+        merger.other_rev_id = to_revision
+        if _mod_revision.is_null(merger.other_rev_id):
+            raise errors.NoCommits(branch)
+        self.branch.fetch(branch, last_revision=merger.other_rev_id)
+        merger.other_basis = merger.other_rev_id
+        merger.other_tree = self.branch.repository.revision_tree(
+        merger.other_branch = branch
+        if from_revision is None:
+            merger.set_base_revision(from_revision, branch)
+        if merger.base_rev_id == merger.other_rev_id:
+            raise errors.PointlessMerge
+        merger.backup_files = False
+        if merge_type is None:
+            merger.merge_type = Merge3Merger
+            merger.merge_type = merge_type
+        merger.set_interesting_files(None)
+        merger.show_base = False
+        merger.reprocess = False
+        conflicts = merger.do_merge()
+        merger.set_pending()
-        pb = ui.ui_factory.nested_progress_bar()
-            merger = Merger(self.branch, this_tree=self, pb=pb)
-            merger.pp = ProgressPhase("Merge phase", 5, pb)
-            merger.pp.next_phase()
-            # check that there are no
-            merger.check_basis(check_clean=True, require_commits=False)
-            if to_revision is None:
-                to_revision = _mod_revision.ensure_null(branch.last_revision())
-            merger.other_rev_id = to_revision
-            if _mod_revision.is_null(merger.other_rev_id):
-                raise errors.NoCommits(branch)
-            self.branch.fetch(branch, last_revision=merger.other_rev_id)
-            merger.other_basis = merger.other_rev_id
-            merger.other_tree = self.branch.repository.revision_tree(
-            merger.other_branch = branch
-            merger.pp.next_phase()
-            if from_revision is None:
-                merger.set_base_revision(from_revision, branch)
-            if merger.base_rev_id == merger.other_rev_id:
-                raise errors.PointlessMerge
-            merger.backup_files = False
-            if merge_type is None:
-                merger.merge_type = Merge3Merger
-                merger.merge_type = merge_type
-            merger.set_interesting_files(None)
-            merger.show_base = False
-            merger.reprocess = False
-            conflicts = merger.do_merge()
         return conflicts
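The rewritten merge_from_branch() drives the same Merger machinery without the progress-bar and check_basis() plumbing, refusing up front if the tree has uncommitted changes (unless force=True). A minimal caller sketch, assuming another branch lives at '../other' (path illustrative):

    from bzrlib.branch import Branch
    from bzrlib.workingtree import WorkingTree

    tree = WorkingTree.open_containing('.')[0]
    other = Branch.open('../other')
    # Raises errors.UncommittedChanges when force=False and the tree is dirty;
    # otherwise returns the conflict count from do_merge().
    conflicts = tree.merge_from_branch(other, force=False)
    print 'conflicts:', conflicts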
     @needs_read_lock

         tree_transport = self.bzrdir.root_transport.clone(sub_path)
         if tree_transport.base != branch_transport.base:
             tree_bzrdir = format.initialize_on_transport(tree_transport)
+            branch.BranchReferenceFormat().initialize(tree_bzrdir,
+                target_branch=new_branch)
-            branch.BranchReferenceFormat().initialize(tree_bzrdir, new_branch)
             tree_bzrdir = branch_bzrdir
         wt = tree_bzrdir.create_workingtree(_mod_revision.NULL_REVISION)
                 # absolute path
                 fap = from_dir_abspath + '/' + f
+                dir_ie = inv[from_dir_id]
+                if dir_ie.kind == 'directory':
+                    f_ie = dir_ie.children.get(f)
-                f_ie = inv.get_child(from_dir_id, f)
                 elif self.is_ignored(fp[1:]):
+                    # we may not have found this file, because of a unicode
+                    # issue, or because the directory was actually a symlink.
-                    # we may not have found this file, because of a unicode issue
                     f_norm, can_access = osutils.normalized_filename(f)
                     if f == f_norm or not can_access:
                         # No change, so treat this file normally
         # check for deprecated use of signature
         if to_dir is None:
+            raise TypeError('You must supply a target directory')
-            to_dir = kwargs.get('to_name', None)
-                raise TypeError('You must supply a target directory')
-                symbol_versioning.warn('The parameter to_name was deprecated'
-                                       ' in version 0.13. Use to_dir instead',
         # check destination directory
         if isinstance(from_paths, basestring):
             raise ValueError()
         from_tail = splitpath(from_rel)[-1]
         from_id = inv.path2id(from_rel)
         if from_id is None:
+            # if file is missing in the inventory maybe it's in the basis_tree
+            basis_tree = self.branch.basis_tree()
+            from_id = basis_tree.path2id(from_rel)
+                raise errors.BzrRenameFailedError(from_rel,to_rel,
+                    errors.NotVersionedError(path=str(from_rel)))
+            # put entry back in the inventory so we can rename it
+            from_entry = basis_tree.inventory[from_id].copy()
+            from_entry = inv[from_id]
-            raise errors.BzrRenameFailedError(from_rel,to_rel,
-                errors.NotVersionedError(path=str(from_rel)))
-        from_entry = inv[from_id]
         from_parent_id = from_entry.parent_id
         to_dir, to_tail = os.path.split(to_rel)
         to_dir_id = inv.path2id(to_dir)
     @needs_write_lock
     def pull(self, source, overwrite=False, stop_revision=None,
+             change_reporter=None, possible_transports=None, local=False,
-             change_reporter=None, possible_transports=None, local=False):
-        top_pb = ui.ui_factory.nested_progress_bar()
         source.lock_read()
-            pp = ProgressPhase("Pull phase", 2, top_pb)
             old_revision_info = self.branch.last_revision_info()
             basis_tree = self.basis_tree()
             count = self.branch.pull(source, overwrite, stop_revision,

                                 new_basis_tree,
                                 this_tree=self,
+                                change_reporter=change_reporter,
+                                show_base=show_base)
+                    basis_root_id = basis_tree.get_root_id()
+                    new_root_id = new_basis_tree.get_root_id()
+                    if basis_root_id != new_root_id:
+                        self.set_root_id(new_root_id)
-                                change_reporter=change_reporter)
-                    if (basis_tree.inventory.root is None and
-                        new_basis_tree.inventory.root is not None):
-                        self.set_root_id(new_basis_tree.get_root_id())
                     basis_tree.unlock()
                 # TODO - dedup parents list with things merged by pull ?
                 # reuse the revisiontree we merged against to set the new
         r"""Check whether the filename matches an ignore pattern.

         Patterns containing '/' or '\' need to match the whole path;
+        others match against only the last component. Patterns starting
+        with '!' are ignore exceptions. Exceptions take precedence
+        over regular patterns and cause the filename to not be ignored.
-        others match against only the last component.

         If the file is ignored, returns the pattern which caused it to
         be ignored, otherwise None. So this can simply be used as a
         boolean if desired."""
         if getattr(self, '_ignoreglobster', None) is None:
+            self._ignoreglobster = globbing.ExceptionGlobster(self.get_ignore_list())
-            self._ignoreglobster = globbing.Globster(self.get_ignore_list())
         return self._ignoreglobster.match(filename)
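With ExceptionGlobster, a pattern starting with '!' re-includes files that an earlier pattern would ignore. A small sketch of the effect (the patterns and file names are illustrative):

    from bzrlib.workingtree import WorkingTree

    tree = WorkingTree.open_containing('.')[0]
    # With a .bzrignore containing:
    #   *.o
    #   !keep.o
    # the exception takes precedence over the regular pattern.
    print tree.is_ignored('scratch.o')   # the matching pattern, '*.o'
    print tree.is_ignored('keep.o')      # None: not ignored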
     def kind(self, file_id):

             raise errors.ObjectNotLocked(self)

     def lock_read(self):
+        """Lock the tree for reading.
+
+        This also locks the branch, and can be unlocked via self.unlock().
+
+        :return: A bzrlib.lock.LogicalLockResult.
+        """
-        """See Branch.lock_read, and WorkingTree.unlock."""
         if not self.is_locked():
             self._reset_data()
         self.branch.lock_read()
+            self._control_files.lock_read()
+            return LogicalLockResult(self.unlock)
-            return self._control_files.lock_read()
             self.branch.unlock()

     def lock_tree_write(self):
+        """See MutableTree.lock_tree_write, and WorkingTree.unlock.
+
+        :return: A bzrlib.lock.LogicalLockResult.
+        """
-        """See MutableTree.lock_tree_write, and WorkingTree.unlock."""
         if not self.is_locked():
             self._reset_data()
         self.branch.lock_read()
+            self._control_files.lock_write()
+            return LogicalLockResult(self.unlock)
-            return self._control_files.lock_write()
             self.branch.unlock()

     def lock_write(self):
+        """See MutableTree.lock_write, and WorkingTree.unlock.
+
+        :return: A bzrlib.lock.LogicalLockResult.
+        """
-        """See MutableTree.lock_write, and WorkingTree.unlock."""
         if not self.is_locked():
             self._reset_data()
         self.branch.lock_write()
+            self._control_files.lock_write()
+            return LogicalLockResult(self.unlock)
-            return self._control_files.lock_write()
             self.branch.unlock()
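All three lock methods now return a bzrlib.lock.LogicalLockResult wrapping self.unlock instead of whatever the control-files lock returned. Callers keep the usual try/finally shape; a minimal sketch:

    from bzrlib.workingtree import WorkingTree

    tree = WorkingTree.open_containing('.')[0]
    lock = tree.lock_read()      # LogicalLockResult(self.unlock)
    try:
        print tree.last_revision()
    finally:
        lock.unlock()            # equivalent to tree.unlock()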
             # revision_id is set. We must check for this full string, because a
             # root node id can legitimately look like 'revision_id' but cannot
             # contain a '"'.
+            xml = self.branch.repository._get_inventory_xml(new_revision)
-            xml = self.branch.repository.get_inventory_xml(new_revision)
             firstline = xml.split('\n', 1)[0]
             if (not 'revision_id="' in firstline or
                 'format="7"' not in firstline):
+                inv = self.branch.repository._serializer.read_inventory_from_string(
-                inv = self.branch.repository.deserialise_inventory(
                 xml = self._create_basis_xml_from_inventory(new_revision, inv)
                 self._write_basis_inventory(xml)
         except (errors.NoSuchRevision, errors.RevisionNotPresent):
+        all_files = set() # specified and nested files
         unknown_nested_files=set()
+            to_file = sys.stdout
+        files_to_backup = []

         def recurse_directory_to_add_files(directory):
             # Recurse directory and add all files
             # so we can check if they have changed.
+            for parent_info, file_infos in self.walkdirs(directory):
-            for parent_info, file_infos in\
-                self.walkdirs(directory):
                 for relpath, basename, kind, lstat, fileid, kind in file_infos:
                     # Is it versioned or ignored?
+                    if self.path2id(relpath):
-                    if self.path2id(relpath) or self.is_ignored(relpath):
                         # Add nested content for deletion.
+                        all_files.add(relpath)
-                        new_files.add(relpath)
+                        # Files which are not versioned
-                        # Files which are not versioned and not ignored
                         # should be treated as unknown.
+                        files_to_backup.append(relpath)
-                        unknown_nested_files.add((relpath, None, kind))

         for filename in files:
             # Get file name into canonical form.
             abspath = self.abspath(filename)
             filename = self.relpath(abspath)
             if len(filename) > 0:
+                all_files.add(filename)
-                new_files.add(filename)
                 recurse_directory_to_add_files(filename)

+        files = list(all_files)
-        files = list(new_files)

         if len(files) == 0:
             return # nothing to do

         # Bail out if we are going to delete files we shouldn't
         if not keep_files and not force:
+            for (file_id, path, content_change, versioned, parent_id, name,
+                 kind, executable) in self.iter_changes(self.basis_tree(),
+                     include_unchanged=True, require_versioned=False,
+                     want_unversioned=True, specific_files=files):
+                if versioned[0] == False:
+                    # The record is unknown or newly added
+                    files_to_backup.append(path[1])
+                elif (content_change and (kind[1] is not None) and
+                        osutils.is_inside_any(files, path[1])):
+                    # Versioned and changed, but not deleted, and still
+                    # in one of the dirs to be deleted.
+                    files_to_backup.append(path[1])
-            has_changed_files = len(unknown_nested_files) > 0
-            if not has_changed_files:
-                for (file_id, path, content_change, versioned, parent_id, name,
-                     kind, executable) in self.iter_changes(self.basis_tree(),
-                         include_unchanged=True, require_versioned=False,
-                         want_unversioned=True, specific_files=files):
-                    if versioned == (False, False):
-                        # The record is unknown ...
-                        if not self.is_ignored(path[1]):
-                            # ... but not ignored
-                            has_changed_files = True
-                    elif content_change and (kind[1] is not None):
-                        # Versioned and changed, but not deleted
-                        has_changed_files = True

+        def backup(file_to_backup):
+            backup_name = self.bzrdir.generate_backup_name(file_to_backup)
+            osutils.rename(abs_path, self.abspath(backup_name))
+            return "removed %s (but kept a copy: %s)" % (file_to_backup, backup_name)
-        if has_changed_files:
-            # Make delta show ALL applicable changes in error message.
-            tree_delta = self.changes_from(self.basis_tree(),
-                require_versioned=False, want_unversioned=True,
-                specific_files=files)
-            for unknown_file in unknown_nested_files:
-                if unknown_file not in tree_delta.unversioned:
-                    tree_delta.unversioned.extend((unknown_file,))
-            raise errors.BzrRemoveChangedFilesError(tree_delta)

         # Build inv_delta and delete files where applicable,
         # do this before any modifications to inventory.

                         new_status = 'I'
                         new_status = '?'
+                    # XXX: Really should be a more abstract reporter interface
+                    kind_ch = osutils.kind_marker(self.kind(fid))
+                    to_file.write(new_status + ' ' + f + kind_ch + '\n')
-                    textui.show_status(new_status, self.kind(fid), f,
                 # Unversion file
                 inv_delta.append((f, None, fid, None))
                 message = "removed %s" % (f,)

                         len(os.listdir(abs_path)) > 0):
                             osutils.rmtree(abs_path)
+                            message = "deleted %s" % (f,)
-                            message = "%s is not an empty directory "\
-                                "and won't be deleted." % (f,)
+                        if f in files_to_backup:
+                            osutils.delete_any(abs_path)
+                            message = "deleted %s" % (f,)
-                        osutils.delete_any(abs_path)
-                        message = "deleted %s" % (f,)
             elif message is not None:
                 # Only care if we haven't done anything yet.
                 message = "%s does not exist." % (f,)
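The reworked remove() no longer raises BzrRemoveChangedFilesError for changed or unknown files: when keep_files=False and force=False they are renamed with bzrdir.generate_backup_name() and reported as kept copies. A usage sketch (the file name is illustrative):

    from bzrlib.workingtree import WorkingTree

    tree = WorkingTree.open_containing('.')[0]
    # keep_files=False also deletes from disk; modified or unknown files are
    # backed up rather than blocking the operation, unless force=True.
    tree.remove(['doc/old.txt'], keep_files=False, force=False, verbose=True)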
         raise NotImplementedError(self.unlock)

+    def update(self, change_reporter=None, possible_transports=None,
+               revision=None, old_tip=_marker, show_base=False):
-    def update(self, change_reporter=None, possible_transports=None):
         """Update a working tree along its branch.

         This will update the branch if its bound too, which means we have

         - Merge current state -> basis tree of the master w.r.t. the old tree
         - Do a 'normal' merge of the old branch basis if it is relevant.

+        :param revision: The target revision to update to. Must be in the
+        :param old_tip: If branch.update() has already been run, the value it
+            returned (old tip of the branch or None). _marker is used

         if self.branch.get_bound_location() is not None:
             self.lock_write()
+            update_branch = (old_tip is self._marker)
-            update_branch = True
             self.lock_tree_write()
             update_branch = False
             if update_branch:
                 old_tip = self.branch.update(possible_transports)
+                if old_tip is self._marker:
+            return self._update_tree(old_tip, change_reporter, revision, show_base)
-            return self._update_tree(old_tip, change_reporter)

     @needs_tree_write_lock
+    def _update_tree(self, old_tip=None, change_reporter=None, revision=None,
-    def _update_tree(self, old_tip=None, change_reporter=None):
         """Update a tree to the master branch.

         :param old_tip: if supplied, the previous tip revision the branch,

         # We MUST save it even if an error occurs, because otherwise the users
         # local work is unreferenced and will appear to have been lost.

             last_rev = self.get_parent_ids()[0]
         except IndexError:
             last_rev = _mod_revision.NULL_REVISION
+        if revision is None:
+            revision = self.branch.last_revision()
+
+        old_tip = old_tip or _mod_revision.NULL_REVISION
+
+        if not _mod_revision.is_null(old_tip) and old_tip != last_rev:
+            # the branch we are bound to was updated
+            # merge those changes in first
+            base_tree = self.basis_tree()
+            other_tree = self.branch.repository.revision_tree(old_tip)
+            nb_conflicts = merge.merge_inner(self.branch, other_tree,
+                                             base_tree, this_tree=self,
+                                             change_reporter=change_reporter,
+                                             show_base=show_base)
+                self.add_parent_tree((old_tip, other_tree))
+                trace.note('Rerun update after fixing the conflicts.')
+
+        if last_rev != _mod_revision.ensure_null(revision):
+            # the working tree is up to date with the branch
+            # we can merge the specified revision from master
+            to_tree = self.branch.repository.revision_tree(revision)
+            to_root_id = to_tree.get_root_id()
-        if last_rev != _mod_revision.ensure_null(self.branch.last_revision()):
-            # merge tree state up to new branch tip.
             basis = self.basis_tree()
             basis.lock_read()
+                if (basis.inventory.root is None
+                    or basis.inventory.root.file_id != to_root_id):
+                    self.set_root_id(to_root_id)
-                to_tree = self.branch.basis_tree()
-                if basis.inventory.root is None:
-                    self.set_root_id(to_tree.get_root_id())
-                result += merge.merge_inner(
-                                      change_reporter=change_reporter)
+
+            # determine the branch point
+            graph = self.branch.repository.get_graph()
+            base_rev_id = graph.find_unique_lca(self.branch.last_revision(),
+            base_tree = self.branch.repository.revision_tree(base_rev_id)
+
+            nb_conflicts = merge.merge_inner(self.branch, to_tree, base_tree,
+                                             change_reporter=change_reporter,
+                                             show_base=show_base)
+            self.set_last_revision(revision)
             # TODO - dedup parents list with things merged by pull ?
             # reuse the tree we've updated to to set the basis:
+            parent_trees = [(revision, to_tree)]
-            parent_trees = [(self.branch.last_revision(), to_tree)]
             merges = self.get_parent_ids()[1:]
             # Ideally we ask the tree for the trees here, that way the working
             # tree can decide whether to give us the entire tree or give us a
             for parent in merges:
                 parent_trees.append(
                     (parent, self.branch.repository.revision_tree(parent)))
+            if not _mod_revision.is_null(old_tip):
-            if (old_tip is not None and not _mod_revision.is_null(old_tip)):
                 parent_trees.append(
                     (old_tip, self.branch.repository.revision_tree(old_tip)))
             self.set_parent_trees(parent_trees)
             last_rev = parent_trees[0][0]
-            # the working tree had the same last-revision as the master
-            # branch did. We may still have pivot local work from the local
-            # branch into old_tip:
-            if (old_tip is not None and not _mod_revision.is_null(old_tip)):
-                self.add_parent_tree_id(old_tip)
-        if (old_tip is not None and not _mod_revision.is_null(old_tip)
-            and old_tip != last_rev):
-            # our last revision was not the prior branch last revision
-            # and we have converted that last revision to a pending merge.
-            # base is somewhere between the branch tip now
-            # and the now pending merge
-            # Since we just modified the working tree and inventory, flush out
-            # the current state, before we modify it again.
-            # TODO: jam 20070214 WorkingTree3 doesn't require this, dirstate
-            # requires it only because TreeTransform directly munges the
-            # inventory and calls tree._write_inventory(). Ultimately we
-            # should be able to remove this extra flush.
-            graph = self.branch.repository.get_graph()
-            base_rev_id = graph.find_unique_lca(self.branch.last_revision(),
-            base_tree = self.branch.repository.revision_tree(base_rev_id)
-            other_tree = self.branch.repository.revision_tree(old_tip)
-            result += merge.merge_inner(
-                                  change_reporter=change_reporter)
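update() now accepts an explicit target revision and a show_base flag, returns the number of conflicts, and treats old_tip as an internal hand-off from branch.update() (the _marker default). A rough caller sketch (purely illustrative):

    from bzrlib.workingtree import WorkingTree

    tree = WorkingTree.open_containing('.')[0]
    # Pass revision=<revid> to stop at an older revision in the branch
    # ancestry; the default updates to the branch tip.
    nb_conflicts = tree.update(show_base=False)
    if nb_conflicts:
        print 'Rerun update after fixing the conflicts.'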
     def _write_hashcache_if_dirty(self):
         """Write out the hashcache if it is dirty."""

         return un_resolved, resolved

     @needs_read_lock
+    def _check(self, references):
+        """Check the tree for consistency.
+
+        :param references: A dict with keys matching the items returned by
+            self._get_check_refs(), and values from looking those keys up in

         tree_basis = self.basis_tree()
         tree_basis.lock_read()
+            repo_basis = references[('trees', self.last_revision())]
-            repo_basis = self.branch.repository.revision_tree(
-                self.last_revision())
             if len(list(repo_basis.iter_changes(tree_basis))) > 0:
                 raise errors.BzrCheckError(
                     "Mismatched basis inventory content.")
         if self._inventory is None:
             self.read_working_inventory()

+    def _get_check_refs(self):
+        """Return the references needed to perform a check of this tree."""
+        return [('trees', self.last_revision())]
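For reference, the new check plumbing splits the work in two: _get_check_refs() names the references a tree needs, and _check() consumes a dict of those references. Normally bzrlib.check drives this; a hand-rolled sketch of the contract (populating the dict by hand, based on the lookup code being replaced above):

    from bzrlib.workingtree import WorkingTree

    tree = WorkingTree.open_containing('.')[0]
    refs = {}
    for ref in tree._get_check_refs():
        kind, value = ref
        if kind == 'trees':
            refs[ref] = tree.branch.repository.revision_tree(value)
    tree._check(refs)   # raises BzrCheckError on a mismatched basis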
     def lock_tree_write(self):
         """See WorkingTree.lock_tree_write().

         In Format2 WorkingTrees we have a single lock for the branch and tree
         so lock_tree_write() degrades to lock_write().
+
+        :return: An object with an unlock method which will release the lock

         self.branch.lock_write()
+            self._control_files.lock_write()
-            return self._control_files.lock_write()
             self.branch.unlock()
             mode=self.bzrdir._get_file_mode())

+    def _get_check_refs(self):
+        """Return the references needed to perform a check of this tree."""
+        return [('trees', self.last_revision())]

     @needs_tree_write_lock
     def set_conflicts(self, conflicts):
         self._put_rio('conflicts', conflicts.to_stanzas(),
         return self.get_format_string()

+__default_format = WorkingTreeFormat6()
-__default_format = WorkingTreeFormat4()
 WorkingTreeFormat.register_format(__default_format)
-WorkingTreeFormat.register_format(WorkingTreeFormat6())
 WorkingTreeFormat.register_format(WorkingTreeFormat5())
+WorkingTreeFormat.register_format(WorkingTreeFormat4())
 WorkingTreeFormat.register_format(WorkingTreeFormat3())
 WorkingTreeFormat.set_default_format(__default_format)
 # formats which have no format string are not discoverable