WorkingTree.open(dir).
"""

# TODO: Give the workingtree sole responsibility for the working inventory;
# remove the variable and references to it from the branch. This may require
# updating the commit code so as to update the inventory within the working
# copy, and making sure there's only one WorkingTree for any directory on disk.
# At the moment they may alias the inventory and have old copies of it in
# memory. (Now done? -- mbp 20060309)

from cStringIO import StringIO
from bzrlib.filters import filtered_input_file
from bzrlib.trace import mutter, note
from bzrlib.transport.local import LocalTransport
from bzrlib.revision import CURRENT_REVISION
from bzrlib.rio import RioReader, rio_file, Stanza
from bzrlib.symbol_versioning import (
        path = osutils.getcwd()
        control, relpath = bzrdir.BzrDir.open_containing(path)
        return control.open_workingtree(), relpath
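    # Example (an illustrative sketch, not part of bzrlib itself): opening the
    # tree that contains a given directory. The path below is hypothetical.
    #
    #   tree, relpath = WorkingTree.open_containing('/home/user/project/doc')
    #   # tree.basedir is the root of the enclosing tree, and relpath is the
    #   # remainder of the supplied path relative to that root (e.g. 'doc').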
    def open_containing_paths(file_list, default_directory='.',
                              canonicalize=True, apply_view=True):
        """Open the WorkingTree that contains a set of paths.

        Fail if the paths given are not all in a single tree.

        This is used for the many command-line interfaces that take a list of
        any number of files and that require they all be in the same tree.
        """
        # recommended replacement for builtins.internal_tree_files
        if file_list is None or len(file_list) == 0:
            tree = WorkingTree.open_containing(default_directory)[0]
            # XXX: doesn't really belong here, and seems to have the strange
            # side effect of making it return a bunch of files, not the whole
            # tree -- mbp 20100716
            if tree.supports_views() and apply_view:
                view_files = tree.views.lookup_view()
                if view_files:
                    file_list = view_files
                    view_str = views.view_display_str(view_files)
                    note("Ignoring files outside view. View is %s" % view_str)
            return tree, file_list
        tree = WorkingTree.open_containing(file_list[0])[0]
        return tree, tree.safe_relpath_files(file_list, canonicalize,
                                             apply_view=apply_view)
    def safe_relpath_files(self, file_list, canonicalize=True, apply_view=True):
        """Convert file_list into a list of relpaths in tree.

        :param self: A tree to operate on.
        :param file_list: A list of user provided paths or None.
        :param apply_view: if True and a view is set, apply it or check that
            specified files are within it
        :return: A list of relative paths.
        :raises errors.PathNotChild: When a provided path is in a different
            tree than self.
        """
        if file_list is None:
            return None
        if self.supports_views() and apply_view:
            view_files = self.views.lookup_view()
        else:
            view_files = []
        new_list = []
        # self.relpath exists as a "thunk" to osutils, but canonical_relpath
        # doesn't - fix that up here before we enter the loop.
        fixer = lambda p: osutils.canonical_relpath(self.basedir, p)
        for filename in file_list:
            relpath = fixer(osutils.dereference_path(filename))
            if view_files and not osutils.is_inside_any(view_files, relpath):
                raise errors.FileOutsideView(filename, view_files)
            new_list.append(relpath)
        return new_list
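    # Example (an illustrative sketch, not part of bzrlib itself): resolving a
    # list of user-supplied paths against a single tree, as the command-line
    # front ends do. The file names are hypothetical.
    #
    #   tree, relpaths = WorkingTree.open_containing_paths(
    #       ['src/foo.py', 'src/bar.py'])
    #   # All paths must be inside one tree; a path from a different tree
    #   # raises errors.PathNotChild while the relpaths are computed.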
    def open_downlevel(path=None):
        """Open an unsupported working tree.
        """

                return True, None

                return True, tree

        t = transport.get_transport(location)
        iterator = bzrdir.BzrDir.find_bzrdirs(t, evaluate=evaluate,
                                              list_current=list_current)
        return [tr for tr in iterator if tr is not None]

    # should be deprecated - this is slow and in any case treating them as a
    # container is (we now know) bad style -- mbp 20070302
        return (file_obj, stat_value)

    def get_file_text(self, file_id, path=None, filtered=True):
        my_file = self.get_file(file_id, path=path, filtered=filtered)
        return my_file.read()

    def get_file_byname(self, filename, filtered=True):
        path = self.abspath(filename)
        # Now we have the parents of this content
        annotator = self.branch.repository.texts.get_annotator()
        text = self.get_file_text(file_id)
        this_key = (file_id, default_revision)
        annotator.add_special_text(this_key, file_parent_keys, text)
        annotations = [(key[-1], line)
    def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
        file_id = self.path2id(path)
        if file_id is None:
            # For unversioned files on win32, we just assume they are not
            # executable
            return False
        return self._inventory[file_id].executable

    def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
        kind = _mapper(stat_result.st_mode)
        if kind == 'file':
            return self._file_content_summary(path, stat_result)
        elif kind == 'directory':
            # perhaps it looks like a plain directory, but it's really a
            # reference.

        return (kind, None, None, None)

    def _file_content_summary(self, path, stat_result):
        size = stat_result.st_size
        executable = self._is_executable_from_path_and_stat(path, stat_result)
        # try for a stat cache lookup
        return ('file', size, executable, self._sha_from_stat(
            path, stat_result))
769
def _check_parents_for_ghosts(self, revision_ids, allow_leftmost_as_ghost):
848
770
"""Common ghost checking functionality from set_parent_*.
    @needs_write_lock # because merge pulls data into the branch.
    def merge_from_branch(self, branch, to_revision=None, from_revision=None,
                          merge_type=None, force=False):
        """Merge from a branch into this working tree.

        :param branch: The branch to merge from.
        :param to_revision: If non-None, merge up to this revision; if None,
            it defaults to branch.last_revision().
        """
        from bzrlib.merge import Merger, Merge3Merger
        merger = Merger(self.branch, this_tree=self)
        # check that there are no local alterations
        if not force and self.has_changes():
            raise errors.UncommittedChanges(self)
        if to_revision is None:
            to_revision = _mod_revision.ensure_null(branch.last_revision())
        merger.other_rev_id = to_revision
        if _mod_revision.is_null(merger.other_rev_id):
            raise errors.NoCommits(branch)
        self.branch.fetch(branch, last_revision=merger.other_rev_id)
        merger.other_basis = merger.other_rev_id
        merger.other_tree = self.branch.repository.revision_tree(
            merger.other_rev_id)
        merger.other_branch = branch
        if from_revision is None:
            merger.find_base()
        else:
            merger.set_base_revision(from_revision, branch)
        if merger.base_rev_id == merger.other_rev_id:
            raise errors.PointlessMerge
        merger.backup_files = False
        if merge_type is None:
            merger.merge_type = Merge3Merger
        else:
            merger.merge_type = merge_type
        merger.set_interesting_files(None)
        merger.show_base = False
        merger.reprocess = False
        conflicts = merger.do_merge()
        merger.set_pending()
        return conflicts
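    # Example (an illustrative sketch, not part of bzrlib itself): merging
    # another branch into this working tree. The '../feature' location is
    # hypothetical.
    #
    #   other = branch.Branch.open('../feature')
    #   conflicts = tree.merge_from_branch(other)
    #   # A non-zero return value means conflicts were left in the tree; the
    #   # merge itself is recorded as a pending merge via set_pending().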

    @needs_read_lock
        tree_transport = self.bzrdir.root_transport.clone(sub_path)
        if tree_transport.base != branch_transport.base:
            tree_bzrdir = format.initialize_on_transport(tree_transport)
            branch.BranchReferenceFormat().initialize(tree_bzrdir,
                target_branch=new_branch)
        else:
            tree_bzrdir = branch_bzrdir
        wt = tree_bzrdir.create_workingtree(_mod_revision.NULL_REVISION)
            # absolute path
            fap = from_dir_abspath + '/' + f

            dir_ie = inv[from_dir_id]
            if dir_ie.kind == 'directory':
                f_ie = dir_ie.children.get(f)

            elif self.is_ignored(fp[1:]):

                # we may not have found this file, because of a unicode
                # issue, or because the directory was actually a symlink.
                f_norm, can_access = osutils.normalized_filename(f)
                if f == f_norm or not can_access:
                    # No change, so treat this file normally
        # check for deprecated use of signature
        if to_dir is None:
            raise TypeError('You must supply a target directory')
        # check destination directory
        if isinstance(from_paths, basestring):
            raise ValueError()
    @needs_write_lock
    def pull(self, source, overwrite=False, stop_revision=None,
             change_reporter=None, possible_transports=None, local=False):
        source.lock_read()
        try:
            old_revision_info = self.branch.last_revision_info()
            basis_tree = self.basis_tree()
            count = self.branch.pull(source, overwrite, stop_revision,
                                     possible_transports=possible_transports,
                                     local=local)

                merge.merge_inner(self.branch,
                                  new_basis_tree,
                                  basis_tree,
                                  this_tree=self,
                                  change_reporter=change_reporter)
                basis_root_id = basis_tree.get_root_id()
                new_root_id = new_basis_tree.get_root_id()
                if basis_root_id != new_root_id:
                    self.set_root_id(new_root_id)

                basis_tree.unlock()
            # TODO - dedup parents list with things merged by pull ?
            # reuse the revisiontree we merged against to set the new tree data.
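    # Example (an illustrative sketch, not part of bzrlib itself): pulling new
    # revisions from a source branch into this tree and its branch. The
    # '../upstream' location is hypothetical.
    #
    #   source = branch.Branch.open('../upstream')
    #   tree.pull(source)
    #   # The tree contents are merged up to the new basis and the parent
    #   # list is reset to the new branch tip.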
        r"""Check whether the filename matches an ignore pattern.

        Patterns containing '/' or '\' need to match the whole path;
        others match against only the last component.  Patterns starting
        with '!' are ignore exceptions.  Exceptions take precedence
        over regular patterns and cause the filename to not be ignored.

        If the file is ignored, returns the pattern which caused it to
        be ignored, otherwise None.  So this can simply be used as a
        boolean if desired."""
        if getattr(self, '_ignoreglobster', None) is None:
            self._ignoreglobster = globbing.ExceptionGlobster(self.get_ignore_list())
        return self._ignoreglobster.match(filename)
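    # Example (an illustrative sketch, not part of bzrlib itself): with a
    # hypothetical ignore list of ['*.pyc', '!important.pyc']:
    #
    #   tree.is_ignored('foo.pyc')        # -> '*.pyc'
    #   tree.is_ignored('important.pyc')  # -> None, the '!' exception wins
    #   tree.is_ignored('foo.py')         # -> None, not ignored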
    def kind(self, file_id):

        raise errors.ObjectNotLocked(self)
    def lock_read(self):
        """Lock the tree for reading.

        This also locks the branch, and can be unlocked via self.unlock().

        :return: A bzrlib.lock.LogicalLockResult.
        """
        if not self.is_locked():
            self._reset_data()
        self.branch.lock_read()
        try:
            self._control_files.lock_read()
            return LogicalLockResult(self.unlock)
        except:
            self.branch.unlock()
            raise

    def lock_tree_write(self):
        """See MutableTree.lock_tree_write, and WorkingTree.unlock.

        :return: A bzrlib.lock.LogicalLockResult.
        """
        if not self.is_locked():
            self._reset_data()
        self.branch.lock_read()
        try:
            self._control_files.lock_write()
            return LogicalLockResult(self.unlock)
        except:
            self.branch.unlock()
            raise

    def lock_write(self):
        """See MutableTree.lock_write, and WorkingTree.unlock.

        :return: A bzrlib.lock.LogicalLockResult.
        """
        if not self.is_locked():
            self._reset_data()
        self.branch.lock_write()
        try:
            self._control_files.lock_write()
            return LogicalLockResult(self.unlock)
        except:
            self.branch.unlock()
            raise
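    # Example (an illustrative sketch, not part of bzrlib itself): the lock
    # methods lock the branch as well and return a LogicalLockResult, so the
    # usual pattern is to pair them with unlock() in a try/finally.
    #
    #   tree.lock_read()
    #   try:
    #       parents = tree.get_parent_ids()
    #   finally:
    #       tree.unlock()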
            # revision_id is set. We must check for this full string, because a
            # root node id can legitimately look like 'revision_id' but cannot
            # contain a '"'.
            xml = self.branch.repository._get_inventory_xml(new_revision)
            firstline = xml.split('\n', 1)[0]
            if (not 'revision_id="' in firstline or
                'format="7"' not in firstline):
                inv = self.branch.repository._serializer.read_inventory_from_string(
                xml = self._create_basis_xml_from_inventory(new_revision, inv)
                self._write_basis_inventory(xml)
        except (errors.NoSuchRevision, errors.RevisionNotPresent):
        all_files = set() # specified and nested files
        unknown_nested_files=set()
        if to_file is None:
            to_file = sys.stdout

        files_to_backup = []

        def recurse_directory_to_add_files(directory):
            # Recurse directory and add all files
            # so we can check if they have changed.
            for parent_info, file_infos in self.walkdirs(directory):
                for relpath, basename, kind, lstat, fileid, kind in file_infos:
                    # Is it versioned or ignored?
                    if self.path2id(relpath):
                        # Add nested content for deletion.
                        all_files.add(relpath)
                    else:
                        # Files which are not versioned
                        # should be treated as unknown.
                        files_to_backup.append(relpath)

        for filename in files:
            # Get file name into canonical form.
            abspath = self.abspath(filename)
            filename = self.relpath(abspath)
            if len(filename) > 0:
                all_files.add(filename)
                recurse_directory_to_add_files(filename)

        files = list(all_files)

        if len(files) == 0:
            return # nothing to do

        # Bail out if we are going to delete files we shouldn't
        if not keep_files and not force:
            for (file_id, path, content_change, versioned, parent_id, name,
                 kind, executable) in self.iter_changes(self.basis_tree(),
                     include_unchanged=True, require_versioned=False,
                     want_unversioned=True, specific_files=files):
                if versioned[0] == False:
                    # The record is unknown or newly added
                    files_to_backup.append(path[1])
                elif (content_change and (kind[1] is not None) and
                        osutils.is_inside_any(files, path[1])):
                    # Versioned and changed, but not deleted, and still
                    # in one of the dirs to be deleted.
                    files_to_backup.append(path[1])

        def backup(file_to_backup):
            backup_name = self.bzrdir.generate_backup_name(file_to_backup)
            osutils.rename(abs_path, self.abspath(backup_name))
            return "removed %s (but kept a copy: %s)" % (file_to_backup, backup_name)

        # Build inv_delta and delete files where applicable,
        # do this before any modifications to inventory.

                    if self.is_ignored(f):
                        new_status = 'I'
                    else:
                        new_status = '?'
                    # XXX: Really should be a more abstract reporter interface
                    kind_ch = osutils.kind_marker(self.kind(fid))
                    to_file.write(new_status + ' ' + f + kind_ch + '\n')
                # Unversion file
                inv_delta.append((f, None, fid, None))
                message = "removed %s" % (f,)

                    if (osutils.isdir(abs_path) and
                        len(os.listdir(abs_path)) > 0):
                        if force:
                            osutils.rmtree(abs_path)
                            message = "deleted %s" % (f,)
                        else:
                            message = backup(f)
                    else:
                        if f in files_to_backup:
                            message = backup(f)
                        else:
                            osutils.delete_any(abs_path)
                            message = "deleted %s" % (f,)
                elif message is not None:
                    # Only care if we haven't done anything yet.
                    message = "%s does not exist." % (f,)
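    # Example (an illustrative sketch, not part of bzrlib itself): removing a
    # file from version control. The path is hypothetical.
    #
    #   tree.remove(['doc/old.txt'], keep_files=False)
    #   # With keep_files=False the file is deleted from disk as well, but
    #   # unversioned or changed files are backed up via backup() rather than
    #   # being lost.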
    def unlock(self):
        raise NotImplementedError(self.unlock)
    def update(self, change_reporter=None, possible_transports=None,
               revision=None, old_tip=_marker):
        """Update a working tree along its branch.

        This will update the branch if it's bound too, which means we have
        multiple trees involved:

        - Merge current state -> basis tree of the master w.r.t. the old tree
          basis.
        - Do a 'normal' merge of the old branch basis if it is relevant.

        :param revision: The target revision to update to. Must be in the
            revision history.
        :param old_tip: If branch.update() has already been run, the value it
            returned (old tip of the branch or None). _marker is used
            otherwise.
        """
        if self.branch.get_bound_location() is not None:
            self.lock_write()
            update_branch = (old_tip is self._marker)
        else:
            self.lock_tree_write()
            update_branch = False
        try:
            if update_branch:
                old_tip = self.branch.update(possible_transports)
            else:
                if old_tip is self._marker:
                    old_tip = None
            return self._update_tree(old_tip, change_reporter, revision)
        finally:
            self.unlock()
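    # Example (an illustrative sketch, not part of bzrlib itself): bringing a
    # bound checkout up to date with its master branch.
    #
    #   conflicts = tree.update()
    #   if conflicts:
    #       # resolve the conflicts, then rerun update as the note() emitted
    #       # by _update_tree suggests.
    #       pass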
    @needs_tree_write_lock
    def _update_tree(self, old_tip=None, change_reporter=None, revision=None):
        """Update a tree to the master branch.

        :param old_tip: if supplied, the previous tip revision of the branch,
            before it was changed to the master branch's tip.
        """
        # We MUST save it even if an error occurs, because otherwise the
        # user's local work is unreferenced and will appear to have been lost.
        nb_conflicts = 0
        try:
            last_rev = self.get_parent_ids()[0]
        except IndexError:
            last_rev = _mod_revision.NULL_REVISION
        if revision is None:
            revision = self.branch.last_revision()

        old_tip = old_tip or _mod_revision.NULL_REVISION

        if not _mod_revision.is_null(old_tip) and old_tip != last_rev:
            # the branch we are bound to was updated
            # merge those changes in first
            base_tree = self.basis_tree()
            other_tree = self.branch.repository.revision_tree(old_tip)
            nb_conflicts = merge.merge_inner(self.branch, other_tree,
                                             base_tree, this_tree=self,
                                             change_reporter=change_reporter)
            if nb_conflicts:
                self.add_parent_tree((old_tip, other_tree))
                trace.note('Rerun update after fixing the conflicts.')
                return nb_conflicts

        if last_rev != _mod_revision.ensure_null(revision):
            # the working tree is up to date with the branch
            # we can merge the specified revision from master
            to_tree = self.branch.repository.revision_tree(revision)
            to_root_id = to_tree.get_root_id()

            basis = self.basis_tree()
            basis.lock_read()
            try:
                if (basis.inventory.root is None
                    or basis.inventory.root.file_id != to_root_id):
                    self.set_root_id(to_root_id)
            finally:
                basis.unlock()

            # determine the branch point
            graph = self.branch.repository.get_graph()
            base_rev_id = graph.find_unique_lca(self.branch.last_revision(),
                                                last_rev)
            base_tree = self.branch.repository.revision_tree(base_rev_id)

            nb_conflicts = merge.merge_inner(self.branch, to_tree, base_tree,
                                             this_tree=self,
                                             change_reporter=change_reporter)
            self.set_last_revision(revision)
            # TODO - dedup parents list with things merged by pull ?
            # reuse the tree we've updated to to set the basis:
            parent_trees = [(revision, to_tree)]
            merges = self.get_parent_ids()[1:]
            # Ideally we ask the tree for the trees here, that way the working
            # tree can decide whether to give us the entire tree or give us a
            # lazy initialised tree.
            for parent in merges:
                parent_trees.append(
                    (parent, self.branch.repository.revision_tree(parent)))
            if not _mod_revision.is_null(old_tip):
                parent_trees.append(
                    (old_tip, self.branch.repository.revision_tree(old_tip)))
            self.set_parent_trees(parent_trees)
            last_rev = parent_trees[0][0]
        return nb_conflicts

    def _write_hashcache_if_dirty(self):
        """Write out the hashcache if it is dirty."""
        return self.get_format_string()


__default_format = WorkingTreeFormat6()
WorkingTreeFormat.register_format(__default_format)
WorkingTreeFormat.register_format(WorkingTreeFormat5())
WorkingTreeFormat.register_format(WorkingTreeFormat4())
WorkingTreeFormat.register_format(WorkingTreeFormat3())
WorkingTreeFormat.set_default_format(__default_format)
# formats which have no format string are not discoverable
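# Example (an illustrative sketch, not part of bzrlib itself):
#
#   fmt = WorkingTreeFormat.get_default_format()
#   # fmt is the WorkingTreeFormat6 instance registered as __default_format
#   # above.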