 ERROR_PATH_NOT_FOUND = 3 # WindowsError errno code, equivalent to ENOENT

-@deprecated_function(zero_thirteen)
-def gen_file_id(name):
-    """Return new file id for the basename 'name'.
-
-    Use bzrlib.generate_ids.gen_file_id() instead
-    """
-    return generate_ids.gen_file_id(name)
-
-@deprecated_function(zero_thirteen)
-def gen_root_id():
-    """Return a new tree-root file id.
-
-    This has been deprecated in favor of bzrlib.generate_ids.gen_root_id()
-    """
-    return generate_ids.gen_root_id()

 class TreeEntry(object):
     """An entry that implements the minimum interface used by commands.
         if not _internal:
             raise errors.BzrError("Please use bzrdir.open_workingtree or "
                 "WorkingTree.open() to obtain a WorkingTree.")
-        assert isinstance(basedir, basestring), \
-            "base directory %r is not a string" % basedir
         basedir = safe_unicode(basedir)
         mutter("opening working tree %r", basedir)
         if deprecated_passed(branch):

             self._control_files = self.branch.control_files
         else:
             # assume all other formats have their own control files.
-            assert isinstance(_control_files, LockableFiles), \
-                "_control_files must be a LockableFiles, not %r" \
-                % _control_files
             self._control_files = _control_files
+        self._transport = self._control_files._transport
         # update the whole cache up front and write to disk if anything changed;
         # in the future we might want to do this more selectively
         # two possible ways offer themselves : in self._unlock, write the cache

         wt_trans = self.bzrdir.get_workingtree_transport(None)
         cache_filename = wt_trans.local_abspath('stat-cache')
         self._hashcache = hashcache.HashCache(basedir, cache_filename,
-            self._control_files._file_mode)
+            self.bzrdir._get_file_mode())
         hc = self._hashcache

         # is this scan needed ? it makes things kinda slow.
     def _cleanup(self):
         self._flush_ignore_list_cache()
-    @deprecated_method(zero_eight)
-    def create(branch, directory):
-        """Create a workingtree for branch at directory.
-
-        If existing_directory already exists it must have a .bzr directory.
-        If it does not exist, it will be created.
-
-        This returns a new WorkingTree object for the new checkout.
-
-        TODO FIXME RBC 20060124 when we have checkout formats in place this
-        should accept an optional revisionid to checkout [and reject this if
-        checking out into the same dir as a pre-checkout-aware branch format.]
-
-        XXX: When BzrDir is present, these should be created through that
-        """
-        warnings.warn('delete WorkingTree.create', stacklevel=3)
-        transport = get_transport(directory)
-        if branch.bzrdir.root_transport.base == transport.base:
-            return branch.bzrdir.create_workingtree()
-        # different directory,
-        # create a branch reference
-        # and now a working tree.
-        raise NotImplementedError
-
-    @deprecated_method(zero_eight)
-    def create_standalone(directory):
-        """Create a checkout and a branch and a repo at directory.
-
-        Directory must exist and be empty.
-
-        please use BzrDir.create_standalone_workingtree
-        """
-        return bzrdir.BzrDir.create_standalone_workingtree(directory)
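For orientation only, not part of the diff: the deleted docstrings above already name their replacements. A minimal sketch of using them, assuming a Python 2 environment with bzrlib importable; the wrapper names below are hypothetical.

from bzrlib import bzrdir

def make_standalone_tree(path):
    # replacement for the deprecated WorkingTree.create_standalone(directory):
    # creates a repository, a branch and a working tree at 'path' in one call
    return bzrdir.BzrDir.create_standalone_workingtree(path)

def make_colocated_tree(branch):
    # replacement for the deprecated WorkingTree.create(branch, directory)
    # in the same-directory case shown above
    return branch.bzrdir.create_workingtree()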
     def relpath(self, path):
         """Return the local path portion from a given path.
         # function - they should be part of lock_write and unlock.
         inv = self.inventory
         for f, file_id, kind in zip(files, ids, kinds):
-            assert kind is not None
             if file_id is None:
                 inv.add_path(f, kind=kind)
             return (kind, None, None, None)

-    @deprecated_method(zero_eleven)
-    def pending_merges(self):
-        """Return a list of pending merges.
-
-        These are revisions that have been merged into the working
-        directory but not yet committed.
-
-        As of 0.11 this is deprecated. Please see WorkingTree.get_parent_ids()
-        instead - which is available on all tree objects.
-        """
-        return self.get_parent_ids()[1:]
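Illustration only, not part of the diff: the deprecation notice above points at get_parent_ids(), where the first parent is the basis revision and the remaining parents are exactly what pending_merges() used to return. A hedged sketch; 'tree' stands for any WorkingTree instance.

def split_parents(tree):
    # first parent = basis (last commit); remaining parents = pending merges
    parents = tree.get_parent_ids()
    basis = parents[0] if parents else None
    pending = parents[1:]
    return basis, pending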
     def _check_parents_for_ghosts(self, revision_ids, allow_leftmost_as_ghost):
         """Common ghost checking functionality from set_parent_*.
     def _set_merges_from_parent_ids(self, parent_ids):
         merges = parent_ids[1:]
-        self._control_files.put_bytes('pending-merges', '\n'.join(merges))
+        self._transport.put_bytes('pending-merges', '\n'.join(merges),
+            mode=self._control_files._file_mode)
+
+    def _filter_parent_ids_by_ancestry(self, revision_ids):
+        """Check that all merged revisions are proper 'heads'.
+
+        This will always return the first revision_id, and any merged revisions
+        """
+        if len(revision_ids) == 0:
+            return revision_ids
+        graph = self.branch.repository.get_graph()
+        heads = graph.heads(revision_ids)
+        new_revision_ids = revision_ids[:1]
+        for revision_id in revision_ids[1:]:
+            if revision_id in heads and revision_id not in new_revision_ids:
+                new_revision_ids.append(revision_id)
+        if new_revision_ids != revision_ids:
+            trace.mutter('requested to set revision_ids = %s,'
+                         ' but filtered to %s', revision_ids, new_revision_ids)
+        return new_revision_ids
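A self-contained sketch, illustration only and not bzrlib code, of the head-filtering idea in _filter_parent_ids_by_ancestry above: a requested merge parent is dropped when it is already an ancestor of another requested parent. The toy ancestry map and the heads() helper are hypothetical stand-ins for branch.repository.get_graph() and Graph.heads().

def _ancestors(ancestry, rev):
    # every revision reachable from 'rev', including 'rev' itself
    seen, todo = set(), [rev]
    while todo:
        r = todo.pop()
        if r not in seen:
            seen.add(r)
            todo.extend(ancestry.get(r, ()))
    return seen

def heads(ancestry, revision_ids):
    # stand-in for Graph.heads(): keep only ids that are not an
    # ancestor of some other id in the set
    result = set(revision_ids)
    for a in revision_ids:
        for b in revision_ids:
            if a != b and a in _ancestors(ancestry, b):
                result.discard(a)
    return result

# toy history: rev-a <- rev-b <- rev-c
ancestry = {'rev-a': [], 'rev-b': ['rev-a'], 'rev-c': ['rev-b']}
print(heads(ancestry, ['rev-c', 'rev-b']))   # rev-b is an ancestor of rev-c: {'rev-c'}
print(heads(ancestry, ['rev-c', 'rev-a']))   # likewise rev-a is dropped:     {'rev-c'}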
     @needs_tree_write_lock
     def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):

         for revision_id in revision_ids:
             _mod_revision.check_not_reserved_id(revision_id)

+        revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)

         if len(revision_ids) > 0:
             self.set_last_revision(revision_ids[0])

         self._check_parents_for_ghosts(parent_ids,
             allow_leftmost_as_ghost=allow_leftmost_as_ghost)
+        parent_ids = self._filter_parent_ids_by_ancestry(parent_ids)

         if len(parent_ids) == 0:
             leftmost_parent_id = _mod_revision.NULL_REVISION
             leftmost_parent_tree = None
     def _put_rio(self, filename, stanzas, header):
         self._must_be_locked()
         my_file = rio_file(stanzas, header)
-        self._control_files.put(filename, my_file)
+        self._transport.put_file(filename, my_file,
+            mode=self._control_files._file_mode)

     @needs_write_lock # because merge pulls data into the branch.
     def merge_from_branch(self, branch, to_revision=None, from_revision=None,
         sio = StringIO()
         self._serialize(self._inventory, sio)
-        self._control_files.put('inventory', sio)
+        self._transport.put_file('inventory', sio,
+            mode=self._control_files._file_mode)
         self._inventory_is_modified = False

     def _kind(self, relpath):
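The same LockableFiles-to-Transport substitution recurs throughout this diff. A minimal usage sketch of the Transport calls involved, assuming a Python 2 environment with bzrlib importable; the memory transport is only a convenient throwaway backend for the example.

from StringIO import StringIO
from bzrlib.transport import get_transport

t = get_transport('memory:///')
# put_file() takes a file-like object, put_bytes() a plain byte string;
# both accept the optional permission bits the new code passes as 'mode'
t.put_file('inventory', StringIO('<inventory format="5" />'), mode=0644)
t.put_bytes('pending-merges', '', mode=0644)
print(t.get_bytes('pending-merges'))     # -> ''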
                 DeprecationWarning)

         # check destination directory
-        assert not isinstance(from_paths, basestring)
+        if isinstance(from_paths, basestring):

         inv = self.inventory
         to_abs = self.abspath(to_dir)
         if not isdir(to_abs):
         # - RBC 20060907
         self._write_inventory(self._inventory)

-    @deprecated_method(zero_eight)
-    def iter_conflicts(self):
-        """List all files in the tree that have text or content conflicts.
-        DEPRECATED. Use conflicts instead."""
-        return self._iter_conflicts()

     def _iter_conflicts(self):
         conflicted = set()
         for info in self.list_files():
     def _reset_data(self):
         """Reset transient data that cannot be revalidated."""
         self._inventory_is_modified = False
-        result = self._deserialize(self._control_files.get('inventory'))
+        result = self._deserialize(self._transport.get('inventory'))
         self._set_inventory(result, dirty=False)

     @needs_tree_write_lock
     def _write_basis_inventory(self, xml):
         """Write the basis inventory XML to the basis-inventory file"""
-        assert isinstance(xml, str), 'serialised xml must be bytestring.'
         path = self._basis_inventory_name()
         sio = StringIO(xml)
-        self._control_files.put(path, sio)
+        self._transport.put_file(path, sio,
+            mode=self._control_files._file_mode)

     def _create_basis_xml_from_inventory(self, revision_id, inventory):
         """Create the text that will be saved in basis-inventory"""
     def read_basis_inventory(self):
         """Read the cached basis inventory."""
         path = self._basis_inventory_name()
-        return self._control_files.get(path).read()
+        return self._transport.get_bytes(path)
     @needs_read_lock
     def read_working_inventory(self):

         if self._inventory_is_modified:
             raise errors.InventoryModified(self)
-        result = self._deserialize(self._control_files.get('inventory'))
+        result = self._deserialize(self._transport.get('inventory'))
         self._set_inventory(result, dirty=False)
         """Set the root id for this tree."""
         # for compatability
         if file_id is None:
-            symbol_versioning.warn(symbol_versioning.zero_twelve
-                % 'WorkingTree.set_root_id with fileid=None',

-        file_id = osutils.safe_file_id(file_id)
+                'WorkingTree.set_root_id with fileid=None')
+        file_id = osutils.safe_file_id(file_id)
         self._set_root_id(file_id)

     def _set_root_id(self, file_id):
     def _last_revision(self):
         """See Mutable.last_revision."""
         try:
-            return self._control_files.get('last-revision').read()
+            return self._transport.get_bytes('last-revision')
         except errors.NoSuchFile:
             return _mod_revision.NULL_REVISION

         """See WorkingTree._change_last_revision."""
         if revision_id is None or revision_id == NULL_REVISION:
             try:
-                self._control_files._transport.delete('last-revision')
+                self._transport.delete('last-revision')
             except errors.NoSuchFile:

-            self._control_files.put_bytes('last-revision', revision_id)
+            self._transport.put_bytes('last-revision', revision_id,
+                mode=self._control_files._file_mode)
     @needs_tree_write_lock

         return path[:-len(suffix)]
-@deprecated_function(zero_eight)
-def is_control_file(filename):
-    """See WorkingTree.is_control_filename(filename)."""
-    ## FIXME: better check
-    filename = normpath(filename)
-    while filename != '':
-        head, tail = os.path.split(filename)
-        ## mutter('check %r for control file' % ((head, tail),))

-        if filename == head:

 class WorkingTreeFormat(object):
     """An encapsulation of the initialization and open routines for a format.

         """See WorkingTreeFormat.get_format_description()."""
         return "Working tree format 2"
-    def stub_initialize_remote(self, control_files):
-        """As a special workaround create critical control files for a remote working tree
+    def _stub_initialize_remote(self, branch):
+        """As a special workaround create critical control files for a remote working tree.

         This ensures that it can later be updated and dealt with locally,
         since BzrDirFormat6 and BzrDirFormat5 cannot represent dirs with

         inv = Inventory()
         xml5.serializer_v5.write_inventory(inv, sio, working=True)
-        control_files.put('inventory', sio)
-        control_files.put_bytes('pending-merges', '')
+        branch._transport.put_file('inventory', sio,
+            mode=branch.control_files._file_mode)
+        branch._transport.put_bytes('pending-merges', '',
+            mode=branch.control_files._file_mode)

     def initialize(self, a_bzrdir, revision_id=None, from_branch=None,
         control_files = self._open_control_files(a_bzrdir)
         control_files.create_lock()
         control_files.lock_write()
-        control_files.put_utf8('format', self.get_format_string())
+        transport.put_bytes('format', self.get_format_string(),
+            mode=control_files._file_mode)
         if from_branch is not None:
             branch = from_branch