# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

from __future__ import absolute_import

from stat import S_ISREG, S_IEXEC

lazy_import.lazy_import(globals(), """
from bzrlib import (
    revision as _mod_revision,
    )
from bzrlib.i18n import gettext
""")
from bzrlib.errors import (DuplicateKey, MalformedTransform, NoSuchFile,
                           ReusingTransform, CantMoveRoot,
                           ExistingLimbo, ImmortalLimbo, NoFinalPath,
                           UnableCreateSymlink)
from bzrlib.filters import filtered_output_bytes, ContentFilterContext
from bzrlib.inventory import InventoryEntry
from bzrlib.osutils import (
    delete_any,
    sha_file,
    supports_executable,
    )
from bzrlib.progress import ProgressPhase
from bzrlib.symbol_versioning import (
    deprecated_function,
    deprecated_in,
    deprecated_method,
    )
from bzrlib.trace import mutter, warning
from bzrlib import tree
import bzrlib.urlutils as urlutils


ROOT_PARENT = "root-parent"

def unique_add(map, key, value):
    if key in map:
        raise DuplicateKey(key=key)
    map[key] = value


class _TransformResults(object):

    def __init__(self, modified_paths, rename_count):
        object.__init__(self)

        self.version_file(old_root_file_id, old_root)
        self.unversion_file(self._new_root)

    def fixup_new_roots(self):
        """Reinterpret requests to change the root directory.

        Instead of creating a root directory, or moving an existing directory,
        all the attributes and children of the new root are applied to the
        existing root directory.

        This means that the old root trans-id becomes obsolete, so it is
        recommended only to invoke this after the root trans-id has become
        irrelevant.
        """
        new_roots = [k for k, v in self._new_parent.iteritems() if v is
                     ROOT_PARENT]
        if len(new_roots) < 1:
            return
        if len(new_roots) != 1:
            raise ValueError('A tree cannot have two roots!')
        if self._new_root is None:
            self._new_root = new_roots[0]
            return
        old_new_root = new_roots[0]
        # unversion the new root's directory.
        if self.final_kind(self._new_root) is None:
            file_id = self.final_file_id(old_new_root)
        else:
            file_id = self.final_file_id(self._new_root)
        if old_new_root in self._new_id:
            self.cancel_versioning(old_new_root)
        else:
            self.unversion_file(old_new_root)
        # if, at this stage, root still has an old file_id, zap it so we can
        # stick a new one in.
        if (self.tree_file_id(self._new_root) is not None and
            self._new_root not in self._removed_id):
            self.unversion_file(self._new_root)
        if file_id is not None:
            self.version_file(file_id, self._new_root)

        # Now move children of new root into old root directory.
        # Ensure all children are registered with the transaction, but don't
        # use directly-- some tree children have new parents
        list(self.iter_tree_children(old_new_root))
        # Move all children of new root into old root directory.
        for child in self.by_parent().get(old_new_root, []):
            self.adjust_path(self.final_name(child), self._new_root, child)

        # Ensure old_new_root has no directory.
        if old_new_root in self._new_contents:
            self.cancel_creation(old_new_root)
        else:
            self.delete_contents(old_new_root)

        # prevent deletion of root directory.
        if self._new_root in self._removed_contents:
            self.cancel_deletion(self._new_root)

        # destroy path info for old_new_root.
        del self._new_parent[old_new_root]
        del self._new_name[old_new_root]
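
    # Illustrative sketch (not part of the original module): a hedged example
    # of when fixup_new_roots() is useful. The workingtree setup helper
    # (BzrDir.create_standalone_workingtree) and the exact root-replacement
    # request below are assumptions for illustration, not code from this file.
    #
    #   from bzrlib.bzrdir import BzrDir
    #   from bzrlib.transform import TreeTransform
    #
    #   wt = BzrDir.create_standalone_workingtree('work')
    #   tt = TreeTransform(wt)
    #   try:
    #       # Ask for a new directory to become the root; fixup_new_roots()
    #       # re-applies its attributes and children to the existing root
    #       # instead of replacing the root entry itself.
    #       tt.new_directory('', ROOT_PARENT, 'new-root-id')
    #       tt.fixup_new_roots()
    #       tt.apply()
    #   finally:
    #       tt.finalize()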

    def trans_id_tree_file_id(self, inventory_id):
        """Determine the transaction id of a working tree file.
        """

        return sorted(FinalPaths(self).get_paths(new_ids))

    def _inventory_altered(self):
        """Determine which trans_ids need new Inventory entries.

        A new entry is needed when anything that would be reflected by an
        inventory entry changes, including file name, file_id, parent file_id,
        file kind, and the execute bit.

        Some care is taken to return entries with real changes, not cases
        where the value is deleted and then restored to its original value,
        but some actually unchanged values may be returned.

        :returns: A list of (path, trans_id) for all items requiring an
            inventory change. Ordered by path.
        """
        changed_ids = set()
        # Find entries whose file_ids are new (or changed).
        new_file_id = set(t for t in self._new_id
                          if self._new_id[t] != self.tree_file_id(t))
        for id_set in [self._new_name, self._new_parent, new_file_id,
                       self._new_executability]:
            changed_ids.update(id_set)
        # removing implies a kind change
        changed_kind = set(self._removed_contents)
        # but only when new contents were also created for the same trans_id
        changed_kind.intersection_update(self._new_contents)
        # Ignore entries that are already known to have changed.
        changed_kind.difference_update(changed_ids)
        # to keep only the truly changed ones
        changed_kind = (t for t in changed_kind
                        if self.tree_kind(t) != self.final_kind(t))
        # all kind changes will alter the inventory
        changed_ids.update(changed_kind)
        # To find entries with changed parent_ids, find parents which existed,
        # but changed file_id.
        changed_file_id = set(t for t in new_file_id if t in self._removed_id)
        # Now add all their children to the set.
        for parent_trans_id in changed_file_id:
            changed_ids.update(self.iter_tree_children(parent_trans_id))
        return sorted(FinalPaths(self).get_paths(changed_ids))

    def final_kind(self, trans_id):
        """Determine the final file kind, after any changes applied.

        :return: None if the file does not exist/has no contents. (It is
            conceivable that a path would be created without the corresponding
            contents insertion command)
        """
        if trans_id in self._new_contents:
            return self._new_contents[trans_id]
        elif trans_id in self._removed_contents:
            return None
        else:
            return self.tree_kind(trans_id)
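
    # Illustrative sketch (not part of the original module): how callers are
    # expected to consume final_kind() now that it returns None rather than
    # raising NoSuchFile. 'tt' is assumed to be any TreeTransform instance.
    #
    #   trans_id = tt.trans_id_tree_path('README')
    #   kind = tt.final_kind(trans_id)
    #   if kind is None:
    #       pass  # contents are scheduled for removal, or were never created
    #   elif kind == 'directory':
    #       pass  # children may safely be parented under this trans_id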

        # ensure that all children are registered with the transaction
        list(self.iter_tree_children(parent_id))

    @deprecated_method(deprecated_in((2, 3, 0)))
    def has_named_child(self, by_parent, parent_id, name):
        return self._has_named_child(
            name, parent_id, known_children=by_parent.get(parent_id, []))

    def _has_named_child(self, name, parent_id, known_children):
        """Does a parent already have a named child.

        :param name: The searched for name.

        :param parent_id: The parent for which the check is made.

        :param known_children: The already known children. This should have
            been recently obtained from `self.by_parent.get(parent_id)`
            (or will be if None is passed).
        """
        if known_children is None:
            known_children = self.by_parent().get(parent_id, [])
        for child in known_children:
            if self.final_name(child) == name:
                return True
        parent_path = self._tree_id_paths.get(parent_id, None)
        if parent_path is None:
            # No parent... no children
            return False
        child_path = joinpath(parent_path, name)
        child_id = self._tree_path_ids.get(child_path, None)
        if child_id is None:
            # Not known by the tree transform yet, check the filesystem
            return osutils.lexists(self._tree.abspath(child_path))
        else:
            raise AssertionError('child_id is missing: %s, %s, %s'
                                 % (name, parent_id, child_id))

    def _available_backup_name(self, name, target_id):
        """Find an available backup name.

        :param name: The basename of the file.

        :param target_id: The directory trans_id where the backup should
            be placed.
        """
        known_children = self.by_parent().get(target_id, [])
        return osutils.available_backup_name(
            name,
            lambda base: self._has_named_child(
                base, target_id, known_children))
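
    # Illustrative sketch (not part of the original module): choosing a backup
    # name beside an existing file. The '.~N~' style of suffix is what
    # osutils.available_backup_name conventionally produces; treat the exact
    # result shown as an assumption.
    #
    #   parent_id = tt.trans_id_tree_path('')
    #   backup = tt._available_backup_name('setup.py', parent_id)
    #   # e.g. 'setup.py.~1~' when 'setup.py' already exists in that directory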
623
496
def _parent_loops(self):
624
497
"""No entry should be its own ancestor"""

        if (self._new_name, self._new_parent) == ({}, {}):
            return conflicts
        for children in by_parent.itervalues():
            name_ids = []
            for child_tid in children:
                name = self.final_name(child_tid)
                if name is not None:
                    # Keep children only if they still exist in the end
                    if not self._case_sensitive_target:
                        name = name.lower()
                    name_ids.append((name, child_tid))
            name_ids.sort()
            last_name = None
            last_trans_id = None
            for name, trans_id in name_ids:
                kind = self.final_kind(trans_id)
                file_id = self.final_file_id(trans_id)
                if kind is None and file_id is None:
                    continue

    def _parent_type_conflicts(self, by_parent):
        """Children must have a directory parent"""
        conflicts = []
        for parent_id, children in by_parent.iteritems():
            if parent_id is ROOT_PARENT:
                continue
            no_children = True
            for child_id in children:
                if self.final_kind(child_id) is not None:
                    no_children = False
                    break
            if no_children:
                continue
            # There is at least a child, so we need an existing directory to
            # contain it.
            kind = self.final_kind(parent_id)
            if kind is None:
                # The directory will be deleted
                conflicts.append(('missing parent', parent_id))
            elif kind != "directory":
                # Meh, we need a *directory* to put something in it
                conflicts.append(('non-directory parent', parent_id))
        return conflicts

    def _any_contents(self, trans_ids):
        """Return true if any of the trans_ids will have contents."""
        for trans_id in trans_ids:
            kind = self.final_kind(trans_id)
            if kind is not None:
                return True
        return False

    def _set_executability(self, path, trans_id):
        """Set the executability of versioned files."""
        if self._tree._supports_executable():
            new_executability = self._new_executability[trans_id]
            abspath = self._tree.abspath(path)
            current_mode = os.stat(abspath).st_mode

        self.create_symlink(target, trans_id)

    def new_orphan(self, trans_id, parent_id):
        """Schedule an item to be orphaned.

        When a directory is about to be removed, its children, if they are not
        versioned, are moved out of the way: they don't have a parent anymore.

        :param trans_id: The trans_id of the existing item.
        :param parent_id: The parent trans_id of the item.
        """
        raise NotImplementedError(self.new_orphan)

    def _get_potential_orphans(self, dir_id):
        """Find the potential orphans in a directory.

        A directory can't be safely deleted if there are versioned files in it.
        If all the contained files are unversioned then they can be orphaned.

        The 'None' return value means that the directory contains at least one
        versioned file and should not be deleted.

        :param dir_id: The directory trans id.

        :return: A list of the orphan trans ids or None if at least one
            versioned file is present.
        """
        orphans = []
        # Find the potential orphans, stop if one item should be kept
        for child_tid in self.by_parent()[dir_id]:
            if child_tid in self._removed_contents:
                # The child is removed as part of the transform. Since it was
                # versioned before, it's not an orphan
                continue
            elif self.final_file_id(child_tid) is None:
                # The child is not versioned
                orphans.append(child_tid)
            else:
                # We have a versioned file here, searching for orphans is
                # meaningless.
                orphans = None
                break
        return orphans

    def _affected_ids(self):
        """Return the set of transform ids affected by the transform"""
        trans_ids = set(self._removed_id)

        return _PreviewTree(self)

    def commit(self, branch, message, merge_parents=None, strict=False,
               timestamp=None, timezone=None, committer=None, authors=None,
               revprops=None, revision_id=None):
        """Commit the result of this TreeTransform to a branch.

        :param branch: The branch to commit to.
        :param message: The message to attach to the commit.
        :param merge_parents: Additional parent revision-ids specified by
            pending merges.
        :param strict: If True, abort the commit if there are unversioned
            files.
        :param timestamp: if not None, seconds-since-epoch for the time and
            date. (May be a float.)
        :param timezone: Optional timezone for timestamp, as an offset in
            seconds.
        :param committer: Optional committer in email-id format.
            (e.g. "J Random Hacker <jrandom@example.com>")
        :param authors: Optional list of authors in email-id format.
        :param revprops: Optional dictionary of revision properties.
        :param revision_id: Optional revision id. (Specifying a revision-id
            may reduce performance for some non-native formats.)
        :return: The revision_id of the revision committed.
        """
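        # Illustrative sketch (not part of the original module): committing a
        # preview transform straight to a branch. TransformPreview, new_file
        # and commit are defined in this module; the branch object and file
        # contents are assumed for illustration.
        #
        #   preview = TransformPreview(branch.basis_tree())
        #   try:
        #       preview.new_file('NEWS', preview.root, ['initial\n'],
        #                        'news-file-id')
        #       rev_id = preview.commit(branch, 'add NEWS',
        #                               committer='J Random <j@example.com>')
        #   finally:
        #       preview.finalize()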
        self._check_malformed()


class DiskTreeTransform(TreeTransformBase):
    """Tree transform storing its contents on disk."""

    def __init__(self, tree, limbodir, pb=None,
                 case_sensitive=True):
        """Constructor.

        :param tree: The tree that will be transformed, but not necessarily
            the output tree.
        :param limbodir: A directory where new files can be stored until
            they are installed in their proper places
        :param pb: ignored
        :param case_sensitive: If True, the target of the transform is
            case sensitive, not just case preserving.
        """

        Even for trans_ids that have no new contents, we must remove their
        entries from _limbo_files, because they are now stale.
        """
        for trans_id in trans_ids:
            old_path = self._limbo_files[trans_id]
            self._possibly_stale_limbo_files.add(old_path)
            del self._limbo_files[trans_id]
            if trans_id not in self._new_contents:
                continue
            new_path = self._limbo_name(trans_id)
            os.rename(old_path, new_path)
            self._possibly_stale_limbo_files.remove(old_path)
            for descendant in self._limbo_descendants(trans_id):
                desc_path = self._limbo_files[descendant]
                desc_path = new_path + desc_path[len(old_path):]
                self._limbo_files[descendant] = desc_path

    def _limbo_descendants(self, trans_id):
        """Return the set of trans_ids whose limbo paths descend from this."""
        descendants = set(self._limbo_children.get(trans_id, []))
        for descendant in list(descendants):
            descendants.update(self._limbo_descendants(descendant))
        return descendants

    def create_file(self, contents, trans_id, mode_id=None, sha1=None):
        """Schedule creation of a new file.

        :seealso: new_file.

        :param contents: an iterator of strings, all of which will be written
            to the target destination.
        :param trans_id: TreeTransform handle
        :param mode_id: If not None, force the mode of the target file to match
            the mode of the object referenced by mode_id.
            Otherwise, we will try to preserve mode bits of an existing file.
        :param sha1: If the sha1 of this content is already known, pass it in.
            We can use it to prevent future sha1 computations.
        """
        name = self._limbo_name(trans_id)
        f = open(name, 'wb')
        try:
            unique_add(self._new_contents, trans_id, 'file')
            f.writelines(contents)
        finally:
            f.close()
        self._set_mtime(name)
        self._set_mode(trans_id, mode_id, S_ISREG)
        # It is unfortunate we have to use lstat instead of fstat, but we just
        # used utime and chmod on the file, so we need the accurate final
        # details
        if sha1 is not None:
            self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
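
    # Illustrative sketch (not part of the original module): passing a known
    # sha1 so that apply() can later seed the tree's stat/sha cache without
    # re-hashing the file. 'tt' and 'parent_id' are assumed; osutils.sha_strings
    # is the standard bzrlib helper for hashing a list of strings.
    #
    #   lines = ['hello\n']
    #   trans_id = tt.create_path('hello.txt', parent_id)
    #   tt.create_file(lines, trans_id, sha1=osutils.sha_strings(lines))
    #   tt.version_file('hello-file-id', trans_id)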

    def _read_file_chunks(self, trans_id):
        cur_file = open(self._limbo_name(trans_id), 'rb')

            del self._limbo_children_names[trans_id]
        delete_any(self._limbo_name(trans_id))

    def new_orphan(self, trans_id, parent_id):
        # FIXME: There is no tree config, so we use the branch one (it's weird
        # to define it this way as orphaning can only occur in a working tree,
        # but that's all we have (for now). It will find the option in
        # locations.conf or bazaar.conf though) -- vila 20100916
        conf = self._tree.branch.get_config()
        conf_var_name = 'bzr.transform.orphan_policy'
        orphan_policy = conf.get_user_option(conf_var_name)
        default_policy = orphaning_registry.default_key
        if orphan_policy is None:
            orphan_policy = default_policy
        if orphan_policy not in orphaning_registry:
            trace.warning('%s (from %s) is not a known policy, defaulting '
                          'to %s' % (orphan_policy, conf_var_name, default_policy))
            orphan_policy = default_policy
        handle_orphan = orphaning_registry.get(orphan_policy)
        handle_orphan(self, trans_id, parent_id)


class OrphaningError(errors.BzrError):

    # Only bugs could lead to such exception being seen by the user
    internal_error = True
    _fmt = "Error while orphaning %s in %s directory"

    def __init__(self, orphan, parent):
        errors.BzrError.__init__(self)
        self.orphan = orphan
        self.parent = parent


class OrphaningForbidden(OrphaningError):

    _fmt = "Policy: %s doesn't allow creating orphans."

    def __init__(self, policy):
        errors.BzrError.__init__(self)
        self.policy = policy


def move_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This creates a new orphan in the `bzr-orphans` dir at the root of the
    `TreeTransform`.

    :param tt: The TreeTransform orphaning `trans_id`.

    :param orphan_id: The trans id that should be orphaned.

    :param parent_id: The orphan parent trans id.
    """
    # Add the orphan dir if it doesn't exist
    orphan_dir_basename = 'bzr-orphans'
    od_id = tt.trans_id_tree_path(orphan_dir_basename)
    if tt.final_kind(od_id) is None:
        tt.create_directory(od_id)
    parent_path = tt._tree_id_paths[parent_id]
    # Find a name that doesn't exist yet in the orphan dir
    actual_name = tt.final_name(orphan_id)
    new_name = tt._available_backup_name(actual_name, od_id)
    tt.adjust_path(new_name, od_id, orphan_id)
    trace.warning('%s has been orphaned in %s'
                  % (joinpath(parent_path, actual_name), orphan_dir_basename))


def refuse_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This refuses to create orphan, letting the caller handle the conflict.
    """
    raise OrphaningForbidden('never')


orphaning_registry = registry.Registry()
orphaning_registry.register(
    'conflict', refuse_orphan,
    'Leave orphans in place and create a conflict on the directory.')
orphaning_registry.register(
    'move', move_orphan,
    'Move orphans into the bzr-orphans directory.')
orphaning_registry._set_default_key('conflict')
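
# Illustrative sketch (not part of the original module): a hedged example of
# registering an extra policy with orphaning_registry and selecting it through
# the 'bzr.transform.orphan_policy' option that new_orphan() reads above. The
# policy name 'noisy-move' is invented for illustration.
#
#   def noisy_move_orphan(tt, orphan_id, parent_id):
#       trace.warning('orphaning %s' % tt.final_name(orphan_id))
#       return move_orphan(tt, orphan_id, parent_id)
#
#   orphaning_registry.register(
#       'noisy-move', noisy_move_orphan,
#       'Move orphans into the bzr-orphans directory, warning about each one.')
#
#   # Then, for a given branch:
#   #   branch.get_config().set_user_option(
#   #       'bzr.transform.orphan_policy', 'noisy-move')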


class TreeTransform(DiskTreeTransform):
    """Represent a tree transformation.
    """

        tree_paths = list(self._tree_path_ids.iteritems())
        tree_paths.sort(reverse=True)
        child_pb = ui.ui_factory.nested_progress_bar()
        try:
            for num, (path, trans_id) in enumerate(tree_paths):
                # do not attempt to move root into a subdirectory of itself.
                if path == '':
                    continue
                child_pb.update(gettext('removing file'), num, len(tree_paths))
                full_path = self._tree.abspath(path)
                if trans_id in self._removed_contents:
                    delete_path = os.path.join(self._deletiondir, trans_id)
                    mover.pre_delete(full_path, delete_path)
                elif (trans_id in self._new_name
                      or trans_id in self._new_parent):
                    try:
                        mover.rename(full_path, self._limbo_name(trans_id))
                    except errors.TransformRenameFailed, e:
                        if e.errno != errno.ENOENT:
                            raise

        modified_paths = []
        new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
                                 new_paths)
        child_pb = ui.ui_factory.nested_progress_bar()
        try:
            for num, (path, trans_id) in enumerate(new_paths):
                if (num % 10) == 0:
                    child_pb.update(gettext('adding file'), num, len(new_paths))
                full_path = self._tree.abspath(path)
                if trans_id in self._needs_rename:
                    try:
                        mover.rename(self._limbo_name(trans_id), full_path)
                    except errors.TransformRenameFailed, e:
                        # We may be renaming a dangling inventory id
                        if e.errno != errno.ENOENT:
                            raise
                    else:
                        self.rename_count += 1
                    # TODO: if trans_id in self._observed_sha1s, we should
                    #       re-stat the final target, since ctime will be
                    #       updated by the change.
                if (trans_id in self._new_contents or
                    self.path_changed(trans_id)):
                    if trans_id in self._new_contents:
                        modified_paths.append(full_path)
                if trans_id in self._new_executability:
                    self._set_executability(path, trans_id)
                if trans_id in self._observed_sha1s:
                    o_sha1, o_st_val = self._observed_sha1s[trans_id]
                    st = osutils.lstat(full_path)
                    self._observed_sha1s[trans_id] = (o_sha1, st)
        finally:
            child_pb.finished()
        for path, trans_id in new_paths:
            # new_paths includes stuff like workingtree conflicts. Only the
            # stuff in new_contents actually comes from limbo.
            if trans_id in self._limbo_files:
                del self._limbo_files[trans_id]
        self._new_contents.clear()
        return modified_paths

    def _apply_observed_sha1s(self):
        """After we have finished renaming everything, update observed sha1s

        This has to be done after self._tree.apply_inventory_delta, otherwise
        it doesn't know anything about the files we are updating. Also, we want
        to do this as late as possible, so that most entries end up cached.
        """
        # TODO: this doesn't update the stat information for directories. So
        #       the first 'bzr status' will still need to rewrite
        #       .bzr/checkout/dirstate. However, we at least don't need to
        #       re-read all of the files.
        # TODO: If the operation took a while, we could do a time.sleep(3) here
        #       to allow the clock to tick over and ensure we won't have any
        #       problems. (we could observe start time, and finish time, and if
        #       it is less than eg 10% overhead, add a sleep call.)
        paths = FinalPaths(self)
        for trans_id, observed in self._observed_sha1s.iteritems():
            path = paths.get_path(trans_id)
            # We could get the file_id, but dirstate prefers to use the path
            # anyway, and it is 'cheaper' to determine.
            # file_id = self._new_id[trans_id]
            self._tree._observed_sha1(None, path, observed)


class TransformPreview(DiskTreeTransform):
    """A TreeTransform for generating preview trees.
    """

            yield self._get_repository().revision_tree(revision_id)

    def _get_file_revision(self, file_id, vf, tree_revision):
        parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
                       self._iter_parent_trees()]
        vf.add_lines((file_id, tree_revision), parent_keys,
                     self.get_file_lines(file_id))
        repo = self._get_repository()
        base_vf = repo.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return tree_revision

    def _stat_limbo_file(self, file_id=None, trans_id=None):
        if trans_id is None:
            trans_id = self._transform.trans_id_file_id(file_id)
        name = self._transform._limbo_name(trans_id)
        return os.lstat(name)

    def get_file_size(self, file_id):
        """See Tree.get_file_size"""
        trans_id = self._transform.trans_id_file_id(file_id)
        kind = self._transform.final_kind(trans_id)
        if kind != 'file':
            return None
        if trans_id in self._transform._new_contents:
            return self._stat_limbo_file(trans_id=trans_id).st_size
        if self.kind(file_id) == 'file':
            return self._transform._tree.get_file_size(file_id)
        else:
            return None

    def get_file_verifier(self, file_id, path=None, stat_value=None):
        trans_id = self._transform.trans_id_file_id(file_id)
        kind = self._transform._new_contents.get(trans_id)
        if kind is None:
            return self._transform._tree.get_file_verifier(file_id)
        if kind == 'file':
            fileobj = self.get_file(file_id)
            try:
                return ("SHA1", sha_file(fileobj))
            finally:
                fileobj.close()

    def get_file_sha1(self, file_id, path=None, stat_value=None):
        trans_id = self._transform.trans_id_file_id(file_id)
        kind = self._transform._new_contents.get(trans_id)

        pp.next_phase()
        file_trans_id[wt.get_root_id()] = \
            tt.trans_id_tree_file_id(wt.get_root_id())
        pb = ui.ui_factory.nested_progress_bar()
        try:
            deferred_contents = []
            total = len(tree.all_file_ids())
            if delta_from_tree:
                precomputed_delta = []
            else:
                precomputed_delta = None
            # Check if tree inventory has content. If so, we populate
            # existing_files with the directory content. If there are no
            # entries we skip populating existing_files as it's not used.
            # This improves performance and avoids unnecessary work on
            # large directory trees. (#501307)
            if total > 0:
                existing_files = set()
                for dir, files in wt.walkdirs():
                    existing_files.update(f[0] for f in files)
            for num, (tree_path, entry) in \
                    enumerate(tree.iter_entries_by_dir()):
                pb.update(gettext("Building tree"), num - len(deferred_contents), total)
                if entry.parent_id is None:
                    continue
                reparent = False

    if accelerator_tree is None:
        new_desired_files = desired_files
    else:
        iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
        unchanged = [(f, p[1]) for (f, p, c, v, d, n, k, e)
                     in iter if not (c or e[0] != e[1])]
        if accelerator_tree.supports_content_filtering():
            unchanged = [(f, p) for (f, p) in unchanged
                         if not accelerator_tree.iter_search_rules([p]).next()]
        unchanged = dict(unchanged)
        new_desired_files = []
        count = 0
        for file_id, (trans_id, tree_path, text_sha1) in desired_files:
            accelerator_path = unchanged.get(file_id)
            if accelerator_path is None:
                new_desired_files.append((file_id,
                    (trans_id, tree_path, text_sha1)))
                continue
            pb.update(gettext('Adding file contents'), count + offset, total)
            if hardlink:
                tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
                                   trans_id)

        offset += count
    for count, ((trans_id, tree_path, text_sha1), contents) in enumerate(
            tree.iter_files_bytes(new_desired_files)):
        if wt.supports_content_filtering():
            filters = wt._content_filter_stack(tree_path)
            contents = filtered_output_bytes(contents, filters,
                ContentFilterContext(tree_path, tree))
        tt.create_file(contents, trans_id, sha1=text_sha1)
        pb.update(gettext('Adding file contents'), count + offset, total)
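
    # Illustrative sketch (not part of the original module): the loops above
    # are driven by the module's public build_tree() entry point. The call
    # below, using an accelerator tree so unchanged texts can be hardlinked,
    # reflects the assumed signature build_tree(tree, wt, accelerator_tree,
    # hardlink) rather than code visible in this excerpt.
    #
    #   from bzrlib import transform
    #   transform.build_tree(branch.basis_tree(), wt,
    #                        accelerator_tree=other_wt, hardlink=True)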


def _reparent_children(tt, old_parent, new_parent):
    for child in tt.iter_tree_children(old_parent):
        tt.adjust_path(tt.final_name(child), new_parent, child)


def _reparent_transform_children(tt, old_parent, new_parent):
    by_parent = tt.by_parent()
    for child in by_parent[old_parent]:
        tt.adjust_path(tt.final_name(child), new_parent, child)
    return by_parent[old_parent]


def _content_match(tree, entry, file_id, kind, target_path):
    if entry.kind != kind:
        return False
    if entry.kind == "directory":
        return True
    if entry.kind == "file":
        f = file(target_path, 'rb')
        try:
            if tree.get_file_text(file_id) == f.read():
                return True
        finally:
            f.close()
    elif entry.kind == "symlink":
        if tree.get_symlink_target(file_id) == os.readlink(target_path):
            return True
    return False

        raise errors.BadFileKindError(name, kind)


@deprecated_function(deprecated_in((1, 9, 0)))
def create_by_entry(tt, entry, tree, trans_id, lines=None, mode_id=None):
    """Create new file contents according to an inventory entry.

    DEPRECATED. Use create_from_tree instead.
    """
    if entry.kind == "file":
        if lines is None:
            lines = tree.get_file(entry.file_id).readlines()
        tt.create_file(lines, trans_id, mode_id=mode_id)
    elif entry.kind == "symlink":
        tt.create_symlink(tree.get_symlink_target(entry.file_id), trans_id)
    elif entry.kind == "directory":
        tt.create_directory(trans_id)


def create_from_tree(tt, trans_id, tree, file_id, bytes=None,
    filter_tree_path=None):
    """Create new file contents according to tree contents.
    """

    return new_name


def _entry_changes(file_id, entry, working_tree):
    """Determine in which ways the inventory entry has changed.

    Returns booleans: has_contents, content_mod, meta_mod
    has_contents means there are currently contents, but they differ
    contents_mod means contents need to be modified
    meta_mod means the metadata needs to be modified
    """
    cur_entry = working_tree.inventory[file_id]
    try:
        working_kind = working_tree.kind(file_id)
        has_contents = True
    except NoSuchFile:
        has_contents = False
        contents_mod = True
        meta_mod = False
    if has_contents is True:
        if entry.kind != working_kind:
            contents_mod, meta_mod = True, False
        else:
            cur_entry._read_tree_state(working_tree.id2path(file_id),
                                       working_tree)
            contents_mod, meta_mod = entry.detect_changes(cur_entry)
            cur_entry._forget_tree_state()
    return has_contents, contents_mod, meta_mod


def revert(working_tree, target_tree, filenames, backups=False,
           pb=None, change_reporter=None):
    """Revert a working tree's contents to those of a target tree."""
    target_tree.lock_read()
    pb = ui.ui_factory.nested_progress_bar()
    tt = TreeTransform(working_tree, pb)
    try:
        pp = ProgressPhase("Revert phase", 3, pb)

    try:
        deferred_files = []
        for id_num, (file_id, path, changed_content, versioned, parent, name,
                kind, executable) in enumerate(change_list):
            target_path, wt_path = path
            target_versioned, wt_versioned = versioned
            target_parent, wt_parent = parent
            target_name, wt_name = name
            target_kind, wt_kind = kind
            target_executable, wt_executable = executable
            if skip_root and wt_parent is None:
                continue
            trans_id = tt.trans_id_file_id(file_id)
            mode_id = None
            if changed_content:
                keep_content = False
                if wt_kind == 'file' and (backups or target_kind is None):
                    wt_sha1 = working_tree.get_file_sha1(file_id)
                    if merge_modified.get(file_id) != wt_sha1:
                        # acquire the basis tree lazily to prevent the
                        # expense of accessing it when it's not needed ?
                        # (Guessing, RBC, 200702)
                        if basis_tree is None:
                            basis_tree = working_tree.basis_tree()
                            basis_tree.lock_read()
                        if basis_tree.has_id(file_id):
                            if wt_sha1 != basis_tree.get_file_sha1(file_id):
                                keep_content = True
                        elif target_kind is None and not target_versioned:
                            keep_content = True
                if wt_kind is not None:
                    if not keep_content:
                        tt.delete_contents(trans_id)
                    elif target_kind is not None:
                        parent_trans_id = tt.trans_id_file_id(wt_parent)
                        backup_name = tt._available_backup_name(
                            wt_name, parent_trans_id)
                        tt.adjust_path(backup_name, parent_trans_id, trans_id)
                        new_trans_id = tt.create_path(wt_name, parent_trans_id)
                        if wt_versioned and target_versioned:
                            tt.unversion_file(trans_id)
                            tt.version_file(file_id, new_trans_id)
                        # New contents should have the same unix perms as old
                        # contents
                        mode_id = trans_id
                        trans_id = new_trans_id
                if target_kind in ('directory', 'tree-reference'):
                    tt.create_directory(trans_id)
                    if target_kind == 'tree-reference':
                        revision = target_tree.get_reference_revision(file_id,
                                                                      target_path)
                        tt.set_tree_reference(revision, trans_id)
                elif target_kind == 'symlink':
                    tt.create_symlink(target_tree.get_symlink_target(file_id),
                                      trans_id)
                elif target_kind == 'file':
                    deferred_files.append((file_id, (trans_id, mode_id)))
                    if basis_tree is None:
                        basis_tree = working_tree.basis_tree()
                        basis_tree.lock_read()
                    new_sha1 = target_tree.get_file_sha1(file_id)
                    if (basis_tree.has_id(file_id) and
                        new_sha1 == basis_tree.get_file_sha1(file_id)):
                        if file_id in merge_modified:
                            del merge_modified[file_id]
                    else:
                        merge_modified[file_id] = new_sha1

                    # preserve the execute bit when backing up
                    if keep_content and wt_executable == target_executable:
                        tt.set_executability(target_executable, trans_id)
                elif target_kind is not None:
                    raise AssertionError(target_kind)
            if not wt_versioned and target_versioned:
                tt.version_file(file_id, trans_id)
            if wt_versioned and not target_versioned:
                tt.unversion_file(trans_id)
            if (target_name is not None and
                (wt_name != target_name or wt_parent != target_parent)):
                if target_name == '' and target_parent is None:
                    parent_trans = ROOT_PARENT
                else:
                    parent_trans = tt.trans_id_file_id(target_parent)
                if wt_parent is None and wt_versioned:
                    tt.adjust_root_path(target_name, parent_trans)
                else:
                    tt.adjust_path(target_name, parent_trans, trans_id)
            if wt_executable != target_executable and target_kind == "file":
                tt.set_executability(target_executable, trans_id)
        if working_tree.supports_content_filtering():
            for index, ((trans_id, mode_id), bytes) in enumerate(
                target_tree.iter_files_bytes(deferred_files)):
                file_id = deferred_files[index][0]
                # We're reverting a tree to the target tree so using the
                # target tree to find the file path makes the most sense.
                filter_tree_path = target_tree.id2path(file_id)
                filters = working_tree._content_filter_stack(filter_tree_path)
                bytes = filtered_output_bytes(bytes, filters,
                    ContentFilterContext(filter_tree_path, working_tree))
                tt.create_file(bytes, trans_id, mode_id)
        else:
            for (trans_id, mode_id), bytes in target_tree.iter_files_bytes(
                deferred_files):
                tt.create_file(bytes, trans_id, mode_id)
        tt.fixup_new_roots()
    finally:
        if basis_tree is not None:
            basis_tree.unlock()
    return merge_modified


def resolve_conflicts(tt, pb=None, pass_func=None):
    """Make many conflict-resolution attempts, but die if they fail"""
    if pass_func is None:
        pass_func = conflict_pass
    new_conflicts = set()
    pb = ui.ui_factory.nested_progress_bar()
    try:
        for n in range(10):
            pb.update(gettext('Resolution pass'), n+1, 10)
            conflicts = tt.find_conflicts()
            if len(conflicts) == 0:
                return new_conflicts
            new_conflicts.update(pass_func(tt, conflicts))
        raise MalformedTransform(conflicts=conflicts)
    finally:
        pb.finished()
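
# Illustrative sketch (not part of the original module): a hedged end-to-end
# use of the conflict machinery defined here. The working tree 'wt' and the
# path being deleted are assumed; TreeTransform, find_conflicts,
# resolve_conflicts and apply are the calls defined in this module.
#
#   tt = TreeTransform(wt)
#   try:
#       tt.delete_contents(tt.trans_id_tree_path('subdir'))
#       if tt.find_conflicts():
#           resolve_conflicts(tt)
#       tt.apply()
#   finally:
#       tt.finalize()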


def conflict_pass(tt, conflicts, path_tree=None):

        elif c_type == 'missing parent':
            trans_id = conflict[1]
            if trans_id in tt._removed_contents:
                cancel_deletion = True
                orphans = tt._get_potential_orphans(trans_id)
                if orphans:
                    cancel_deletion = False
                    # All children are orphans
                    for o in orphans:
                        try:
                            tt.new_orphan(o, trans_id)
                        except OrphaningError:
                            # Something bad happened so we cancel the directory
                            # deletion which will leave it in place with a
                            # conflict. The user can deal with it from there.
                            # Note that this also catches the case where we
                            # don't want to create orphans and leave the
                            # directory in place.
                            cancel_deletion = True
                            break
                if cancel_deletion:
                    # Cancel the directory deletion
                    tt.cancel_deletion(trans_id)
                    new_conflicts.add(('deleting parent', 'Not deleting',
                                       trans_id))
            else:
                try:
                    tt.final_name(trans_id)

        modified_path = fp.get_path(conflict[2])
        modified_id = tt.final_file_id(conflict[2])
        if len(conflict) == 3:
            yield conflicts.Conflict.factory(
                c_type, action=action, path=modified_path, file_id=modified_id)
        else:
            conflicting_path = fp.get_path(conflict[3])
            conflicting_id = tt.final_file_id(conflict[3])
            yield conflicts.Conflict.factory(
                c_type, action=action, path=modified_path,
                file_id=modified_id,
                conflict_path=conflicting_path,
                conflict_file_id=conflicting_id)


class _FileMover(object):

    def __init__(self):
        self.past_renames = []
        self.pending_deletions = []

    def rename(self, from_, to):
        """Rename a file from one path to another."""
        try:
            os.rename(from_, to)
        except OSError, e:
            if e.errno in (errno.EEXIST, errno.ENOTEMPTY):
                raise errors.FileExists(to, str(e))
            # normal OSError doesn't include filenames so it's hard to see where
            # the problem is, see https://bugs.launchpad.net/bzr/+bug/491763
            raise errors.TransformRenameFailed(from_, to, str(e), e.errno)
        self.past_renames.append((from_, to))

    def pre_delete(self, from_, to):