# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

import sys, os, os.path, random, time, sha, sets, types, re, shutil, tempfile
import traceback, socket, fnmatch, difflib
from binascii import hexlify

from bzrlib.trace import mutter, note
from bzrlib.errors import bailout, BzrError
from bzrlib.inventory import InventoryEntry, Inventory
from bzrlib.tree import Tree, EmptyTree, RevisionTree, WorkingTree
from bzrlib.osutils import isdir, quotefn, isfile, uuid, sha_file, username, \
     format_date, compact_date, pumpfile, user_email, rand_bytes, splitpath, \
     joinpath, sha_string, file_kind, local_time_offset, appendpath
from bzrlib.store import ImmutableStore
from bzrlib.revision import Revision
from bzrlib.textui import show_status
from bzrlib.diff import diff_trees

BZR_BRANCH_FORMAT = "Bazaar-NG branch, format 0.0.4\n"


## TODO: Maybe include checks for common corruption of newlines, etc?


def find_branch(f, **args):
    if f and (f.startswith('http://') or f.startswith('https://')):
        from bzrlib import remotebranch
        return remotebranch.RemoteBranch(f, **args)
    else:
        return Branch(f, **args)
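
# A brief usage sketch for find_branch (the paths and URL below are
# hypothetical, for illustration only):
#
#     b = find_branch('.')                           # local Branch for cwd
#     rb = find_branch('http://example.com/branch')  # remotebranch.RemoteBranch
#
# Only the URL scheme is inspected here; whether the location actually
# contains a branch is checked by the Branch/RemoteBranch constructors.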


def find_cached_branch(f, cache_root, **args):
    from bzrlib.remotebranch import RemoteBranch
    br = find_branch(f, **args)

    def cacheify(br, store_name):
        from meta_store import CachedStore
        cache_path = os.path.join(cache_root, store_name)
        new_store = CachedStore(getattr(br, store_name), cache_path)
        setattr(br, store_name, new_store)

    if isinstance(br, RemoteBranch):
        cacheify(br, 'inventory_store')
        cacheify(br, 'text_store')
        cacheify(br, 'revision_store')
    return br


def _relpath(base, path):
    """Return path relative to base, or raise exception.

    The path may be either an absolute path or a path relative to the
    current working directory.

    Lifted out of Branch.relpath for ease of testing.

    os.path.commonprefix (python2.4) has a bad bug that it works just
    on string prefixes, assuming that '/u' is a prefix of '/u2'.  This
    avoids that problem."""
    rp = os.path.abspath(path)

    s = []
    head = rp
    while len(head) >= len(base):
        if head == base:
            return os.sep.join(s)
        head, tail = os.path.split(head)
        if tail:
            s.insert(0, tail)
    from bzrlib.errors import NotBranchError
    raise NotBranchError("path %r is not within branch %r" % (rp, base))
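
# Illustrative behaviour of _relpath (a sketch, not part of the test suite;
# POSIX-style separators assumed):
#
#     _relpath('/u/branch', '/u/branch/foo/bar')   returns 'foo/bar'
#     _relpath('/u', '/u2/other')                  raises NotBranchError
#
# The second case is exactly where os.path.commonprefix goes wrong, since it
# treats '/u' as a string prefix of '/u2'.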


def find_branch_root(f=None):
    """Find the branch root enclosing f, or pwd.

    f may be a filename or a URL.

    It is not necessary that f exists.

    Basically we keep looking up until we find the control directory or
    run into the root."""
    if f == None:
        f = os.getcwd()
    else:
        f = os.path.abspath(f)
    orig_f = f

    while True:
        if os.path.exists(os.path.join(f, '.bzr')):
            return f
        head, tail = os.path.split(f)
        if head == f:
            # reached the filesystem root without finding a control directory
            raise BzrError('%r is not in a branch' % orig_f)
        f = head


class DivergedBranches(Exception):
    def __init__(self, branch1, branch2):
        self.branch1 = branch1
        self.branch2 = branch2
        Exception.__init__(self, "These branches have diverged.")


class NoSuchRevision(BzrError):
    def __init__(self, branch, revision):
        self.branch = branch
        self.revision = revision
        msg = "Branch %s has no revision %d" % (branch, revision)
        BzrError.__init__(self, msg)


######################################################################

class Branch(object):
    """Branch holding a history of revisions.

    base
        Base directory of the branch.

    _lock_count
        If _lock_mode is true, a positive count of the number of times the
        lock has been taken.

    _lock
        Lock object from bzrlib.lock.

    TODO: Perhaps use different stores for different classes of object,
        so that we can keep track of how much space each one uses,
        or garbage-collect them.

    TODO: Add a RemoteBranch subclass.  For the basic case of read-only
        HTTP access this should be very easy by just redirecting
        controlfile access into HTTP requests.  We would need a
        RemoteStore working similarly.

    TODO: Keep the on-disk branch locked while the object exists.
    """

    def __init__(self, base, init=False, find_root=True):
        """Create new branch object at a particular location."""
        fmt = self.controlfile('branch-format', 'r').read()
        # normalize line endings so branches created on Windows can be
        # read elsewhere
        fmt = fmt.replace('\r\n', '\n')
        if fmt != BZR_BRANCH_FORMAT:
            raise BzrError('sorry, branch format %r not supported' % fmt,
                           ['use a different bzr version',
                            'or remove the .bzr directory and "bzr init" again'])
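
    # The control file checked above, .bzr/branch-format, is expected to hold
    # exactly the text in BZR_BRANCH_FORMAT, i.e. the single line
    #
    #     Bazaar-NG branch, format 0.0.4
    #
    # followed by a newline; anything else makes the branch unreadable by
    # this version of the code.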

    def read_working_inventory(self):
        """Read the working inventory."""
        from bzrlib.inventory import Inventory
        from bzrlib.xml import unpack_xml
        from time import time

        before = time()
        # ElementTree does its own conversion from UTF-8, so open in
        # binary mode.
        inv = unpack_xml(Inventory,
                         self.controlfile('inventory', 'rb'))
        mutter("loaded inventory of %d items in %f"
               % (len(inv), time() - before))
        return inv

    def _write_inventory(self, inv):
        """Update the working inventory."""

    def add(self, files, verbose=False, ids=None):
        """Make files versioned.

        This puts the files in the Added state, so that they will be
        recorded by the next commit.

        files
            List of paths to add, relative to the base of the tree.

        ids
            If set, use these instead of automatically generated ids.
            Must be the same length as the list of files, but may
            contain None for ids that are to be autogenerated.

        TODO: Perhaps have an option to add the ids even if the files do
              not (yet) exist.

        TODO: Perhaps return the ids of the files?  But then again it
              is easy to retrieve them if they're needed.

        TODO: Adding a directory should optionally recurse down and
              add all non-ignored children.  Perhaps do that in a
              higher-level method.

        >>> b = ScratchBranch(files=['foo'])
        >>> 'foo' in b.unknowns()
        True
        >>> b.add('foo')
        >>> 'foo' in b.unknowns()
        False
        >>> bool(b.inventory.path2id('foo'))
        True

        >>> b.add('foo')
        Traceback (most recent call last):
        BzrError: ('foo is already versioned', [])

        >>> b.add(['nothere'])
        Traceback (most recent call last):
        BzrError: ('cannot add: not a regular file or directory: nothere', [])
        """
        from bzrlib.textui import show_status
        # TODO: Re-adding a file that is removed in the working copy
        # should probably put it back with the previous ID.
        if isinstance(files, basestring):
            assert(ids is None or isinstance(ids, basestring))
            files = [files]
            if ids is not None:
                ids = [ids]

        if ids is None:
            ids = [None] * len(files)
        else:
            assert(len(ids) == len(files))

        inv = self.read_working_inventory()
        for f, file_id in zip(files, ids):
            if is_control_file(f):
                raise BzrError("cannot add control file %s" % quotefn(f))

            fp = splitpath(f)
            if len(fp) == 0:
                raise BzrError("cannot add top-level %r" % f)

            fullpath = os.path.normpath(self.abspath(f))

            try:
                kind = file_kind(fullpath)
            except OSError:
                # maybe something better?
                raise BzrError('cannot add: not a regular file or directory: %s' % quotefn(f))

            if kind != 'file' and kind != 'directory':
                raise BzrError('cannot add: not a regular file or directory: %s' % quotefn(f))

            if file_id is None:
                file_id = gen_file_id(f)
            inv.add_path(f, kind=kind, file_id=file_id)

            if verbose:
                print 'added', quotefn(f)

            mutter("add file %s file_id:{%s} kind=%r" % (f, file_id, kind))

        self._write_inventory(inv)

    def print_file(self, file, revno):
        """Print `file` to stdout."""
        tree = self.revision_tree(self.lookup_revision(revno))
        # use inventory as it was in that revision
        file_id = tree.inventory.path2id(file)
        if not file_id:
            raise BzrError("%r is not present in revision %d" % (file, revno))
        tree.print_file(file_id)

    def remove(self, files, verbose=False):
        """Mark nominated files for removal from the inventory.

        TODO: Should this remove the text or not?  Tough call; not
        removing may be useful and the user can just use rm, which
        is the opposite of add.  Removing it is consistent with most
        other tools.  Maybe an option.
        """
        from bzrlib.textui import show_status
        ## TODO: Normalize names
        ## TODO: Remove nested loops; better scalability
        if isinstance(files, basestring):
            files = [files]

        tree = self.working_tree()
        inv = tree.inventory

        # do this before any modifications
        for f in files:
            fid = inv.path2id(f)
            if not fid:
                raise BzrError("cannot remove unversioned file %s" % quotefn(f))
            mutter("remove inventory entry %s {%s}" % (quotefn(f), fid))
            if verbose:
                # having removed it, it must be either ignored or unknown
                if tree.is_ignored(f):
                    new_status = 'I'
                else:
                    new_status = '?'
                show_status(new_status, inv[fid].kind, quotefn(f))
            del inv[fid]

        self._write_inventory(inv)

    # FIXME: this doesn't need to be a branch method
    def set_inventory(self, new_inventory_list):
        from bzrlib.inventory import Inventory, InventoryEntry
        inv = Inventory()
        for path, file_id, parent, kind in new_inventory_list:
            name = os.path.basename(path)
            inv.add(InventoryEntry(file_id, name, kind, parent))
        self._write_inventory(inv)

    def unknowns(self):
        """Return all unknown files."""
        return self.working_tree().unknowns()

    def commit(self, message, timestamp=None, timezone=None,
               committer=None, verbose=False):
        """Commit working copy as a new revision.

        The basic approach is to add all the file texts into the
        store, then the inventory, then make a new revision pointing
        to that inventory and store that.

        This is not quite safe if the working copy changes during the
        commit; for the moment that is simply not allowed.  A better
        approach is to make a temporary copy of the files before
        computing their hashes, and then add those hashes in turn to
        the inventory.  This should mean at least that there are no
        broken hash pointers.  There is no way we can get a snapshot
        of the whole directory at an instant.  This would also have to
        be robust against files disappearing, moving, etc.  So the
        whole thing is a bit hard.

        timestamp -- if not None, seconds-since-epoch for a
             postdated/predated commit.
        """
        ## TODO: Show branch names

        # TODO: Don't commit if there are no changes, unless forced?

        # First walk over the working inventory; and both update that
        # and also build a new revision inventory.  The revision
        # inventory needs to hold the text-id, sha1 and size of the
        # actual file versions committed in the revision.  (These are
        # not present in the working inventory.)  We also need to
        # detect missing/deleted files, and remove them from the
        # working inventory.

        work_inv = self.read_working_inventory()
        inv = Inventory()
        basis = self.basis_tree()
        basis_inv = basis.inventory
        missing_ids = []

        for path, entry in work_inv.iter_entries():
            ## TODO: Cope with files that have gone missing.

            ## TODO: Check that the file kind has not changed from the previous
            ## revision of this file (if any).

            entry = entry.copy()

            p = self.abspath(path)
            file_id = entry.file_id
            mutter('commit prep file %s, id %r ' % (p, file_id))

            if not os.path.exists(p):
                mutter("    file is missing, removing from inventory")
                if verbose:
                    show_status('D', entry.kind, quotefn(path))
                missing_ids.append(file_id)
                continue

            # TODO: Handle files that have been deleted

            # TODO: Maybe a special case for empty files?  Seems a
            # waste to store them many times.

            inv.add(entry)

            if basis_inv.has_id(file_id):
                old_kind = basis_inv[file_id].kind
                if old_kind != entry.kind:
                    bailout("entry %r changed kind from %r to %r"
                            % (file_id, old_kind, entry.kind))

            if entry.kind == 'directory':
                if not isdir(p):
                    bailout("%s is entered as directory but not a directory" % quotefn(p))
            elif entry.kind == 'file':
                if not isfile(p):
                    bailout("%s is entered as file but is not a file" % quotefn(p))

                content = file(p, 'rb').read()

                entry.text_sha1 = sha_string(content)
                entry.text_size = len(content)

                old_ie = basis_inv.has_id(file_id) and basis_inv[file_id]
                if (old_ie
                    and (old_ie.text_size == entry.text_size)
                    and (old_ie.text_sha1 == entry.text_sha1)):
                    ## assert content == basis.get_file(file_id).read()
                    entry.text_id = basis_inv[file_id].text_id
                    mutter('    unchanged from previous text_id {%s}' %
                           entry.text_id)
                else:
                    entry.text_id = gen_file_id(entry.name)
                    self.text_store.add(content, entry.text_id)
                    mutter('    stored with text_id {%s}' % entry.text_id)
                    if verbose:
                        if not old_ie:
                            state = 'A'
                        elif (old_ie.name == entry.name
                              and old_ie.parent_id == entry.parent_id):
                            state = 'M'
                        else:
                            state = 'R'
                        show_status(state, entry.kind, quotefn(path))

        for file_id in missing_ids:
            # have to do this later so we don't mess up the iterator.
            # since parents may be removed before their children we
            # have to test.
            # FIXME: There's probably a better way to do this; perhaps
            # the workingtree should know how to filter itself.
            if work_inv.has_id(file_id):
                del work_inv[file_id]

        inv_id = rev_id = _gen_revision_id(time.time())

        inv_tmp = tempfile.TemporaryFile()
        inv.write_xml(inv_tmp)
        inv_tmp.seek(0)
        self.inventory_store.add(inv_tmp, inv_id)
        mutter('new inventory_id is {%s}' % inv_id)

        self._write_inventory(work_inv)

        if timestamp == None:
            timestamp = time.time()

        if committer == None:
            committer = username()

        if timezone == None:
            timezone = local_time_offset()

        mutter("building commit log message")
        rev = Revision(timestamp=timestamp,
                       timezone=timezone,
                       committer=committer,
                       message=message,
                       precursor=self.last_patch(),
                       inventory_id=inv_id,
                       revision_id=rev_id)

        rev_tmp = tempfile.TemporaryFile()
        rev.write_xml(rev_tmp)
        rev_tmp.seek(0)
        self.revision_store.add(rev_tmp, rev_id)
        mutter("new revision_id is {%s}" % rev_id)

        ## XXX: Everything up to here can simply be orphaned if we abort
        ## the commit; it will leave junk files behind but that doesn't
        ## matter.

        ## TODO: Read back the just-generated changeset, and make sure it
        ## applies and recreates the right state.

        ## TODO: Also calculate and store the inventory SHA1

        mutter("committing patch r%d" % (self.revno() + 1))

        self.append_revision(rev_id)

        if verbose:
            note("committed r%d" % self.revno())

    def append_revision(self, revision_id):
        from bzrlib.atomicfile import AtomicFile

        mutter("add {%s} to revision-history" % revision_id)
        rev_history = self.revision_history() + [revision_id]

        f = AtomicFile(self.controlfilename('revision-history'))
        try:
            for rev_id in rev_history:
                f.write(rev_id + '\n')
            f.commit()
        finally:
            f.close()
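
    # append_revision writes the whole history through AtomicFile, which
    # builds the new contents in a temporary file and renames it into place
    # on commit(); a crash mid-write therefore never leaves readers with a
    # half-written revision-history file.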

    def get_revision(self, revision_id):
        """Return the Revision object for a named revision."""
        from bzrlib.revision import Revision
        from bzrlib.xml import unpack_xml

        if not revision_id or not isinstance(revision_id, basestring):
            raise ValueError('invalid revision-id: %r' % revision_id)
        r = unpack_xml(Revision, self.revision_store[revision_id])
        assert r.revision_id == revision_id
        return r

    def get_revision_sha1(self, revision_id):
        """Hash the stored value of a revision, and return it."""
        # In the future, revision entries will be signed.  At that
        # point, it is probably best *not* to include the signature
        # in the revision hash.  Because that lets you re-sign
        # the revision (add signatures/remove signatures) and still
        # have all hash pointers stay consistent.
        # But for now, just hash the contents.
        return sha_file(self.revision_store[revision_id])

    def get_inventory(self, inventory_id):
        """Get Inventory object by hash."""
        from bzrlib.xml import unpack_xml

        return unpack_xml(Inventory, self.inventory_store[inventory_id])

    def revision_history(self):
        """Return sequence of revision hashes on this branch.

        >>> ScratchBranch().revision_history()
        []
        """
        return [l.rstrip('\r\n') for l in
                self.controlfile('revision-history', 'r').readlines()]

    def common_ancestor(self, other, self_revno=None, other_revno=None):
        """Return (revno, revision_id) of the latest revision common to
        self and other, or (None, None) if there is none.

        >>> from bzrlib import commit
        >>> sb = ScratchBranch(files=['foo', 'foo~'])
        >>> sb.common_ancestor(sb) == (None, None)
        True
        >>> commit.commit(sb, "Committing first revision", verbose=False)
        >>> sb.common_ancestor(sb)[0]
        1
        >>> clone = sb.clone()
        >>> commit.commit(sb, "Committing second revision", verbose=False)
        >>> sb.common_ancestor(sb)[0]
        2
        >>> sb.common_ancestor(clone)[0]
        1
        >>> commit.commit(clone, "Committing divergent second revision",
        ...               verbose=False)
        >>> sb.common_ancestor(clone)[0]
        1
        >>> sb.common_ancestor(clone) == clone.common_ancestor(sb)
        True
        >>> sb.common_ancestor(sb) != clone.common_ancestor(clone)
        True
        >>> clone2 = sb.clone()
        >>> sb.common_ancestor(clone2)[0]
        2
        >>> sb.common_ancestor(clone2, self_revno=1)[0]
        1
        >>> sb.common_ancestor(clone2, other_revno=1)[0]
        1
        """
        my_history = self.revision_history()
        other_history = other.revision_history()
        if self_revno is None:
            self_revno = len(my_history)
        if other_revno is None:
            other_revno = len(other_history)
        indices = range(min((self_revno, other_revno)))
        indices.reverse()
        for r in indices:
            if my_history[r] == other_history[r]:
                return r+1, my_history[r]
        return None, None

    def enum_history(self, direction):
        """Return (revno, revision_id) for history of branch.

        direction
            'forward' is from earliest to latest
            'reverse' is from latest to earliest
        """
        rh = self.revision_history()
        if direction == 'forward':
            for i, rid in enumerate(rh):
                yield i + 1, rid
        elif direction == 'reverse':
            for i in range(len(rh), 0, -1):
                yield i, rh[i - 1]
        else:
            raise ValueError('invalid history direction', direction)

    def revno(self):
        """Return current revision number for this branch.

        That is equivalent to the number of revisions committed to
        this branch.

        >>> b = ScratchBranch()
        >>> b.revno()
        0
        >>> b.commit('no foo')
        >>> b.revno()
        1
        """
        return len(self.revision_history())

    def last_patch(self):
        """Return last patch hash, or None if no history.

        >>> ScratchBranch().last_patch() == None
        True
        """
        ph = self.revision_history()
        if ph:
            return ph[-1]
        else:
            return None

    def missing_revisions(self, other, stop_revision=None):
        """
        If self and other have not diverged, return a list of the revisions
        present in other, but missing from self.

        >>> from bzrlib.commit import commit
        >>> bzrlib.trace.silent = True
        >>> br1 = ScratchBranch()
        >>> br2 = ScratchBranch()
        >>> br1.missing_revisions(br2)
        []
        >>> commit(br2, "lala!", rev_id="REVISION-ID-1")
        >>> br1.missing_revisions(br2)
        ['REVISION-ID-1']
        >>> br2.missing_revisions(br1)
        []
        >>> commit(br1, "lala!", rev_id="REVISION-ID-1")
        >>> br1.missing_revisions(br2)
        []
        >>> commit(br2, "lala!", rev_id="REVISION-ID-2A")
        >>> br1.missing_revisions(br2)
        ['REVISION-ID-2A']
        >>> commit(br1, "lala!", rev_id="REVISION-ID-2B")
        >>> br1.missing_revisions(br2)
        Traceback (most recent call last):
        DivergedBranches: These branches have diverged.
        """
        self_history = self.revision_history()
        self_len = len(self_history)
        other_history = other.revision_history()
        other_len = len(other_history)
        common_index = min(self_len, other_len) - 1
        if common_index >= 0 and \
           self_history[common_index] != other_history[common_index]:
            raise DivergedBranches(self, other)

        if stop_revision is None:
            stop_revision = other_len
        elif stop_revision > other_len:
            raise NoSuchRevision(self, stop_revision)

        return other_history[self_len:stop_revision]

    def update_revisions(self, other, stop_revision=None):
        """Pull in all new revisions from other branch.

        >>> from bzrlib.commit import commit
        >>> bzrlib.trace.silent = True
        >>> br1 = ScratchBranch(files=['foo', 'bar'])
        >>> commit(br1, "lala!", rev_id="REVISION-ID-1", verbose=False)
        >>> br2 = ScratchBranch()
        >>> br2.update_revisions(br1)
        >>> br2.revision_history()
        ['REVISION-ID-1']
        >>> br2.update_revisions(br1)
        >>> br1.text_store.total_size() == br2.text_store.total_size()
        True
        """
        from bzrlib.progress import ProgressBar
        from sets import Set as set

        pb = ProgressBar()
        pb.update('comparing histories')
        revision_ids = self.missing_revisions(other, stop_revision)

        if hasattr(other.revision_store, "prefetch"):
            other.revision_store.prefetch(revision_ids)
        if hasattr(other.inventory_store, "prefetch"):
            inventory_ids = [other.get_revision(r).inventory_id
                             for r in revision_ids]
            other.inventory_store.prefetch(inventory_ids)

        revisions = []
        needed_texts = set()
        i = 0
        for rev_id in revision_ids:
            i += 1
            pb.update('fetching revision', i, len(revision_ids))
            rev = other.get_revision(rev_id)
            revisions.append(rev)
            inv = other.get_inventory(str(rev.inventory_id))
            for key, entry in inv.iter_entries():
                if entry.text_id is None:
                    continue
                if entry.text_id not in self.text_store:
                    needed_texts.add(entry.text_id)

        count = self.text_store.copy_multi(other.text_store, needed_texts)
        print "Added %d texts." % count
        inventory_ids = [f.inventory_id for f in revisions]
        count = self.inventory_store.copy_multi(other.inventory_store,
                                                inventory_ids)
        print "Added %d inventories." % count
        revision_ids = [f.revision_id for f in revisions]
        count = self.revision_store.copy_multi(other.revision_store,
                                               revision_ids)
        for revision_id in revision_ids:
            self.append_revision(revision_id)
        print "Added %d revisions." % count

    def lookup_revision(self, revno):
        """Return revision hash for revision number."""
        if revno == 0:
            return None
        try:
            # list is 0-based; revisions are 1-based
            return self.revision_history()[revno - 1]
        except IndexError:
            raise BzrError("no such revision %s" % revno)

    def write_log(self, show_timezone='original', verbose=False):
        """Write out human-readable log of commits to this branch.

        show_timezone -- 'original' (default), 'utc' or 'local'."""
        ## TODO: Option to choose either original, utc or local timezone
        revno = 1
        precursor = None
        for p in self.revision_history():
            print 'revno:', revno
            ## TODO: Show hash if --id is given.
            ##print 'revision-hash:', p
            rev = self.get_revision(p)
            print 'committer:', rev.committer
            print 'timestamp: %s' % (format_date(rev.timestamp, rev.timezone or 0,
                                                 show_timezone))

            ## opportunistic consistency check, same as check_patch_chaining
            if rev.precursor != precursor:
                bailout("mismatched precursor!")

            print 'message:'
            if not rev.message:
                print '  (no message)'
            else:
                for l in rev.message.split('\n'):
                    print '  ' + l

            if verbose == True and precursor != None:
                print 'changed files:'
                tree = self.revision_tree(p)
                prevtree = self.revision_tree(precursor)

                for file_state, fid, old_name, new_name, kind in \
                        diff_trees(prevtree, tree):
                    if file_state == 'A' or file_state == 'M':
                        show_status(file_state, kind, new_name)
                    elif file_state == 'D':
                        show_status(file_state, kind, old_name)
                    elif file_state == 'R':
                        show_status(file_state, kind,
                                    old_name + ' => ' + new_name)

            revno += 1
            precursor = p

    def rename_one(self, from_rel, to_rel):
        """Rename one file.

        This can change the directory or the filename or both.
        """
        tree = self.working_tree()
        inv = tree.inventory
        if not tree.has_filename(from_rel):
            raise BzrError("can't rename: old working file %r does not exist" % from_rel)
        if tree.has_filename(to_rel):
            raise BzrError("can't rename: new working file %r already exists" % to_rel)

        file_id = inv.path2id(from_rel)
        if file_id == None:
            raise BzrError("can't rename: old name %r is not versioned" % from_rel)

        if inv.path2id(to_rel):
            raise BzrError("can't rename: new name %r is already versioned" % to_rel)

        to_dir, to_tail = os.path.split(to_rel)
        to_dir_id = inv.path2id(to_dir)
        if to_dir_id == None and to_dir != '':
            raise BzrError("can't determine destination directory id for %r" % to_dir)

        mutter("rename_one:")
        mutter("  file_id    {%s}" % file_id)
        mutter("  from_rel   %r" % from_rel)
        mutter("  to_rel     %r" % to_rel)
        mutter("  to_dir     %r" % to_dir)
        mutter("  to_dir_id  {%s}" % to_dir_id)

        inv.rename(file_id, to_dir_id, to_tail)

        print "%s => %s" % (from_rel, to_rel)

        from_abs = self.abspath(from_rel)
        to_abs = self.abspath(to_rel)
        try:
            os.rename(from_abs, to_abs)
        except OSError, e:
            raise BzrError("failed to rename %r to %r: %s"
                           % (from_abs, to_abs, e[1]),
                           ["rename rolled back"])

        self._write_inventory(inv)
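
    # Note the ordering in rename_one(): the in-memory inventory is updated
    # first, but it is only written back (self._write_inventory) after
    # os.rename() succeeds, so a failed rename leaves both the inventory file
    # and the working tree unchanged; hence the "rename rolled back" hint in
    # the error.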

    def move(self, from_paths, to_name):
        """Rename files.

        to_name must be a versioned directory; the named files are moved
        into it, keeping their existing names.

        Note that to_name is only the last component of the new name;
        this doesn't change the directory.
        """
        ## TODO: Option to move IDs only
        assert not isinstance(from_paths, basestring)
        tree = self.working_tree()
        inv = tree.inventory
        to_abs = self.abspath(to_name)
        if not isdir(to_abs):
            raise BzrError("destination %r is not a directory" % to_abs)
        if not tree.has_filename(to_name):
            raise BzrError("destination %r not in working directory" % to_abs)
        to_dir_id = inv.path2id(to_name)
        if to_dir_id == None and to_name != '':
            raise BzrError("destination %r is not a versioned directory" % to_name)
        to_dir_ie = inv[to_dir_id]
        if to_dir_ie.kind not in ('directory', 'root_directory'):
            raise BzrError("destination %r is not a directory" % to_abs)

        to_idpath = inv.get_idpath(to_dir_id)

        for f in from_paths:
            if not tree.has_filename(f):
                raise BzrError("%r does not exist in working tree" % f)
            f_id = inv.path2id(f)
            if f_id == None:
                raise BzrError("%r is not versioned" % f)
            name_tail = splitpath(f)[-1]
            dest_path = appendpath(to_name, name_tail)
            if tree.has_filename(dest_path):
                raise BzrError("destination %r already exists" % dest_path)
            if f_id in to_idpath:
                raise BzrError("can't move %r to a subdirectory of itself" % f)

        # OK, so there's a race here, it's possible that someone will
        # create a file in this interval and then the rename might be
        # left half-done.  But we should have caught most problems.

        for f in from_paths:
            name_tail = splitpath(f)[-1]
            dest_path = appendpath(to_name, name_tail)
            print "%s => %s" % (f, dest_path)
            inv.rename(inv.path2id(f), to_dir_id, name_tail)
            try:
                os.rename(self.abspath(f), self.abspath(dest_path))
            except OSError, e:
                raise BzrError("failed to rename %r to %r: %s" % (f, dest_path, e[1]),
                               ["rename rolled back"])

        self._write_inventory(inv)

    def revert(self, filenames, old_tree=None, backups=True):
        """Restore selected files to the versions from a previous tree.

        backups
            If true (default) backups are made of files before
            they're changed.
        """
        from bzrlib.errors import NotVersionedError, BzrError
        from bzrlib.atomicfile import AtomicFile
        from bzrlib.osutils import backup_file

        inv = self.read_working_inventory()
        if old_tree is None:
            old_tree = self.basis_tree()
        old_inv = old_tree.inventory

        nids = []
        for fn in filenames:
            file_id = inv.path2id(fn)
            if not file_id:
                raise NotVersionedError("not a versioned file", fn)
            if not old_inv.has_id(file_id):
                raise BzrError("file not present in old tree", fn, file_id)
            nids.append((fn, file_id))

        # TODO: Rename back if it was previously at a different location

        # TODO: If given a directory, restore the entire contents from
        # the previous version.

        # TODO: Make a backup to a temporary file.

        # TODO: If the file previously didn't exist, delete it?
        for fn, file_id in nids:
            if backups:
                backup_file(fn)
            f = AtomicFile(fn, 'wb')
            try:
                f.write(old_tree.get_file(file_id).read())
                f.commit()
            finally:
                f.close()
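
    # revert() restores each selected file by writing the text recorded in
    # old_tree back over the working file through an AtomicFile, taking a
    # backup copy with backup_file() first when backups is true; paths that
    # are not versioned, or not present in the old tree, are rejected before
    # any file is touched.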

    def show_status(self, show_all=False):
        """Display single-line status for non-ignored working files.

        The list is shown sorted in order by file name.

        >>> b = ScratchBranch(files=['foo', 'foo~'])
        >>> b.show_status()
        ?       foo
        >>> b.add('foo')
        >>> b.show_status()
        A       foo
        >>> b.commit("add foo")
        >>> b.show_status()
        >>> os.unlink(b.abspath('foo'))
        >>> b.show_status()
        D       foo

        TODO: Get state for single files.
        """
        # We have to build everything into a list first so that it can be
        # sorted by name, incorporating all the different sources.

        # FIXME: Rather than getting things in random order and then sorting,
        # just step through in order.

        # Interesting case: the old ID for a file has been removed,
        # but a new file has been created under that name.

        old = self.basis_tree()
        new = self.working_tree()

        for fs, fid, oldname, newname, kind in diff_trees(old, new):
            if fs == 'R':
                show_status(fs, kind,
                            oldname + ' => ' + newname)
            elif fs == 'A' or fs == 'M':
                show_status(fs, kind, newname)
            elif fs == 'D':
                show_status(fs, kind, oldname)
            elif fs == '.':
                if show_all:
                    show_status(fs, kind, newname)
            elif fs == 'I':
                if show_all:
                    show_status(fs, kind, newname)
            elif fs == '?':
                show_status(fs, kind, newname)
            else:
                bailout("weird file state %r" % ((fs, fid),))


class ScratchBranch(Branch):