import os, types, time, tempfile, shutil, sets

from inventory import Inventory, InventoryEntry
from trace import mutter, note
from tree import Tree, EmptyTree, RevisionTree, WorkingTree
from osutils import isdir, quotefn, isfile, uuid, sha_file, username, \
     format_date, compact_date, pumpfile, user_email, rand_bytes, splitpath, \
     joinpath, sha_string, file_kind, local_time_offset, appendpath
from store import ImmutableStore
from revision import Revision
from errors import bailout, BzrError
from textui import show_status
from diff import diff_trees

BZR_BRANCH_FORMAT = "Bazaar-NG branch, format 0.0.4\n"

## TODO: Maybe include checks for common corruption of newlines, etc?

def find_branch(f, **args):
    if f and (f.startswith('http://') or f.startswith('https://')):
        import remotebranch
        return remotebranch.RemoteBranch(f, **args)
    return Branch(f, **args)
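
# Illustrative use of find_branch (the path and URL below are hypothetical):
# an http:// or https:// location is opened read-only through RemoteBranch,
# anything else is treated as a local branch directory.
#
#   b = find_branch('/home/user/project')           # local Branch
#   b = find_branch('http://example.com/project')   # RemoteBranch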

def _relpath(base, path):
    """Return path relative to base, or raise exception.

    The path may be either an absolute path or a path relative to the
    current working directory.

    Lifted out of Branch.relpath for ease of testing.

    os.path.commonprefix (python2.4) has a bad bug that it works just
    on string prefixes, assuming that '/u' is a prefix of '/u2'.  This
    avoids that problem."""
    rp = os.path.abspath(path)

    # walk up from the path one component at a time until we hit the base
    s = []
    head = rp
    while len(head) >= len(base):
        if head == base:
            break
        head, tail = os.path.split(head)
        if tail:
            s.insert(0, tail)
    else:
        from errors import NotBranchError
        raise NotBranchError("path %r is not within branch %r" % (rp, base))

    return os.sep.join(s)
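
# A quick sketch of the behaviour described above (hypothetical POSIX paths):
#
#   _relpath('/home/user/branch', '/home/user/branch/src/foo.c')
#       -> 'src/foo.c'
#   _relpath('/home/user/branch', '/home/user/branch2/foo.c')
#       -> raises NotBranchError; a plain string-prefix test such as
#          os.path.commonprefix would wrongly accept this path.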

def find_branch_root(f=None):
    """Find the branch root enclosing f, or pwd.

    f may be a filename or a URL.

    It is not necessary that f exists.

    Basically we keep looking up until we find the control directory or
    run into the root."""
    raise BzrError('%r is not in a branch' % orig_f)

class DivergedBranches(Exception):
    def __init__(self, branch1, branch2):
        self.branch1 = branch1
        self.branch2 = branch2
        Exception.__init__(self, "These branches have diverged.")


class NoSuchRevision(BzrError):
    def __init__(self, branch, revision):
        self.branch = branch
        self.revision = revision
        msg = "Branch %s has no revision %d" % (branch, revision)
        BzrError.__init__(self, msg)
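
# Illustrative only: DivergedBranches is raised by missing_revisions() and
# update_revisions() below when the two histories disagree, so a caller
# pulling from another branch typically handles it explicitly.  The branch
# objects in this sketch are hypothetical.
#
#   try:
#       local_branch.update_revisions(remote_branch)
#   except DivergedBranches:
#       pass  # histories disagree; the revisions cannot simply be appended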

######################################################################

class Branch(object):
    """Branch holding a history of revisions.

    base
        Base directory of the branch.

    _lock_count
        If _lock_mode is true, a positive count of the number of times the
        lock is held.

    _lock
        Lock object from bzrlib.lock.

    TODO: Perhaps use different stores for different classes of object,
          so that we can keep track of how much space each one uses,
          or garbage-collect them.

    TODO: Add a RemoteBranch subclass.  For the basic case of read-only
          HTTP access this should be very easy by just redirecting
          controlfile access into HTTP requests.  We would need a
          RemoteStore working similarly.

    TODO: Keep the on-disk branch locked while the object exists.
    """

    def __init__(self, base, init=False, find_root=True):
        """Create new branch object at a particular location."""
        os.mkdir(self.controlfilename([]))
        self.controlfile('README', 'w').write(
            "This is a Bazaar-NG control directory.\n"
            "Do not change any files in this directory.\n")
        self.controlfile('branch-format', 'w').write(BZR_BRANCH_FORMAT)
        for d in ('text-store', 'inventory-store', 'revision-store'):
            os.mkdir(self.controlfilename(d))
        for f in ('revision-history', 'merged-patches',
                  'pending-merged-patches', 'branch-name'):
            self.controlfile(f, 'w').write('')
        mutter('created control directory in ' + self.base)
        Inventory().write_xml(self.controlfile('inventory','w'))

    def add(self, files, verbose=False, ids=None):
        """Make files versioned.

        This puts the files in the Added state, so that they will be
        recorded by the next commit.

        files
            List of paths to add, relative to the base of the tree.

        ids
            If set, use these instead of automatically generated ids.
            Must be the same length as the list of files, but may
            contain None for ids that are to be autogenerated.

        TODO: Perhaps have an option to add the ids even if the files do
              not exist.

        TODO: Perhaps return the ids of the files?  But then again it
              is easy to retrieve them if they're needed.

        TODO: Adding a directory should optionally recurse down and
              add all non-ignored children.  Perhaps do that in a
              higher-level command.

        >>> b = ScratchBranch(files=['foo'])
        >>> 'foo' in b.unknowns()
        True
        >>> b.add('foo')
        >>> 'foo' in b.unknowns()
        False
        >>> bool(b.inventory.path2id('foo'))
        True
        >>> b.add('foo')
        Traceback (most recent call last):
        BzrError: ('foo is already versioned', [])

        >>> b.add(['nothere'])
        Traceback (most recent call last):
        BzrError: ('cannot add: not a regular file or directory: nothere', [])
        """
        # TODO: Re-adding a file that is removed in the working copy
        # should probably put it back with the previous ID.
        if isinstance(files, types.StringTypes):
            assert(ids is None or isinstance(ids, types.StringTypes))
            files = [files]
            ids = [ids]

        if ids is None:
            ids = [None] * len(files)
        else:
            assert(len(ids) == len(files))

        inv = self.read_working_inventory()
        for f,file_id in zip(files, ids):
            if is_control_file(f):
                raise BzrError("cannot add control file %s" % quotefn(f))

            fp = splitpath(f)
            if len(fp) == 0:
                raise BzrError("cannot add top-level %r" % f)

            fullpath = os.path.normpath(self.abspath(f))

            try:
                kind = file_kind(fullpath)
            except OSError:
                # maybe something better?
                raise BzrError('cannot add: not a regular file or directory: %s' % quotefn(f))

            if kind != 'file' and kind != 'directory':
                raise BzrError('cannot add: not a regular file or directory: %s' % quotefn(f))

            if file_id is None:
                file_id = gen_file_id(f)
            inv.add_path(f, kind=kind, file_id=file_id)

            if verbose:
                show_status('A', kind, quotefn(f))

            mutter("add file %s file_id:{%s} kind=%r" % (f, file_id, kind))

        self._write_inventory(inv)

    def print_file(self, file, revno):
        """Print `file` to stdout."""
        tree = self.revision_tree(self.lookup_revision(revno))
        # use inventory as it was in that revision
        file_id = tree.inventory.path2id(file)
        if not file_id:
            raise BzrError("%r is not present in revision %d" % (file, revno))
        tree.print_file(file_id)

    def remove(self, files, verbose=False):
        """Mark nominated files for removal from the inventory."""
        ## TODO: Normalize names
        ## TODO: Remove nested loops; better scalability
        if isinstance(files, types.StringTypes):
            files = [files]
        tree = self.working_tree()
        inv = tree.inventory

        # do this before any modifications
        for f in files:
            fid = inv.path2id(f)
            if not fid:
                raise BzrError("cannot remove unversioned file %s" % quotefn(f))
            mutter("remove inventory entry %s {%s}" % (quotefn(f), fid))
            if verbose:
                # having removed it, it must be either ignored or unknown
                if tree.is_ignored(f):
                    new_status = 'I'
                else:
                    new_status = '?'
                show_status(new_status, inv[fid].kind, quotefn(f))
            del inv[fid]

        self._write_inventory(inv)

    # FIXME: this doesn't need to be a branch method
    def set_inventory(self, new_inventory_list):
        inv = Inventory()
        for path, file_id, parent, kind in new_inventory_list:
            name = os.path.basename(path)
            inv.add(InventoryEntry(file_id, name, kind, parent))
        self._write_inventory(inv)

    def unknowns(self):
        """Return all unknown files."""
        return self.working_tree().unknowns()

    def commit(self, message, timestamp=None, timezone=None,
               committer=None,
               verbose=False):
        """Commit working copy as a new revision.

        The basic approach is to add all the file texts into the
        store, then the inventory, then make a new revision pointing
        to that inventory and store that.

        This is not quite safe if the working copy changes during the
        commit; for the moment that is simply not allowed.  A better
        approach is to make a temporary copy of the files before
        computing their hashes, and then add those hashes in turn to
        the inventory.  This should mean at least that there are no
        broken hash pointers.  There is no way we can get a snapshot
        of the whole directory at an instant.  This would also have to
        be robust against files disappearing, moving, etc.  So the
        whole thing is a bit hard.

        timestamp -- if not None, seconds-since-epoch for a
             postdated/predated commit.
        """

        ## TODO: Show branch names

        # TODO: Don't commit if there are no changes, unless forced?

        # First walk over the working inventory; and both update that
        # and also build a new revision inventory.  The revision
        # inventory needs to hold the text-id, sha1 and size of the
        # actual file versions committed in the revision.  (These are
        # not present in the working inventory.)  We also need to
        # detect missing/deleted files, and remove them from the
        # working inventory.
        work_inv = self.read_working_inventory()
        inv = Inventory()
        basis = self.basis_tree()
        basis_inv = basis.inventory
        missing_ids = []
        for path, entry in work_inv.iter_entries():
            ## TODO: Cope with files that have gone missing.

            ## TODO: Check that the file kind has not changed from the previous
            ## revision of this file (if any).

            p = self.abspath(path)
            file_id = entry.file_id
            mutter('commit prep file %s, id %r ' % (p, file_id))

            if not os.path.exists(p):
                mutter(" file is missing, removing from inventory")
                if verbose:
                    show_status('D', entry.kind, quotefn(path))
                missing_ids.append(file_id)
                continue

            # TODO: Handle files that have been deleted

            # TODO: Maybe a special case for empty files?  Seems a
            # waste to store them many times.

            inv.add(entry)

            if basis_inv.has_id(file_id):
                old_kind = basis_inv[file_id].kind
                if old_kind != entry.kind:
                    bailout("entry %r changed kind from %r to %r"
                            % (file_id, old_kind, entry.kind))

            if entry.kind == 'directory':
                if not isdir(p):
                    bailout("%s is entered as directory but not a directory" % quotefn(p))
            elif entry.kind == 'file':
                if not isfile(p):
                    bailout("%s is entered as file but is not a file" % quotefn(p))

                content = file(p, 'rb').read()

                entry.text_sha1 = sha_string(content)
                entry.text_size = len(content)

                old_ie = basis_inv.has_id(file_id) and basis_inv[file_id]
                if (old_ie
                    and (old_ie.text_size == entry.text_size)
                    and (old_ie.text_sha1 == entry.text_sha1)):
                    ## assert content == basis.get_file(file_id).read()
                    entry.text_id = basis_inv[file_id].text_id
                    mutter(' unchanged from previous text_id {%s}' %
                           entry.text_id)
                else:
                    entry.text_id = gen_file_id(entry.name)
                    self.text_store.add(content, entry.text_id)
                    mutter(' stored with text_id {%s}' % entry.text_id)

                if verbose:
                    if not old_ie:
                        state = 'A'
                    elif (old_ie.name == entry.name
                          and old_ie.parent_id == entry.parent_id):
                        state = 'M'
                    else:
                        state = 'R'

                    show_status(state, entry.kind, quotefn(path))

        for file_id in missing_ids:
            # have to do this later so we don't mess up the iterator.
            # since parents may be removed before their children we
            # have to test.

            # FIXME: There's probably a better way to do this; perhaps
            # the workingtree should know how to filter itself.
            if work_inv.has_id(file_id):
                del work_inv[file_id]

        inv_id = rev_id = _gen_revision_id(time.time())

        inv_tmp = tempfile.TemporaryFile()
        inv.write_xml(inv_tmp)
        inv_tmp.seek(0)
        self.inventory_store.add(inv_tmp, inv_id)
        mutter('new inventory_id is {%s}' % inv_id)

        self._write_inventory(work_inv)

        if timestamp == None:
            timestamp = time.time()

        if committer == None:
            committer = username()

        if timezone == None:
            timezone = local_time_offset()

        mutter("building commit log message")
        rev = Revision(timestamp=timestamp,
                       timezone=timezone,
                       committer=committer,
                       precursor=self.last_patch(),
                       message=message,
                       inventory_id=inv_id,
                       revision_id=rev_id)

        rev_tmp = tempfile.TemporaryFile()
        rev.write_xml(rev_tmp)
        rev_tmp.seek(0)
        self.revision_store.add(rev_tmp, rev_id)
        mutter("new revision_id is {%s}" % rev_id)

        ## XXX: Everything up to here can simply be orphaned if we abort
        ## the commit; it will leave junk files behind but that doesn't
        ## matter.

        ## TODO: Read back the just-generated changeset, and make sure it
        ## applies and recreates the right state.

        ## TODO: Also calculate and store the inventory SHA1
        mutter("committing patch r%d" % (self.revno() + 1))

        self.append_revision(rev_id)

        if verbose:
            note("committed r%d" % self.revno())

    def append_revision(self, revision_id):
        mutter("add {%s} to revision-history" % revision_id)
        rev_history = self.revision_history()

    def get_revision(self, revision_id):
        """Return the Revision object for a named revision"""
        if not revision_id or not isinstance(revision_id, basestring):
            raise ValueError('invalid revision-id: %r' % revision_id)
        r = Revision.read_xml(self.revision_store[revision_id])
        assert r.revision_id == revision_id
        return r

    def get_revision_sha1(self, revision_id):
        """Hash the stored value of a revision, and return it."""
        # In the future, revision entries will be signed.  At that
        # point, it is probably best *not* to include the signature
        # in the revision hash.  Because that lets you re-sign
        # the revision, (add signatures/remove signatures) and still
        # have all hash pointers stay consistent.
        # But for now, just hash the contents.
        return sha_file(self.revision_store[revision_id])

    def get_inventory(self, inventory_id):
        """Get Inventory object by hash."""
        return Inventory.read_xml(self.inventory_store[inventory_id])


    def revision_history(self):
        """Return sequence of revision hashes on this branch.

        >>> ScratchBranch().revision_history()
        []
        """
        return [l.rstrip('\r\n') for l in
                self.controlfile('revision-history', 'r').readlines()]

    def common_ancestor(self, other, self_revno=None, other_revno=None):
        """Return (revno, revision_id) of the latest revision common to
        this branch and other, or (None, None) if there is none.

        >>> from bzrlib import commit
        >>> sb = ScratchBranch(files=['foo', 'foo~'])
        >>> sb.common_ancestor(sb) == (None, None)
        True
        >>> commit.commit(sb, "Committing first revision", verbose=False)
        >>> sb.common_ancestor(sb)[0]
        1
        >>> clone = sb.clone()
        >>> commit.commit(sb, "Committing second revision", verbose=False)
        >>> sb.common_ancestor(sb)[0]
        2
        >>> sb.common_ancestor(clone)[0]
        1
        >>> commit.commit(clone, "Committing divergent second revision",
        ...               verbose=False)
        >>> sb.common_ancestor(clone)[0]
        1
        >>> sb.common_ancestor(clone) == clone.common_ancestor(sb)
        True
        >>> sb.common_ancestor(sb) != clone.common_ancestor(clone)
        True
        >>> clone2 = sb.clone()
        >>> sb.common_ancestor(clone2)[0]
        2
        >>> sb.common_ancestor(clone2, self_revno=1)[0]
        1
        >>> sb.common_ancestor(clone2, other_revno=1)[0]
        1
        """
        my_history = self.revision_history()
        other_history = other.revision_history()
        if self_revno is None:
            self_revno = len(my_history)
        if other_revno is None:
            other_revno = len(other_history)
        indices = range(min((self_revno, other_revno)))
        indices.reverse()
        for r in indices:
            if my_history[r] == other_history[r]:
                return r+1, my_history[r]
        return None, None

    def enum_history(self, direction):
        """Return (revno, revision_id) for history of branch.

        direction
            'forward' is from earliest to latest
            'reverse' is from latest to earliest
        """
        rh = self.revision_history()
        if direction == 'forward':
            i = 1
            for rid in rh:
                yield i, rid
                i += 1
        elif direction == 'reverse':
            i = len(rh)
            for rid in reversed(rh):
                yield i, rid
                i -= 1
        else:
            raise ValueError('invalid history direction', direction)
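
    # Illustrative only: enum_history yields (revno, revision_id) pairs, so a
    # caller can walk the history lazily in either direction ("b" below stands
    # for any Branch instance).
    #
    #   for revno, rev_id in b.enum_history('reverse'):
    #       print revno, rev_id      # newest first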

    def revno(self):
        """Return current revision number for this branch.

        That is equivalent to the number of revisions committed to
        this branch.

        >>> b = ScratchBranch()
        >>> b.revno()
        0
        >>> b.commit('no foo')
        >>> b.revno()
        1
        """
        return len(self.revision_history())

    def last_patch(self):
        """Return last patch hash, or None if no history.

        >>> ScratchBranch().last_patch() == None
        True
        """
        ph = self.revision_history()
        if ph:
            return ph[-1]
        else:
            return None

    def missing_revisions(self, other, stop_revision=None):
        """
        If self and other have not diverged, return a list of the revisions
        present in other, but missing from self.

        >>> from bzrlib.commit import commit
        >>> bzrlib.trace.silent = True
        >>> br1 = ScratchBranch()
        >>> br2 = ScratchBranch()
        >>> br1.missing_revisions(br2)
        []
        >>> commit(br2, "lala!", rev_id="REVISION-ID-1")
        >>> br1.missing_revisions(br2)
        ['REVISION-ID-1']
        >>> br2.missing_revisions(br1)
        []
        >>> commit(br1, "lala!", rev_id="REVISION-ID-1")
        >>> br1.missing_revisions(br2)
        []
        >>> commit(br2, "lala!", rev_id="REVISION-ID-2A")
        >>> br1.missing_revisions(br2)
        ['REVISION-ID-2A']
        >>> commit(br1, "lala!", rev_id="REVISION-ID-2B")
        >>> br1.missing_revisions(br2)
        Traceback (most recent call last):
        DivergedBranches: These branches have diverged.
        """
        self_history = self.revision_history()
        self_len = len(self_history)
        other_history = other.revision_history()
        other_len = len(other_history)
        common_index = min(self_len, other_len) - 1
        if common_index >= 0 and \
           self_history[common_index] != other_history[common_index]:
            raise DivergedBranches(self, other)

        if stop_revision is None:
            stop_revision = other_len
        elif stop_revision > other_len:
            raise NoSuchRevision(self, stop_revision)

        return other_history[self_len:stop_revision]

    def update_revisions(self, other, stop_revision=None):
        """Pull in all new revisions from other branch.

        >>> from bzrlib.commit import commit
        >>> bzrlib.trace.silent = True
        >>> br1 = ScratchBranch(files=['foo', 'bar'])
        >>> br1.add('foo')
        >>> br1.add('bar')
        >>> commit(br1, "lala!", rev_id="REVISION-ID-1", verbose=False)
        >>> br2 = ScratchBranch()
        >>> br2.update_revisions(br1)
        Added 2 texts.
        Added 1 inventories.
        Added 1 revisions.
        >>> br2.revision_history()
        ['REVISION-ID-1']
        >>> br2.update_revisions(br1)
        Added 0 texts.
        Added 0 inventories.
        Added 0 revisions.
        >>> br1.text_store.total_size() == br2.text_store.total_size()
        True
        """
        from bzrlib.progress import ProgressBar

        pb = ProgressBar()
        pb.update('comparing histories')
        revision_ids = self.missing_revisions(other, stop_revision)

        needed_texts = sets.Set()
        revisions = []
        i = 0
        for rev_id in revision_ids:
            i += 1
            pb.update('fetching revision', i, len(revision_ids))
            rev = other.get_revision(rev_id)
            revisions.append(rev)
            inv = other.get_inventory(str(rev.inventory_id))
            for key, entry in inv.iter_entries():
                if entry.text_id is None:
                    continue
                if entry.text_id not in self.text_store:
                    needed_texts.add(entry.text_id)

        count = self.text_store.copy_multi(other.text_store, needed_texts)
        print "Added %d texts." % count
        inventory_ids = [ f.inventory_id for f in revisions ]
        count = self.inventory_store.copy_multi(other.inventory_store,
                                                inventory_ids)
        print "Added %d inventories." % count
        revision_ids = [ f.revision_id for f in revisions]
        count = self.revision_store.copy_multi(other.revision_store,
                                               revision_ids)
        for revision_id in revision_ids:
            self.append_revision(revision_id)
        print "Added %d revisions." % count

    def commit(self, *args, **kw):
        from bzrlib.commit import commit
        commit(self, *args, **kw)

    def lookup_revision(self, revno):
        """Return revision hash for revision number `revno`."""

    def write_log(self, show_timezone='original', verbose=False):
        """Write out human-readable log of commits to this branch

        show_timezone -- 'original', 'utc' or 'local'; how to display dates."""
        ## TODO: Option to choose either original, utc or local timezone
        revno = 1
        precursor = None
        for p in self.revision_history():
            print '-' * 40
            print 'revno:', revno
            ## TODO: Show hash if --id is given.
            ##print 'revision-hash:', p
            rev = self.get_revision(p)
            print 'committer:', rev.committer
            print 'timestamp: %s' % (format_date(rev.timestamp, rev.timezone or 0,
                                                 show_timezone))

            ## opportunistic consistency check, same as check_patch_chaining
            if rev.precursor != precursor:
                bailout("mismatched precursor!")

            print 'message:'
            if not rev.message:
                print ' (no message)'
            else:
                for l in rev.message.split('\n'):
                    print ' ' + l

            if verbose == True and precursor != None:
                print 'changed files:'
                tree = self.revision_tree(p)
                prevtree = self.revision_tree(precursor)

                for file_state, fid, old_name, new_name, kind in \
                                        diff_trees(prevtree, tree):
                    if file_state == 'A' or file_state == 'M':
                        show_status(file_state, kind, new_name)
                    elif file_state == 'D':
                        show_status(file_state, kind, old_name)
                    elif file_state == 'R':
                        show_status(file_state, kind,
                                    old_name + ' => ' + new_name)

            revno += 1
            precursor = p

    def rename_one(self, from_rel, to_rel):
        """Rename one file.

        This can change the directory or the filename or both.
        """
        tree = self.working_tree()
        inv = tree.inventory
        if not tree.has_filename(from_rel):
            raise BzrError("can't rename: old working file %r does not exist" % from_rel)
        if tree.has_filename(to_rel):
            raise BzrError("can't rename: new working file %r already exists" % to_rel)

        file_id = inv.path2id(from_rel)
        if file_id == None:
            raise BzrError("can't rename: old name %r is not versioned" % from_rel)

        if inv.path2id(to_rel):
            raise BzrError("can't rename: new name %r is already versioned" % to_rel)

        to_dir, to_tail = os.path.split(to_rel)
        to_dir_id = inv.path2id(to_dir)
        if to_dir_id == None and to_dir != '':
            raise BzrError("can't determine destination directory id for %r" % to_dir)

        mutter("rename_one:")
        mutter(" file_id {%s}" % file_id)
        mutter(" from_rel %r" % from_rel)
        mutter(" to_rel %r" % to_rel)
        mutter(" to_dir %r" % to_dir)
        mutter(" to_dir_id {%s}" % to_dir_id)

        inv.rename(file_id, to_dir_id, to_tail)

        print "%s => %s" % (from_rel, to_rel)

        from_abs = self.abspath(from_rel)
        to_abs = self.abspath(to_rel)
        try:
            os.rename(from_abs, to_abs)
        except OSError, e:
            raise BzrError("failed to rename %r to %r: %s"
                    % (from_abs, to_abs, e[1]),
                    ["rename rolled back"])

        self._write_inventory(inv)

    def move(self, from_paths, to_name):
        """Move the given files into the directory to_name.

        Note that to_name is only the last component of the new name;
        this doesn't change the directory.
        """
        ## TODO: Option to move IDs only
        assert not isinstance(from_paths, basestring)
        tree = self.working_tree()
        inv = tree.inventory
        to_abs = self.abspath(to_name)
        if not isdir(to_abs):
            raise BzrError("destination %r is not a directory" % to_abs)
        if not tree.has_filename(to_name):
            raise BzrError("destination %r not in working directory" % to_abs)
        to_dir_id = inv.path2id(to_name)
        if to_dir_id == None and to_name != '':
            raise BzrError("destination %r is not a versioned directory" % to_name)
        to_dir_ie = inv[to_dir_id]
        if to_dir_ie.kind not in ('directory', 'root_directory'):
            raise BzrError("destination %r is not a directory" % to_abs)

        to_idpath = inv.get_idpath(to_dir_id)

        for f in from_paths:
            if not tree.has_filename(f):
                raise BzrError("%r does not exist in working tree" % f)
            f_id = inv.path2id(f)
            if f_id == None:
                raise BzrError("%r is not versioned" % f)
            name_tail = splitpath(f)[-1]
            dest_path = appendpath(to_name, name_tail)
            if tree.has_filename(dest_path):
                raise BzrError("destination %r already exists" % dest_path)
            if f_id in to_idpath:
                raise BzrError("can't move %r to a subdirectory of itself" % f)

        # OK, so there's a race here, it's possible that someone will
        # create a file in this interval and then the rename might be
        # left half-done.  But we should have caught most problems.

        for f in from_paths:
            name_tail = splitpath(f)[-1]
            dest_path = appendpath(to_name, name_tail)
            print "%s => %s" % (f, dest_path)
            inv.rename(inv.path2id(f), to_dir_id, name_tail)
            try:
                os.rename(self.abspath(f), self.abspath(dest_path))
            except OSError, e:
                raise BzrError("failed to rename %r to %r: %s" % (f, dest_path, e[1]),
                        ["rename rolled back"])

        self._write_inventory(inv)
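
    # Illustrative only (the branch and file names are hypothetical): moving
    # two versioned files into an already-versioned subdirectory renames them
    # in the inventory and in the working tree, printing each mapping.
    #
    #   b.move(['foo.txt', 'bar.txt'], 'docs')
    #   # foo.txt => docs/foo.txt
    #   # bar.txt => docs/bar.txt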

    def show_status(self, show_all=False):
        """Display single-line status for non-ignored working files.

        The list is shown sorted in order by file name.

        >>> b = ScratchBranch(files=['foo', 'foo~'])
        >>> b.commit("add foo")
        >>> os.unlink(b.abspath('foo'))

        TODO: Get state for single files.
        """
        # We have to build everything into a list first so that it can
        # be sorted by name, incorporating all the different sources.

        # FIXME: Rather than getting things in random order and then sorting,
        # just step through in order.

        # Interesting case: the old ID for a file has been removed,
        # but a new file has been created under that name.

        old = self.basis_tree()
        new = self.working_tree()

        for fs, fid, oldname, newname, kind in diff_trees(old, new):
            if fs == 'R':
                show_status(fs, kind,
                            oldname + ' => ' + newname)
            elif fs == 'A' or fs == 'M':
                show_status(fs, kind, newname)
            elif fs == 'D':
                show_status(fs, kind, oldname)
            elif fs == 'I':
                if show_all:
                    show_status(fs, kind, newname)
            elif fs == '?':
                show_status(fs, kind, newname)
            else:
                bailout("weird file state %r" % ((fs, fid),))


class ScratchBranch(Branch):
    """Special test class: a branch that cleans up after itself."""

    def __init__(self, files=[], dirs=[], base=None):
        """Make a test branch in a temporary directory.

        If any files are listed, they are created in the working copy.
        """
        init = False
        if base is None:
            base = tempfile.mkdtemp()
            init = True
        Branch.__init__(self, base, init=init)
        for d in dirs:
            os.mkdir(self.abspath(d))
        for f in files:
            file(os.path.join(self.base, f), 'w').write('content of %s' % f)


    def clone(self):
        """
        >>> orig = ScratchBranch(files=["file1", "file2"])
        >>> clone = orig.clone()
        >>> os.path.samefile(orig.base, clone.base)
        False
        >>> os.path.isfile(os.path.join(clone.base, "file1"))
        True
        """
        base = tempfile.mkdtemp()
        os.rmdir(base)
        shutil.copytree(self.base, base, symlinks=True)
        return ScratchBranch(base=base)


    def __del__(self):
        """Destroy the test branch, removing the scratch directory."""
        try:
            mutter("delete ScratchBranch %s" % self.base)
            shutil.rmtree(self.base)
        except OSError, e:
            # Work around for shutil.rmtree failing on Windows when
            # readonly files are encountered
            mutter("hit exception in destroying ScratchBranch: %s" % e)
            for root, dirs, files in os.walk(self.base, topdown=False):
                for name in files:
                    os.chmod(os.path.join(root, name), 0700)
            shutil.rmtree(self.base)