from inventory import Inventory, InventoryEntry
from trace import mutter, note
from tree import Tree, EmptyTree, RevisionTree, WorkingTree
from osutils import isdir, quotefn, isfile, uuid, sha_file, username, chomp, \
     format_date, compact_date, pumpfile, user_email, rand_bytes, splitpath, \
     joinpath, sha_string, file_kind, local_time_offset, appendpath
from store import ImmutableStore
from revision import Revision
from errors import bailout, BzrError
from textui import show_status
from diff import diff_trees

BZR_BRANCH_FORMAT = "Bazaar-NG branch, format 0.0.4\n"

## TODO: Maybe include checks for common corruption of newlines, etc?

def find_branch(f, **args):
    if f and (f.startswith('http://') or f.startswith('https://')):
        import remotebranch   # only needed when opening a branch over HTTP
        return remotebranch.RemoteBranch(f, **args)
    return Branch(f, **args)
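
# Illustrative usage (assumed, not taken from this file): find_branch()
# dispatches on the form of `f`, so callers always get a usable branch object.
#
#     b  = find_branch('.')                           # local path  -> Branch
#     rb = find_branch('http://example.com/branch')   # http(s) URL -> RemoteBranch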


def with_writelock(method):
    """Method decorator for functions run with the branch locked."""
    # called with self set to the branch
    return method(self, *a, **k)


def with_readlock(method):
    return method(self, *a, **k)
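
# A minimal sketch of what these decorators presumably expand to.  This is an
# assumption for illustration only, not code from this file; the lock/unlock
# calls on the branch are hypothetical names:
#
#     def with_writelock(method):
#         def decorated(self, *a, **k):
#             self.lock('w')          # hypothetical: take the write lock
#             try:
#                 return method(self, *a, **k)
#             finally:
#                 self.unlock()       # hypothetical: always release it
#         return decorated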


def _relpath(base, path):
    """Return path relative to base, or raise exception.

    The path may be either an absolute path or a path relative to the
    current working directory.

    Lifted out of Branch.relpath for ease of testing.

    os.path.commonprefix (python2.4) has a bad bug: it works only on
    string prefixes, assuming that '/u' is a prefix of '/u2'.  This
    function avoids that problem."""
    rp = os.path.abspath(path)

    while len(head) >= len(base):
        head, tail = os.path.split(head)

    from errors import NotBranchError
    raise NotBranchError("path %r is not within branch %r" % (rp, base))
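
# Hypothetical sessions illustrating the intended behaviour (not doctests from
# this file; the exact return form is assumed from the docstring above):
#
#     >>> _relpath('/home/user/branch', '/home/user/branch/src/foo.c')
#     'src/foo.c'
#     >>> _relpath('/u', '/u2/foo')
#     Traceback (most recent call last):
#     ...
#     NotBranchError: path '/u2/foo' is not within branch '/u'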


def find_branch_root(f=None):
    """Find the branch root enclosing f, or pwd.

    f may be a filename or a URL.

    It is not necessary that f exists.

    Basically we keep looking up until we find the control directory or
    run into the root of the filesystem."""
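
# A rough sketch of the search loop described above (an assumption for
# illustration, not the body from this file; the control directory name
# '.bzr' is likewise assumed):
#
#     f = os.path.abspath(f or '.')
#     while True:
#         if os.path.exists(os.path.join(f, '.bzr')):
#             return f
#         head, tail = os.path.split(f)
#         if head == f:
#             raise BzrError("%r is not inside a branch" % f)
#         f = head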


def add(self, files, ids=None):
    """Make files versioned.

    This puts the files in the Added state, so that they will be
    recorded by the next commit.

    files
        List of paths to add, relative to the base of the tree.

    ids
        If set, use these instead of automatically generated ids.
        Must be the same length as the list of files, but may
        contain None for ids that are to be autogenerated.

    TODO: Perhaps have an option to add the ids even if the files do
          not exist.

    TODO: Perhaps return the ids of the files?  But then again it
          is easy to retrieve them if they're needed.

    TODO: Option to specify file id.

    TODO: Adding a directory should optionally recurse down and
          add all non-ignored children.  Perhaps do that in a
          higher-level command.

    >>> b = ScratchBranch(files=['foo'])
    >>> 'foo' in b.unknowns()
    True
    >>> b.add('foo')
    >>> 'foo' in b.unknowns()
    False
    >>> bool(b.inventory.path2id('foo'))
    True
    >>> b.add('foo')
    Traceback (most recent call last):
    BzrError: ('foo is already versioned', [])

    >>> b.add(['nothere'])
    Traceback (most recent call last):
    BzrError: ('cannot add: not a regular file or directory: nothere', [])
    """
    # TODO: Re-adding a file that is removed in the working copy
    # should probably put it back with the previous ID.
    if isinstance(files, types.StringTypes):
        assert(ids is None or isinstance(ids, types.StringTypes))
        files = [files]
        ids = [ids]

    if ids is None:
        ids = [None] * len(files)
    else:
        assert(len(ids) == len(files))

    inv = self.read_working_inventory()
    for f, file_id in zip(files, ids):
        if is_control_file(f):
            raise BzrError("cannot add control file %s" % quotefn(f))

        fp = splitpath(f)
        if len(fp) == 0:
            raise BzrError("cannot add top-level %r" % f)

        fullpath = os.path.normpath(self.abspath(f))

        try:
            kind = file_kind(fullpath)
        except OSError:
            # maybe something better?
            raise BzrError('cannot add: not a regular file or directory: %s' % quotefn(f))

        if kind != 'file' and kind != 'directory':
            raise BzrError('cannot add: not a regular file or directory: %s' % quotefn(f))

        if file_id is None:
            file_id = gen_file_id(f)
        inv.add_path(f, kind=kind, file_id=file_id)

        show_status('A', kind, quotefn(f))
        mutter("add file %s file_id:{%s} kind=%r" % (f, file_id, kind))

    self._write_inventory(inv)


def print_file(self, file, revno):
    """Print `file` to stdout."""


def unknowns(self):
    return self.working_tree().unknowns()


def commit(self, message, timestamp=None, timezone=None,
           committer=None):
    """Commit working copy as a new revision.

    The basic approach is to add all the file texts into the
    store, then the inventory, then make a new revision pointing
    to that inventory and store that.

    This is not quite safe if the working copy changes during the
    commit; for the moment that is simply not allowed.  A better
    approach is to make a temporary copy of the files before
    computing their hashes, and then add those hashes in turn to
    the inventory.  This should mean at least that there are no
    broken hash pointers.  There is no way we can get a snapshot
    of the whole directory at an instant.  This would also have to
    be robust against files disappearing, moving, etc.  So the
    whole thing is a bit hard.

    timestamp -- if not None, seconds-since-epoch for a
         postdated/predated commit.
    """
    ## TODO: Show branch names

    # TODO: Don't commit if there are no changes, unless forced?

    # First walk over the working inventory; and both update that
    # and also build a new revision inventory.  The revision
    # inventory needs to hold the text-id, sha1 and size of the
    # actual file versions committed in the revision.  (These are
    # not present in the working inventory.)  We also need to
    # detect missing/deleted files, and remove them from the
    # working inventory.

    work_inv = self.read_working_inventory()
    inv = Inventory()
    basis = self.basis_tree()
    basis_inv = basis.inventory
    missing_ids = []

    for path, entry in work_inv.iter_entries():
        ## TODO: Cope with files that have gone missing.

        ## TODO: Check that the file kind has not changed from the previous
        ## revision of this file (if any).

        p = self.abspath(path)
        file_id = entry.file_id
        mutter('commit prep file %s, id %r ' % (p, file_id))

        if not os.path.exists(p):
            mutter(" file is missing, removing from inventory")
            show_status('D', entry.kind, quotefn(path))
            missing_ids.append(file_id)
            continue

        # TODO: Handle files that have been deleted

        # TODO: Maybe a special case for empty files?  Seems a
        # waste to store them many times.

        if basis_inv.has_id(file_id):
            old_kind = basis_inv[file_id].kind
            if old_kind != entry.kind:
                bailout("entry %r changed kind from %r to %r"
                        % (file_id, old_kind, entry.kind))

        if entry.kind == 'directory':
            if not isdir(p):
                bailout("%s is entered as directory but not a directory" % quotefn(p))
        elif entry.kind == 'file':
            if not isfile(p):
                bailout("%s is entered as file but is not a file" % quotefn(p))

            content = file(p, 'rb').read()

            entry.text_sha1 = sha_string(content)
            entry.text_size = len(content)

            old_ie = basis_inv.has_id(file_id) and basis_inv[file_id]
            if (old_ie
                and (old_ie.text_size == entry.text_size)
                and (old_ie.text_sha1 == entry.text_sha1)):
                ## assert content == basis.get_file(file_id).read()
                entry.text_id = basis_inv[file_id].text_id
                mutter(' unchanged from previous text_id {%s}' %
                       entry.text_id)
            else:
                entry.text_id = gen_file_id(entry.name)
                self.text_store.add(content, entry.text_id)
                mutter(' stored with text_id {%s}' % entry.text_id)

        elif (old_ie.name == entry.name
              and old_ie.parent_id == entry.parent_id):

        show_status(state, entry.kind, quotefn(path))

    for file_id in missing_ids:
        # have to do this later so we don't mess up the iterator.
        # since parents may be removed before their children we
        # check that each id is still present before deleting it.
        # FIXME: There's probably a better way to do this; perhaps
        # the workingtree should know how to filter itself.
        if work_inv.has_id(file_id):
            del work_inv[file_id]

    inv_id = rev_id = _gen_revision_id(time.time())

    inv_tmp = tempfile.TemporaryFile()
    inv.write_xml(inv_tmp)
    self.inventory_store.add(inv_tmp, inv_id)
    mutter('new inventory_id is {%s}' % inv_id)

    self._write_inventory(work_inv)

    if timestamp == None:
        timestamp = time.time()

    if committer == None:
        committer = username()

    if timezone == None:
        timezone = local_time_offset()

    mutter("building commit log message")
    rev = Revision(timestamp=timestamp,
                   precursor=self.last_patch(),

    rev_tmp = tempfile.TemporaryFile()
    rev.write_xml(rev_tmp)
    self.revision_store.add(rev_tmp, rev_id)
    mutter("new revision_id is {%s}" % rev_id)

    ## XXX: Everything up to here can simply be orphaned if we abort
    ## the commit; it will leave junk files behind but that doesn't
    ## matter.

    ## TODO: Read back the just-generated changeset, and make sure it
    ## applies and recreates the right state.

    ## TODO: Also calculate and store the inventory SHA1
    mutter("committing patch r%d" % (self.revno() + 1))

    self.append_revision(rev_id)

    note("committed r%d" % self.revno())
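
# Example (assumed usage, in the doctest style used elsewhere in this file):
#
#     >>> b = ScratchBranch(files=['foo'])
#     >>> b.add('foo')
#     >>> b.commit("add foo")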


def append_revision(self, revision_id):
    mutter("add {%s} to revision-history" % revision_id)
    rev_history = self.revision_history()


def write_log(self, show_timezone='original', verbose=False):
    """Write out human-readable log of commits to this branch.

    show_timezone -- 'original' (the committer's timezone), 'utc' or
        'local'; controls how commit dates are displayed."""
    ## TODO: Option to choose either original, utc or local timezone

    revno = 1
    precursor = None
    for p in self.revision_history():
        print 'revno:', revno
        ## TODO: Show hash if --id is given.
        ##print 'revision-hash:', p
        rev = self.get_revision(p)
        print 'committer:', rev.committer
        print 'timestamp: %s' % (format_date(rev.timestamp, rev.timezone or 0,
                                             show_timezone))

        ## opportunistic consistency check, same as check_patch_chaining
        if rev.precursor != precursor:
            bailout("mismatched precursor!")

        if not rev.message:
            print ' (no message)'
        else:
            for l in rev.message.split('\n'):
                print ' ' + l

        if verbose == True and precursor != None:
            print 'changed files:'
            tree = self.revision_tree(p)
            prevtree = self.revision_tree(precursor)

            for file_state, fid, old_name, new_name, kind in \
                    diff_trees(prevtree, tree):
                if file_state == 'A' or file_state == 'M':
                    show_status(file_state, kind, new_name)
                elif file_state == 'D':
                    show_status(file_state, kind, old_name)
                elif file_state == 'R':
                    show_status(file_state, kind,
                                old_name + ' => ' + new_name)

        revno += 1
        precursor = p
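
# Example invocation (assumed): print the whole log with dates shown in UTC
# and the per-revision file changes included.
#
#     b.write_log(show_timezone='utc', verbose=True)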


def rename_one(self, from_rel, to_rel):
    """Rename one file.

    This can change the directory or the filename or both.
    """
    tree = self.working_tree()
    inv = tree.inventory
    if not tree.has_filename(from_rel):
        raise BzrError("can't rename: old working file %r does not exist" % from_rel)
    if tree.has_filename(to_rel):
        raise BzrError("can't rename: new working file %r already exists" % to_rel)

    file_id = inv.path2id(from_rel)
    if file_id == None:
        raise BzrError("can't rename: old name %r is not versioned" % from_rel)

    if inv.path2id(to_rel):
        raise BzrError("can't rename: new name %r is already versioned" % to_rel)

    to_dir, to_tail = os.path.split(to_rel)
    to_dir_id = inv.path2id(to_dir)
    if to_dir_id == None and to_dir != '':
        raise BzrError("can't determine destination directory id for %r" % to_dir)

    mutter("rename_one:")
    mutter(" file_id {%s}" % file_id)


def move(self, from_paths, to_name):
    tree = self.working_tree()
    inv = tree.inventory
    to_abs = self.abspath(to_name)
    if not isdir(to_abs):
        raise BzrError("destination %r is not a directory" % to_abs)
    if not tree.has_filename(to_name):
        raise BzrError("destination %r not in working directory" % to_abs)
    to_dir_id = inv.path2id(to_name)
    if to_dir_id == None and to_name != '':
        raise BzrError("destination %r is not a versioned directory" % to_name)
    to_dir_ie = inv[to_dir_id]
    if to_dir_ie.kind not in ('directory', 'root_directory'):
        raise BzrError("destination %r is not a directory" % to_abs)

    to_idpath = inv.get_idpath(to_dir_id)

    for f in from_paths:
        if not tree.has_filename(f):
            raise BzrError("%r does not exist in working tree" % f)
        f_id = inv.path2id(f)
        if f_id == None:
            raise BzrError("%r is not versioned" % f)
        name_tail = splitpath(f)[-1]
        dest_path = appendpath(to_name, name_tail)
        if tree.has_filename(dest_path):
            raise BzrError("destination %r already exists" % dest_path)
        if f_id in to_idpath:
            raise BzrError("can't move %r to a subdirectory of itself" % f)

        # OK, so there's a race here, it's possible that someone will
        # create a file in this interval and then the rename might be
        # wrong.
        try:
            os.rename(self.abspath(f), self.abspath(dest_path))
        except OSError, e:
            raise BzrError("failed to rename %r to %r: %s" % (f, dest_path, e[1]),
                           ["rename rolled back"])

    self._write_inventory(inv)
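
# Illustrative usage (assumed; the signature shown above is inferred from the
# body, and 'subdir' must already be a versioned directory in the tree):
#
#     b.move(['foo'], 'subdir')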


def show_status(self, show_all=False):
    """Display single-line status for non-ignored working files.

    The list is shown sorted in order by file name.

    >>> b = ScratchBranch(files=['foo', 'foo~'])
    >>> b.commit("add foo")
    >>> os.unlink(b.abspath('foo'))

    TODO: Get state for single files.
    """
    # We have to build everything into a list first so that it can be
    # sorted by name, incorporating all the different sources.

    # FIXME: Rather than getting things in random order and then sorting,
    # just step through in order.

    # Interesting case: the old ID for a file has been removed,
    # but a new file has been created under that name.

    old = self.basis_tree()
    new = self.working_tree()

    for fs, fid, oldname, newname, kind in diff_trees(old, new):
        if fs == 'R':
            show_status(fs, kind,
                        oldname + ' => ' + newname)
        elif fs == 'A' or fs == 'M':
            show_status(fs, kind, newname)
        elif fs == 'D':
            show_status(fs, kind, oldname)
        elif fs == '?':
            show_status(fs, kind, newname)
        elif fs == 'I':
            show_status(fs, kind, newname)
        elif fs == '.':
            show_status(fs, kind, newname)
        else:
            bailout("weird file state %r" % ((fs, fid),))


class ScratchBranch(Branch):
    """Special test class: a branch that cleans up after itself.