from inventory import InventoryEntry, Inventory
from trace import mutter, note
from tree import Tree, EmptyTree, RevisionTree, WorkingTree
from osutils import isdir, quotefn, isfile, uuid, sha_file, username, chomp, \
     format_date, compact_date, pumpfile, user_email, rand_bytes, splitpath, \
     joinpath, sha_string, file_kind, local_time_offset, appendpath
from store import ImmutableStore
from revision import Revision
from errors import bailout, BzrError
from textui import show_status
from diff import diff_trees

BZR_BRANCH_FORMAT = "Bazaar-NG branch, format 0.0.4\n"
## TODO: Maybe include checks for common corruption of newlines, etc?


def find_branch(f, **args):
    if f and (f.startswith('http://') or f.startswith('https://')):
        import remotebranch
        return remotebranch.RemoteBranch(f, **args)
    else:
        return Branch(f, **args)
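# Usage sketch for find_branch (illustrative only; the path and URL below are
# made up):
#
#   b = find_branch('.')                           # local path -> Branch
#   b = find_branch('http://example.com/branch')   # http(s) URL -> RemoteBranch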
def find_branch_root(f=None):
    """Find the branch root enclosing f, or pwd.

    f may be a filename or a URL.

    It is not necessary that f exists.

    Basically we keep looking up until we find the control directory or
    run into the root."""
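# A minimal sketch of the upward search described in the docstring above
# (illustrative only, not the implementation; assumes the control directory
# is named '.bzr' and that f defaults to the current directory):
#
#   def _find_branch_root_sketch(f=None):
#       f = os.path.realpath(f or os.getcwd())
#       while True:
#           if os.path.exists(os.path.join(f, '.bzr')):
#               return f
#           parent = os.path.dirname(f)
#           if parent == f:       # reached the filesystem root without finding it
#               raise BzrError('%r is not inside a branch' % f)
#           f = parent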


######################################################################
# branch objects

class Branch:
    """Branch holding a history of revisions.

    base -- Base directory of the branch.

    TODO: Perhaps use different stores for different classes of object,
          so that we can keep track of how much space each one uses,
          or garbage-collect them.

    TODO: Add a RemoteBranch subclass.  For the basic case of read-only
          HTTP access this should be very easy by just redirecting
          controlfile access into HTTP requests.  We would need a
          RemoteStore working similarly.

    TODO: Keep the on-disk branch locked while the object exists.
    """
    def __init__(self, base, init=False, find_root=True, lock_mode='w'):
        """Create new branch object at a particular location.

        base -- Base directory for the branch.
        """

    __repr__ = __str__
    def lock(self, mode='w'):
        """Lock the on-disk branch, excluding other processes."""
        try:
            import fcntl, errno

            if mode == 'w':
                lm = fcntl.LOCK_EX
                om = os.O_WRONLY | os.O_CREAT
            elif mode == 'r':
                lm = fcntl.LOCK_SH
                om = os.O_RDONLY
            else:
                raise BzrError("invalid locking mode %r" % mode)

            try:
                lockfile = os.open(self.controlfilename('branch-lock'), om)
            except OSError, e:
                if e.errno == errno.ENOENT:
                    # might not exist on branches from <0.0.4
                    self.controlfile('branch-lock', 'w').close()
                    lockfile = os.open(self.controlfilename('branch-lock'), om)
                else:
                    raise e

            fcntl.lockf(lockfile, lm)
            def unlock():
                fcntl.lockf(lockfile, fcntl.LOCK_UN)
                os.close(lockfile)
                self._lockmode = None
            self.unlock = unlock
            self._lockmode = mode
        except ImportError:
            warning("please write a locking method for platform %r" % sys.platform)
            def unlock():
                self._lockmode = None
            self.unlock = unlock
            self._lockmode = mode


    def _need_readlock(self):
        if self._lockmode not in ['r', 'w']:
            raise BzrError('need read lock on branch, only have %r' % self._lockmode)

    def _need_writelock(self):
        if self._lockmode not in ['w']:
            raise BzrError('need write lock on branch, only have %r' % self._lockmode)
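    # Typical call pattern for the locking helpers above (illustrative only):
    #
    #   b = Branch('.')         # __init__ takes lock_mode='w' by default
    #   b._need_writelock()     # raises BzrError unless a write lock is held
    #   ...modify the branch...
    #   b.unlock()              # unlock() is installed by lock(); releases the fcntl lock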


    def abspath(self, name):
        """Return absolute filename for something in the branch"""
        return os.path.join(self.base, name)
    def add(self, files, verbose=False, ids=None):
        """Make files versioned.

        TODO: Adding a directory should optionally recurse down and
              add all non-ignored children.  Perhaps do that in a
              higher-level method.

        >>> b = ScratchBranch(files=['foo'])
        >>> 'foo' in b.unknowns()
        True
        >>> b.add('foo')
        >>> 'foo' in b.unknowns()
        False
        >>> bool(b.inventory.path2id('foo'))
        True

        >>> b.add('foo')
        Traceback (most recent call last):
        BzrError: ('foo is already versioned', [])

        >>> b.add(['nothere'])
        Traceback (most recent call last):
        BzrError: ('cannot add: not a regular file or directory: nothere', [])
        """
        self._need_writelock()

        # TODO: Re-adding a file that is removed in the working copy
        # should probably put it back with the previous ID.
        if isinstance(files, types.StringTypes):
            assert(ids is None or isinstance(ids, types.StringTypes))
            files = [files]
            ids = [ids]

        if ids is None:
            ids = [None] * len(files)
        else:
            assert(len(ids) == len(files))

        inv = self.read_working_inventory()
        for f,file_id in zip(files, ids):
            if is_control_file(f):
                bailout("cannot add control file %s" % quotefn(f))
    def unknowns(self):
        return self.working_tree().unknowns()

    def commit(self, message, timestamp=None, timezone=None,
               committer=None, verbose=False):
        """Commit working copy as a new revision.

        The basic approach is to add all the file texts into the
        store, then the inventory, then make a new revision pointing
        to that inventory and store that.

        This is not quite safe if the working copy changes during the
        commit; for the moment that is simply not allowed.  A better
        approach is to make a temporary copy of the files before
        computing their hashes, and then add those hashes in turn to
        the inventory.  This should mean at least that there are no
        broken hash pointers.  There is no way we can get a snapshot
        of the whole directory at an instant.  This would also have to
        be robust against files disappearing, moving, etc.  So the
        whole thing is a bit hard.

        timestamp -- if not None, seconds-since-epoch for a
             postdated/predated commit.
        """
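        # Schematic of the flow described in the docstring (illustrative
        # comment only; these calls appear in full further down):
        #
        #   self.text_store.add(content, entry.text_id)   # 1. file texts
        #   self.inventory_store.add(inv_tmp, inv_id)     # 2. inventory
        #   self.revision_store.add(rev_tmp, rev_id)      # 3. revision
        #   self.append_revision(rev_id)                  # 4. extend revision-history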
        ## TODO: Show branch names

        # TODO: Don't commit if there are no changes, unless forced?

        # First walk over the working inventory; and both update that
        # and also build a new revision inventory.  The revision
        # inventory needs to hold the text-id, sha1 and size of the
        # actual file versions committed in the revision.  (These are
        # not present in the working inventory.)  We also need to
        # detect missing/deleted files, and remove them from the
        # working inventory.
        work_inv = self.read_working_inventory()
        inv = Inventory()
        basis = self.basis_tree()
        basis_inv = basis.inventory
        missing_ids = []

        for path, entry in work_inv.iter_entries():
            ## TODO: Cope with files that have gone missing.

            ## TODO: Check that the file kind has not changed from the previous
            ## revision of this file (if any).

            p = self.abspath(path)
            file_id = entry.file_id
            mutter('commit prep file %s, id %r ' % (p, file_id))

            if not os.path.exists(p):
                mutter("    file is missing, removing from inventory")
                if verbose:
                    show_status('D', entry.kind, quotefn(path))
                missing_ids.append(file_id)
                continue

            # TODO: Handle files that have been deleted

            # TODO: Maybe a special case for empty files?  Seems a
            # waste to store them many times.

            inv.add(entry)

            if basis_inv.has_id(file_id):
                old_kind = basis_inv[file_id].kind
                if old_kind != entry.kind:
                    bailout("entry %r changed kind from %r to %r"
                            % (file_id, old_kind, entry.kind))

            if entry.kind == 'directory':
                if not isdir(p):
                    bailout("%s is entered as directory but not a directory" % quotefn(p))
            elif entry.kind == 'file':
                if not isfile(p):
                    bailout("%s is entered as file but is not a file" % quotefn(p))

                content = file(p, 'rb').read()

                entry.text_sha1 = sha_string(content)
                entry.text_size = len(content)

                old_ie = basis_inv.has_id(file_id) and basis_inv[file_id]
                if (old_ie
                    and (old_ie.text_size == entry.text_size)
                    and (old_ie.text_sha1 == entry.text_sha1)):
                    ## assert content == basis.get_file(file_id).read()
                    entry.text_id = basis_inv[file_id].text_id
                    mutter('    unchanged from previous text_id {%s}' %
                           entry.text_id)
                else:
                    entry.text_id = gen_file_id(entry.name)
                    self.text_store.add(content, entry.text_id)
                    mutter('    stored with text_id {%s}' % entry.text_id)
                    if verbose:
                        if not old_ie:
                            state = 'A'
                        elif (old_ie.name == entry.name
                              and old_ie.parent_id == entry.parent_id):
                            state = 'M'
                        else:
                            state = 'R'

                        show_status(state, entry.kind, quotefn(path))

        for file_id in missing_ids:
            # have to do this later so we don't mess up the iterator.
            # since parents may be removed before their children we
            # have to test.
            # FIXME: There's probably a better way to do this; perhaps
            # the workingtree should know how to filter itself.
            if work_inv.has_id(file_id):
                del work_inv[file_id]

        inv_id = rev_id = _gen_revision_id(time.time())

        inv_tmp = tempfile.TemporaryFile()
        inv.write_xml(inv_tmp)
        inv_tmp.seek(0)
        self.inventory_store.add(inv_tmp, inv_id)
        mutter('new inventory_id is {%s}' % inv_id)

        self._write_inventory(work_inv)

        if timestamp == None:
            timestamp = time.time()

        if committer == None:
            committer = username()

        if timezone == None:
            timezone = local_time_offset()

        mutter("building commit log message")
        rev = Revision(timestamp=timestamp, timezone=timezone,
                       committer=committer,
                       precursor=self.last_patch(),
                       message=message,
                       inventory_id=inv_id, revision_id=rev_id)

        rev_tmp = tempfile.TemporaryFile()
        rev.write_xml(rev_tmp)
        rev_tmp.seek(0)
        self.revision_store.add(rev_tmp, rev_id)
        mutter("new revision_id is {%s}" % rev_id)

        ## XXX: Everything up to here can simply be orphaned if we abort
        ## the commit; it will leave junk files behind but that doesn't
        ## matter.

        ## TODO: Read back the just-generated changeset, and make sure it
        ## applies and recreates the right state.

        ## TODO: Also calculate and store the inventory SHA1
        mutter("committing patch r%d" % (self.revno() + 1))

        self.append_revision(rev_id)

        if verbose:
            note("committed r%d" % self.revno())


    def append_revision(self, revision_id):
        mutter("add {%s} to revision-history" % revision_id)
        rev_history = self.revision_history()

    def write_log(self, show_timezone='original', verbose=False):
        """Write out human-readable log of commits to this branch.

        show_timezone -- One of 'original', 'utc' or 'local', controlling
             how commit timestamps are displayed.
        """
        ## TODO: Option to choose either original, utc or local timezone

        revno = 1
        precursor = None

        for p in self.revision_history():
            print 'revno:', revno
            ## TODO: Show hash if --id is given.
            ##print 'revision-hash:', p
            rev = self.get_revision(p)
            print 'committer:', rev.committer
            print 'timestamp: %s' % (format_date(rev.timestamp, rev.timezone or 0,
                                                 show_timezone))

            ## opportunistic consistency check, same as check_patch_chaining
            if rev.precursor != precursor:
                bailout("mismatched precursor!")

            if not rev.message:
                print '  (no message)'
            else:
                for l in rev.message.split('\n'):
                    print '  ' + l

            if verbose == True and precursor != None:
                print 'changed files:'
                tree = self.revision_tree(p)
                prevtree = self.revision_tree(precursor)

                for file_state, fid, old_name, new_name, kind in \
                                        diff_trees(prevtree, tree):
                    if file_state == 'A' or file_state == 'M':
                        show_status(file_state, kind, new_name)
                    elif file_state == 'D':
                        show_status(file_state, kind, old_name)
                    elif file_state == 'R':
                        show_status(file_state, kind,
                                    old_name + ' => ' + new_name)

            revno += 1
            precursor = p
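    # Rough shape of the output produced by write_log (values invented,
    # formatting approximate; 'changed files:' appears only with verbose=True):
    #
    #   revno: 3
    #   committer: user@example.com
    #   timestamp: Tue 2005-04-05 12:00:00 +0000
    #     fix the frobnicator
    #   changed files:
    #   A       foo.c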


    def rename_one(self, from_rel, to_rel):
        """Rename one file.

        This can change the directory or the filename or both.
        """
        self._need_writelock()
        tree = self.working_tree()
        inv = tree.inventory
        if not tree.has_filename(from_rel):
            bailout("can't rename: old working file %r does not exist" % from_rel)

    def show_status(self, show_all=False):
        """Display single-line status for non-ignored working files.

        The list is shown sorted in order by file name.

        >>> b = ScratchBranch(files=['foo', 'foo~'])
        >>> b.add('foo')
        >>> b.commit("add foo")
        >>> os.unlink(b.abspath('foo'))

        TODO: Get state for single files.
        """
        # We have to build everything into a list first so that it can
        # be sorted by name, incorporating all the different sources.

        # FIXME: Rather than getting things in random order and then sorting,
        # just step through in order.

        # Interesting case: the old ID for a file has been removed,
        # but a new file has been created under that name.

        old = self.basis_tree()
        new = self.working_tree()

        for fs, fid, oldname, newname, kind in diff_trees(old, new):
            if fs == 'R':
                show_status(fs, kind,
                            oldname + ' => ' + newname)
            elif fs == 'A' or fs == 'M':
                show_status(fs, kind, newname)
            elif fs == 'D':
                show_status(fs, kind, oldname)
            elif fs == '.':
                if show_all:
                    show_status(fs, kind, newname)
            elif fs == 'I':
                if show_all:
                    show_status(fs, kind, newname)
            elif fs == '?':
                show_status(fs, kind, newname)
            else:
                bailout("weird file state %r" % ((fs, fid),))


class ScratchBranch(Branch):
    """Special test class: a branch that cleans up after itself.