# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

import sys, os, os.path, random, time, sha, sets, types, re, shutil, tempfile
import traceback, socket, fnmatch, difflib
from binascii import hexlify

from bzrlib.trace import mutter, note
from osutils import isdir, quotefn, isfile, uuid, sha_file, username, \
     format_date, compact_date, pumpfile, user_email, rand_bytes, splitpath, \
     joinpath, sha_string, file_kind, local_time_offset, appendpath
from errors import bailout, BzrError
from textui import show_status
from inventory import InventoryEntry, Inventory
from tree import Tree, EmptyTree, RevisionTree, WorkingTree
from store import ImmutableStore
from revision import Revision
from diff import diff_trees


BZR_BRANCH_FORMAT = "Bazaar-NG branch, format 0.0.4\n"

## TODO: Maybe include checks for common corruption of newlines, etc?

def find_branch(f, **args):
    if f and (f.startswith('http://') or f.startswith('https://')):
        # remote URLs are handled by the RemoteBranch implementation
        import remotebranch
        return remotebranch.RemoteBranch(f, **args)
    else:
        return Branch(f, **args)
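# Illustrative usage (not part of the original module): find_branch picks an
# implementation from the form of the location string, so, assuming the
# locations exist:
#
#     b = find_branch('/home/user/project')            # local Branch
#     b = find_branch('http://example.com/project')    # remotebranch.RemoteBranch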
def find_cached_branch(f, cache_root, **args):
    from remotebranch import RemoteBranch
    br = find_branch(f, **args)
    def cacheify(br, store_name):
        from meta_store import CachedStore
        cache_path = os.path.join(cache_root, store_name)
        # wrap the branch's existing store in a cache kept under cache_path
        new_store = CachedStore(getattr(br, store_name), cache_path)
        setattr(br, store_name, new_store)

    if isinstance(br, RemoteBranch):
        cacheify(br, 'inventory_store')
        cacheify(br, 'text_store')
        cacheify(br, 'revision_store')
    return br
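# Sketch of intended use (an assumption, not from the original source): wrap a
# remote branch's stores in local caches so repeated reads hit disk rather
# than the network.
#
#     import tempfile
#     cache_dir = tempfile.mkdtemp()
#     br = find_cached_branch('http://example.com/project', cache_dir)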
def _relpath(base, path):
    """Return path relative to base, or raise exception.

    The path may be either an absolute path or a path relative to the
    current working directory.

    Lifted out of Branch.relpath for ease of testing.

    os.path.commonprefix (python2.4) has a bad bug that it works just
    on string prefixes, assuming that '/u' is a prefix of '/u2'.  This
    avoids that problem."""
    rp = os.path.abspath(path)

    s = []
    head = rp
    while len(head) >= len(base):
        if head == base:
            break
        head, tail = os.path.split(head)
        if tail:
            s.insert(0, tail)
    else:
        from errors import NotBranchError
        raise NotBranchError("path %r is not within branch %r" % (rp, base))

    return os.sep.join(s)
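# Illustrative behaviour (not from the original source): _relpath peels
# components off the tail of the absolute path until it reaches the base.
#
#     _relpath('/home/jane/branch', '/home/jane/branch/foo/bar')
#     # => 'foo/bar'
#     _relpath('/home/jane/branch', '/home/jane/other')
#     # => raises NotBranchError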
def find_branch_root(f=None):
    """Find the branch root enclosing f, or pwd.

    f may be a filename or a URL.

    It is not necessary that f exists.

    Basically we keep looking up until we find the control directory or
    run into the root.  If there isn't one, raises NotBranchError.
    """
    def _check_format(self):
        """Check this branch format is supported."""
        fmt = self.controlfile('branch-format', 'r').read()
        # normalise line endings so branches written on Windows still match
        fmt = fmt.replace('\r\n', '\n')
        if fmt != BZR_BRANCH_FORMAT:
            raise BzrError('sorry, branch format %r not supported' % fmt,
                           ['use a different bzr version',
                            'or remove the .bzr directory and "bzr init" again'])
    def get_root_id(self):
        """Return the id of this branch's root"""
        inv = self.read_working_inventory()
        return inv.root.file_id

    def set_root_id(self, file_id):
        inv = self.read_working_inventory()
        orig_root_id = inv.root.file_id
        # re-key the root entry under the new file id
        del inv._byid[inv.root.file_id]
        inv.root.file_id = file_id
        inv._byid[inv.root.file_id] = inv.root
        # re-point direct children of the old root at the new root id
        for fid in inv:
            entry = inv[fid]
            if entry.parent_id in (None, orig_root_id):
                entry.parent_id = inv.root.file_id
        self._write_inventory(inv)
    def read_working_inventory(self):
        """Read the working inventory."""
        from bzrlib.inventory import Inventory
        from bzrlib.xml import unpack_xml
        from time import time

        before = time()
        # ElementTree does its own conversion from UTF-8, so open in
        # binary.
        inv = unpack_xml(Inventory,
                         self.controlfile('inventory', 'rb'))
        mutter("loaded inventory of %d items in %f"
               % (len(inv), time() - before))
        return inv
    def _write_inventory(self, inv):
        """Update the working inventory.

        That is to say, the inventory describing changes underway, that
        will be committed to the next revision.
        """
    def add(self, files, verbose=False, ids=None):
        """Make files versioned.

        This puts the files in the Added state, so that they will be
        recorded by the next commit.

        files
            List of paths to add, relative to the base of the tree.

        ids
            If set, use these instead of automatically generated ids.
            Must be the same length as the list of files, but may
            contain None for ids that are to be autogenerated.

        TODO: Perhaps have an option to add the ids even if the files do
              not (yet) exist.

        TODO: Perhaps return the ids of the files?  But then again it
              is easy to retrieve them if they're needed.

        TODO: Option to specify file id.

        TODO: Adding a directory should optionally recurse down and
              add all non-ignored children.  Perhaps do that in a
              higher-level method.

        >>> b = ScratchBranch(files=['foo'])
        >>> 'foo' in b.unknowns()
        True
        >>> b.add('foo')
        >>> 'foo' in b.unknowns()
        False
        >>> bool(b.inventory.path2id('foo'))
        True

        >>> b.add('foo')
        Traceback (most recent call last):
        ...
        BzrError: ('foo is already versioned', [])

        >>> b.add(['nothere'])
        Traceback (most recent call last):
        BzrError: ('cannot add: not a regular file or directory: nothere', [])
        """
        from bzrlib.textui import show_status
        # TODO: Re-adding a file that is removed in the working copy
        # should probably put it back with the previous ID.
        if isinstance(files, basestring):
            assert(ids is None or isinstance(ids, basestring))
            files = [files]
            if ids is not None:
                ids = [ids]

        if ids is None:
            ids = [None] * len(files)
        else:
            assert(len(ids) == len(files))

        inv = self.read_working_inventory()
        for f,file_id in zip(files, ids):
            if is_control_file(f):
                raise BzrError("cannot add control file %s" % quotefn(f))

            fp = splitpath(f)

            if len(fp) == 0:
                raise BzrError("cannot add top-level %r" % f)

            fullpath = os.path.normpath(self.abspath(f))

            try:
                kind = file_kind(fullpath)
            except OSError:
                # maybe something better?
                raise BzrError('cannot add: not a regular file or directory: %s' % quotefn(f))

            if kind != 'file' and kind != 'directory':
                raise BzrError('cannot add: not a regular file or directory: %s' % quotefn(f))

            if file_id is None:
                file_id = gen_file_id(f)
            inv.add_path(f, kind=kind, file_id=file_id)

            if verbose:
                print 'added', quotefn(f)

            mutter("add file %s file_id:{%s} kind=%r" % (f, file_id, kind))

        self._write_inventory(inv)
    def print_file(self, file, revno):
        """Print `file` to stdout."""
        tree = self.revision_tree(self.lookup_revision(revno))
        # use inventory as it was in that revision
        file_id = tree.inventory.path2id(file)
        if not file_id:
            raise BzrError("%r is not present in revision %s" % (file, revno))
        tree.print_file(file_id)
    def remove(self, files, verbose=False):
        """Mark nominated files for removal from the inventory."""

    def unknowns(self):
        """Return all unknown files in the working tree."""
        return self.working_tree().unknowns()
    def append_revision(self, *revision_ids):
        from bzrlib.atomicfile import AtomicFile

        for revision_id in revision_ids:
            mutter("add {%s} to revision-history" % revision_id)

        rev_history = self.revision_history()
        rev_history.extend(revision_ids)

        f = AtomicFile(self.controlfilename('revision-history'))
        try:
            for rev_id in rev_history:
                print >>f, rev_id
            f.commit()
        finally:
            f.close()
    def commit(self, message, timestamp=None, timezone=None,
               committer=None, verbose=False):
        """Commit working copy as a new revision.

        The basic approach is to add all the file texts into the
        store, then the inventory, then make a new revision pointing
        to that inventory and store that.

        This is not quite safe if the working copy changes during the
        commit; for the moment that is simply not allowed.  A better
        approach is to make a temporary copy of the files before
        computing their hashes, and then add those hashes in turn to
        the inventory.  This should mean at least that there are no
        broken hash pointers.  There is no way we can get a snapshot
        of the whole directory at an instant.  This would also have to
        be robust against files disappearing, moving, etc.  So the
        whole thing is a bit hard.

        timestamp -- if not None, seconds-since-epoch for a
             postdated/predated commit.
        """

        ## TODO: Show branch names

        # TODO: Don't commit if there are no changes, unless forced?

        # First walk over the working inventory; and both update that
        # and also build a new revision inventory.  The revision
        # inventory needs to hold the text-id, sha1 and size of the
        # actual file versions committed in the revision.  (These are
        # not present in the working inventory.)  We also need to
        # detect missing/deleted files, and remove them from the
        # working inventory.

        work_inv = self.read_working_inventory()
        inv = Inventory()
        basis = self.basis_tree()
        basis_inv = basis.inventory
        missing_ids = []

        for path, entry in work_inv.iter_entries():
            ## TODO: Cope with files that have gone missing.

            ## TODO: Check that the file kind has not changed from the previous
            ## revision of this file (if any).

            entry = entry.copy()

            p = self.abspath(path)
            file_id = entry.file_id
            mutter('commit prep file %s, id %r ' % (p, file_id))

            if not os.path.exists(p):
                mutter("    file is missing, removing from inventory")
                if verbose:
                    show_status('D', entry.kind, quotefn(path))
                missing_ids.append(file_id)
                continue

            # TODO: Handle files that have been deleted

            # TODO: Maybe a special case for empty files?  Seems a
            # waste to store them many times.

            inv.add(entry)

            if basis_inv.has_id(file_id):
                old_kind = basis_inv[file_id].kind
                if old_kind != entry.kind:
                    bailout("entry %r changed kind from %r to %r"
                            % (file_id, old_kind, entry.kind))

            if entry.kind == 'directory':
                if not isdir(p):
                    bailout("%s is entered as directory but not a directory" % quotefn(p))
            elif entry.kind == 'file':
                if not isfile(p):
                    bailout("%s is entered as file but is not a file" % quotefn(p))

            if entry.kind == 'file':
                content = file(p, 'rb').read()

                entry.text_sha1 = sha_string(content)
                entry.text_size = len(content)

                old_ie = basis_inv.has_id(file_id) and basis_inv[file_id]
                if (old_ie
                    and (old_ie.text_size == entry.text_size)
                    and (old_ie.text_sha1 == entry.text_sha1)):
                    ## assert content == basis.get_file(file_id).read()
                    entry.text_id = basis_inv[file_id].text_id
                    mutter('    unchanged from previous text_id {%s}' %
                           entry.text_id)
                else:
                    entry.text_id = gen_file_id(entry.name)
                    self.text_store.add(content, entry.text_id)
                    mutter('    stored with text_id {%s}' % entry.text_id)
                    if verbose:
                        if not old_ie:
                            state = 'A'
                        elif (old_ie.name == entry.name
                              and old_ie.parent_id == entry.parent_id):
                            state = 'M'
                        else:
                            state = 'R'

                        show_status(state, entry.kind, quotefn(path))

        for file_id in missing_ids:
            # have to do this later so we don't mess up the iterator.
            # since parents may be removed before their children we
            # have to test.

            # FIXME: There's probably a better way to do this; perhaps
            # the workingtree should know how to filter itself.
            if work_inv.has_id(file_id):
                del work_inv[file_id]

        inv_id = rev_id = _gen_revision_id(time.time())

        inv_tmp = tempfile.TemporaryFile()
        inv.write_xml(inv_tmp)
        inv_tmp.seek(0)
        self.inventory_store.add(inv_tmp, inv_id)
        mutter('new inventory_id is {%s}' % inv_id)

        self._write_inventory(work_inv)

        if timestamp == None:
            timestamp = time.time()

        if committer == None:
            committer = username()

        if timezone == None:
            timezone = local_time_offset()

        mutter("building commit log message")
        rev = Revision(timestamp=timestamp,
                       timezone=timezone,
                       committer=committer,
                       precursor = self.last_patch(),
                       message=message,
                       inventory_id=inv_id,
                       revision_id=rev_id)

        rev_tmp = tempfile.TemporaryFile()
        rev.write_xml(rev_tmp)
        rev_tmp.seek(0)
        self.revision_store.add(rev_tmp, rev_id)
        mutter("new revision_id is {%s}" % rev_id)

        ## XXX: Everything up to here can simply be orphaned if we abort
        ## the commit; it will leave junk files behind but that doesn't
        ## matter.

        ## TODO: Read back the just-generated changeset, and make sure it
        ## applies and recreates the right state.

        ## TODO: Also calculate and store the inventory SHA1
        mutter("committing patch r%d" % (self.revno() + 1))

        self.append_revision(rev_id)

        if verbose:
            note("committed r%d" % self.revno())
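    # Illustrative round trip (not part of the original source), using the
    # ScratchBranch test helper defined later in this module:
    #
    #     b = ScratchBranch(files=['hello'])
    #     b.add(['hello'])
    #     b.commit('add hello')
    #     assert b.revno() == 1
    #     assert b.basis_tree().has_filename('hello')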
    def get_revision(self, revision_id):
        """Return the Revision object for a named revision"""
        from bzrlib.revision import Revision
        from bzrlib.xml import unpack_xml

        if not revision_id or not isinstance(revision_id, basestring):
            raise ValueError('invalid revision-id: %r' % revision_id)
        r = unpack_xml(Revision, self.revision_store[revision_id])
        assert r.revision_id == revision_id
        return r

    def get_revision_sha1(self, revision_id):
        """Hash the stored value of a revision, and return it."""
        # In the future, revision entries will be signed. At that
        # point, it is probably best *not* to include the signature
        # in the revision hash. Because that lets you re-sign
        # the revision, (add signatures/remove signatures) and still
        # have all hash pointers stay consistent.
        # But for now, just hash the contents.
        return sha_file(self.revision_store[revision_id])
    def get_inventory(self, inventory_id):
        """Get Inventory object by hash."""

    def revision_history(self):
        """Return sequence of revision hashes on this branch.

        >>> ScratchBranch().revision_history()
        []
        """
        return [l.rstrip('\r\n') for l in
                self.controlfile('revision-history', 'r').readlines()]
    def common_ancestor(self, other, self_revno=None, other_revno=None):
        """
        >>> import commit
        >>> sb = ScratchBranch(files=['foo', 'foo~'])
        >>> sb.common_ancestor(sb) == (None, None)
        True
        >>> commit.commit(sb, "Committing first revision", verbose=False)
        >>> sb.common_ancestor(sb)[0]
        1
        >>> clone = sb.clone()
        >>> commit.commit(sb, "Committing second revision", verbose=False)
        >>> sb.common_ancestor(sb)[0]
        2
        >>> sb.common_ancestor(clone)[0]
        1
        >>> commit.commit(clone, "Committing divergent second revision",
        ...               verbose=False)
        >>> sb.common_ancestor(clone)[0]
        1
        >>> sb.common_ancestor(clone) == clone.common_ancestor(sb)
        True
        >>> sb.common_ancestor(sb) != clone.common_ancestor(clone)
        True
        >>> clone2 = sb.clone()
        >>> sb.common_ancestor(clone2)[0]
        2
        >>> sb.common_ancestor(clone2, self_revno=1)[0]
        1
        >>> sb.common_ancestor(clone2, other_revno=1)[0]
        1
        """
        my_history = self.revision_history()
        other_history = other.revision_history()
        if self_revno is None:
            self_revno = len(my_history)
        if other_revno is None:
            other_revno = len(other_history)
        indices = range(min((self_revno, other_revno)))
        indices.reverse()
        for r in indices:
            if my_history[r] == other_history[r]:
                return r+1, my_history[r]
        return None, None
    def enum_history(self, direction):
        """Return (revno, revision_id) for history of branch.

        direction
            'forward' is from earliest to latest
            'reverse' is from latest to earliest
        """
        rh = self.revision_history()
        if direction == 'forward':
            i = 1
            for rid in rh:
                yield i, rid
                i += 1
        elif direction == 'reverse':
            i = len(rh)
            while i > 0:
                yield i, rh[i-1]
                i -= 1
        else:
            raise ValueError('invalid history direction', direction)
    def revno(self):
        """Return current revision number for this branch.

        That is equivalent to the number of revisions committed to
        this branch.

        >>> b = ScratchBranch()
        >>> b.revno()
        0
        >>> b.commit('no foo')
        >>> b.revno()
        1
        """
        return len(self.revision_history())

    def last_patch(self):
        """Return last patch hash, or None if no history.

        >>> ScratchBranch().last_patch() == None
        True
        """
        ph = self.revision_history()
        if ph:
            return ph[-1]
        else:
            return None
    def missing_revisions(self, other, stop_revision=None):
        """If self and other have not diverged, return a list of the revisions
        present in other, but missing from self.

        >>> from bzrlib.commit import commit
        >>> bzrlib.trace.silent = True
        >>> br1 = ScratchBranch()
        >>> br2 = ScratchBranch()
        >>> br1.missing_revisions(br2)
        >>> commit(br2, "lala!", rev_id="REVISION-ID-1")
        >>> br1.missing_revisions(br2)
        >>> br2.missing_revisions(br1)
        >>> commit(br1, "lala!", rev_id="REVISION-ID-1")
        >>> br1.missing_revisions(br2)
        >>> commit(br2, "lala!", rev_id="REVISION-ID-2A")
        >>> br1.missing_revisions(br2)
        >>> commit(br1, "lala!", rev_id="REVISION-ID-2B")
        >>> br1.missing_revisions(br2)
        Traceback (most recent call last):
        DivergedBranches: These branches have diverged.
        """
        self_history = self.revision_history()
        self_len = len(self_history)
        other_history = other.revision_history()
        other_len = len(other_history)
        common_index = min(self_len, other_len) - 1
        if common_index >= 0 and \
           self_history[common_index] != other_history[common_index]:
            raise DivergedBranches(self, other)

        if stop_revision is None:
            stop_revision = other_len
        elif stop_revision > other_len:
            raise NoSuchRevision(self, stop_revision)

        return other_history[self_len:stop_revision]
    def update_revisions(self, other, stop_revision=None):
        """Pull in all new revisions from other branch.

        >>> from bzrlib.commit import commit
        >>> bzrlib.trace.silent = True
        >>> br1 = ScratchBranch(files=['foo', 'bar'])
        >>> commit(br1, "lala!", rev_id="REVISION-ID-1", verbose=False)
        >>> br2 = ScratchBranch()
        >>> br2.update_revisions(br1)
        >>> br2.revision_history()
        >>> br2.update_revisions(br1)
        >>> br1.text_store.total_size() == br2.text_store.total_size()
        """
        from bzrlib.progress import ProgressBar
        from sets import Set as set

        pb = ProgressBar()
        pb.update('comparing histories')
        revision_ids = self.missing_revisions(other, stop_revision)

        if hasattr(other.revision_store, "prefetch"):
            other.revision_store.prefetch(revision_ids)
        if hasattr(other.inventory_store, "prefetch"):
            inventory_ids = [other.get_revision(r).inventory_id
                             for r in revision_ids]
            other.inventory_store.prefetch(inventory_ids)

        revisions = []
        needed_texts = set()
        i = 0
        for rev_id in revision_ids:
            i += 1
            pb.update('fetching revision', i, len(revision_ids))
            rev = other.get_revision(rev_id)
            revisions.append(rev)
            inv = other.get_inventory(str(rev.inventory_id))
            for key, entry in inv.iter_entries():
                if entry.text_id is None:
                    continue
                if entry.text_id not in self.text_store:
                    needed_texts.add(entry.text_id)

        count = self.text_store.copy_multi(other.text_store, needed_texts)
        print "Added %d texts." % count
        inventory_ids = [ f.inventory_id for f in revisions ]
        count = self.inventory_store.copy_multi(other.inventory_store,
                                                inventory_ids)
        print "Added %d inventories." % count
        revision_ids = [ f.revision_id for f in revisions]
        count = self.revision_store.copy_multi(other.revision_store,
                                               revision_ids)
        for revision_id in revision_ids:
            self.append_revision(revision_id)
        print "Added %d revisions." % count
    def commit(self, *args, **kw):
        from bzrlib.commit import commit
        commit(self, *args, **kw)
    def lookup_revision(self, revision):
        """Return the revision identifier for a given revision information."""
        revno, info = self.get_revision_info(revision)
        return info

    def get_revision_info(self, revision):
        """Return (revno, revision id) for revision identifier.

        revision can be an integer, in which case it is assumed to be revno (though
            this will translate negative values into positive ones)
        revision can also be a string, in which case it is parsed for something like
            'date:' or 'revid:' etc.
        """
        if revision is None:
            return 0, None
        revno = None
        try:# Convert to int if possible
            revision = int(revision)
        except ValueError:
            pass
        revs = self.revision_history()
        if isinstance(revision, int):
            if revision == 0:
                return 0, None
            # Maybe we should do this first, but we don't need it if revision == 0
            if revision < 0:
                revno = len(revs) + revision + 1
            else:
                revno = revision
        elif isinstance(revision, basestring):
            for prefix, func in Branch.REVISION_NAMESPACES.iteritems():
                if revision.startswith(prefix):
                    revno = func(self, revs, revision)
                    break
            else:
                raise BzrError('No namespace registered for string: %r' % revision)

        if revno is None or revno <= 0 or revno > len(revs):
            raise BzrError("no such revision %s" % revision)
        return revno, revs[revno-1]
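    # Illustrative calls (not from the original source; `b` stands for any
    # Branch instance with a few revisions).  Each call returns a
    # (revno, revision_id) pair:
    #
    #     b.get_revision_info(1)                       # revno 1
    #     b.get_revision_info(-1)                      # the last revision
    #     b.get_revision_info('revno:2')               # revno 2
    #     b.get_revision_info('revid:REVISION-ID-1')   # look up by revision id
    #     b.get_revision_info('last:1')                # the last revision
    #     b.get_revision_info('date:today')            # first revision made today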
    def _namespace_revno(self, revs, revision):
        """Lookup a revision by revision number"""
        assert revision.startswith('revno:')
        try:
            return int(revision[6:])
        except ValueError:
            return None
    REVISION_NAMESPACES['revno:'] = _namespace_revno

    def _namespace_revid(self, revs, revision):
        assert revision.startswith('revid:')
        try:
            return revs.index(revision[6:]) + 1
        except ValueError:
            return None
    REVISION_NAMESPACES['revid:'] = _namespace_revid

    def _namespace_last(self, revs, revision):
        assert revision.startswith('last:')
        try:
            offset = int(revision[5:])
        except ValueError:
            return None
        else:
            if offset <= 0:
                raise BzrError('You must supply a positive value for --revision last:XXX')
            return len(revs) - offset + 1
    REVISION_NAMESPACES['last:'] = _namespace_last

    def _namespace_tag(self, revs, revision):
        assert revision.startswith('tag:')
        raise BzrError('tag: namespace registered, but not implemented.')
    REVISION_NAMESPACES['tag:'] = _namespace_tag
    def _namespace_date(self, revs, revision):
        assert revision.startswith('date:')
        import datetime
        # Spec for date revisions:
        #   date:value
        #   value can be 'yesterday', 'today', 'tomorrow' or a YYYY-MM-DD string.
        #   it can also start with a '+/-/='. '+' says match the first
        #   entry after the given date. '-' is match the first entry before the date
        #   '=' is match the first entry after, but still on the given date.
        #
        #   +2005-05-12 says find the first matching entry after May 12th, 2005 at 0:00
        #   -2005-05-12 says find the first matching entry before May 12th, 2005 at 0:00
        #   =2005-05-12 says find the first match after May 12th, 2005 at 0:00 but before
        #       May 13th, 2005 at 0:00
        #
        #   So the proper way of saying 'give me all entries for today' is:
        #       -r {date:+today}:{date:-tomorrow}
        #   The default is '=' when not supplied
        val = revision[5:]
        match_style = '='
        if val[:1] in ('+', '-', '='):
            match_style = val[:1]
            val = val[1:]

        today = datetime.datetime.today().replace(hour=0,minute=0,second=0,microsecond=0)
        if val.lower() == 'yesterday':
            dt = today - datetime.timedelta(days=1)
        elif val.lower() == 'today':
            dt = today
        elif val.lower() == 'tomorrow':
            dt = today + datetime.timedelta(days=1)
        else:
            # This should be done outside the function to avoid recompiling it.
            _date_re = re.compile(
                    r'(?P<date>(?P<year>\d\d\d\d)-(?P<month>\d\d)-(?P<day>\d\d))?'
                    r'(?P<time>(?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d))?)?'
                )
            m = _date_re.match(val)
            if not m or (not m.group('date') and not m.group('time')):
                raise BzrError('Invalid revision date %r' % revision)

            if m.group('date'):
                year, month, day = int(m.group('year')), int(m.group('month')), int(m.group('day'))
            else:
                year, month, day = today.year, today.month, today.day
            if m.group('time'):
                hour = int(m.group('hour'))
                minute = int(m.group('minute'))
                if m.group('second'):
                    second = int(m.group('second'))
                else:
                    second = 0
            else:
                hour, minute, second = 0,0,0

            dt = datetime.datetime(year=year, month=month, day=day,
                                   hour=hour, minute=minute, second=second)

        first = dt
        last = None
        reversed = False
        if match_style == '-':
            reversed = True
        elif match_style == '=':
            last = dt + datetime.timedelta(days=1)

        if reversed:
            for i in range(len(revs)-1, -1, -1):
                r = self.get_revision(revs[i])
                # TODO: Handle timezone.
                dt = datetime.datetime.fromtimestamp(r.timestamp)
                if first >= dt and (last is None or dt >= last):
                    return i+1
        else:
            for i in range(len(revs)):
                r = self.get_revision(revs[i])
                # TODO: Handle timezone.
                dt = datetime.datetime.fromtimestamp(r.timestamp)
                if first <= dt and (last is None or dt <= last):
                    return i+1
    REVISION_NAMESPACES['date:'] = _namespace_date
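    # Illustrative 'date:' specifiers, restating the comments above (the
    # surrounding -r syntax is as described there, not verified here):
    #
    #     date:2005-05-12     first revision on May 12th, 2005 (default '=')
    #     date:+2005-05-12    first revision after May 12th, 2005 at 0:00
    #     date:-2005-05-12    first revision before May 12th, 2005 at 0:00
    #     date:=yesterday     first revision made yesterday
    #
    # so "give me all entries for today" is -r {date:+today}:{date:-tomorrow}.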
    def revision_tree(self, revision_id):
        """Return Tree for a revision on this branch.

        `revision_id` may be None for the null revision, in which case
        an `EmptyTree` is returned."""
        from bzrlib.tree import EmptyTree, RevisionTree
        # TODO: refactor this to use an existing revision object
        # so we don't need to read it in twice.
        if revision_id == None:
            return EmptyTree(self.get_root_id())
        else:
            inv = self.get_revision_inventory(revision_id)
            return RevisionTree(self.text_store, inv)

    def basis_tree(self):
        """Return `Tree` object for last revision.

        If there are no revisions yet, return an `EmptyTree`.

        >>> b = ScratchBranch(files=['foo'])
        >>> b.basis_tree().has_filename('foo')
        False
        >>> b.working_tree().has_filename('foo')
        True
        >>> b.add(['foo'])
        >>> b.commit('add foo')
        >>> b.basis_tree().has_filename('foo')
        True
        """
        from bzrlib.tree import EmptyTree, RevisionTree
        r = self.last_patch()
        if r == None:
            return EmptyTree(self.get_root_id())
        else:
            return RevisionTree(self.text_store, self.get_revision_inventory(r))
    def write_log(self, show_timezone='original', verbose=False):
        """Write out human-readable log of commits to this branch.

        utc -- If true, show dates in universal time, not local time."""
        ## TODO: Option to choose either original, utc or local timezone
        revno = 1
        precursor = None
        for p in self.revision_history():
            print '-' * 40
            print 'revno:', revno
            ## TODO: Show hash if --id is given.
            ##print 'revision-hash:', p
            rev = self.get_revision(p)
            print 'committer:', rev.committer
            print 'timestamp: %s' % (format_date(rev.timestamp, rev.timezone or 0,
                                                 show_timezone))

            ## opportunistic consistency check, same as check_patch_chaining
            if rev.precursor != precursor:
                bailout("mismatched precursor!")

            print 'message:'
            if not rev.message:
                print '  (no message)'
            else:
                for l in rev.message.split('\n'):
                    print '  ' + l

            if verbose == True and precursor != None:
                print 'changed files:'
                tree = self.revision_tree(p)
                prevtree = self.revision_tree(precursor)

                for file_state, fid, old_name, new_name, kind in \
                                        diff_trees(prevtree, tree, ):
                    if file_state == 'A' or file_state == 'M':
                        show_status(file_state, kind, new_name)
                    elif file_state == 'D':
                        show_status(file_state, kind, old_name)
                    elif file_state == 'R':
                        show_status(file_state, kind,
                                    old_name + ' => ' + new_name)

            revno += 1
            precursor = p
    def rename_one(self, from_rel, to_rel):
        """Rename one file.

        This can change the directory or the filename or both.
        """
        tree = self.working_tree()
        inv = tree.inventory
        if not tree.has_filename(from_rel):
            raise BzrError("can't rename: old working file %r does not exist" % from_rel)
        if tree.has_filename(to_rel):
            raise BzrError("can't rename: new working file %r already exists" % to_rel)

        file_id = inv.path2id(from_rel)
        if file_id == None:
            raise BzrError("can't rename: old name %r is not versioned" % from_rel)

        if inv.path2id(to_rel):
            raise BzrError("can't rename: new name %r is already versioned" % to_rel)

        to_dir, to_tail = os.path.split(to_rel)
        to_dir_id = inv.path2id(to_dir)
        if to_dir_id == None and to_dir != '':
            raise BzrError("can't determine destination directory id for %r" % to_dir)

        mutter("rename_one:")
        mutter("  file_id    {%s}" % file_id)
        mutter("  from_rel   %r" % from_rel)
        mutter("  to_rel     %r" % to_rel)
        mutter("  to_dir     %r" % to_dir)
        mutter("  to_dir_id  {%s}" % to_dir_id)

        inv.rename(file_id, to_dir_id, to_tail)

        print "%s => %s" % (from_rel, to_rel)

        from_abs = self.abspath(from_rel)
        to_abs = self.abspath(to_rel)
        try:
            os.rename(from_abs, to_abs)
        except OSError, e:
            raise BzrError("failed to rename %r to %r: %s"
                    % (from_abs, to_abs, e[1]),
                    ["rename rolled back"])

        self._write_inventory(inv)
    def move(self, from_paths, to_name):
        """Rename files.

        Note that to_name is only the last component of the new name;
        this doesn't change the directory.
        """
        ## TODO: Option to move IDs only
        assert not isinstance(from_paths, basestring)
        tree = self.working_tree()
        inv = tree.inventory
        to_abs = self.abspath(to_name)
        if not isdir(to_abs):
            raise BzrError("destination %r is not a directory" % to_abs)
        if not tree.has_filename(to_name):
            raise BzrError("destination %r not in working directory" % to_abs)
        to_dir_id = inv.path2id(to_name)
        if to_dir_id == None and to_name != '':
            raise BzrError("destination %r is not a versioned directory" % to_name)
        to_dir_ie = inv[to_dir_id]
        if to_dir_ie.kind not in ('directory', 'root_directory'):
            raise BzrError("destination %r is not a directory" % to_abs)

        to_idpath = inv.get_idpath(to_dir_id)

        for f in from_paths:
            if not tree.has_filename(f):
                raise BzrError("%r does not exist in working tree" % f)
            f_id = inv.path2id(f)
            if f_id == None:
                raise BzrError("%r is not versioned" % f)
            name_tail = splitpath(f)[-1]
            dest_path = appendpath(to_name, name_tail)
            if tree.has_filename(dest_path):
                raise BzrError("destination %r already exists" % dest_path)
            if f_id in to_idpath:
                raise BzrError("can't move %r to a subdirectory of itself" % f)

        # OK, so there's a race here, it's possible that someone will
        # create a file in this interval and then the rename might be
        # left half-done.  But we should have caught most problems.

        for f in from_paths:
            name_tail = splitpath(f)[-1]
            dest_path = appendpath(to_name, name_tail)
            print "%s => %s" % (f, dest_path)
            inv.rename(inv.path2id(f), to_dir_id, name_tail)
            try:
                os.rename(self.abspath(f), self.abspath(dest_path))
            except OSError, e:
                raise BzrError("failed to rename %r to %r: %s" % (f, dest_path, e[1]),
                        ["rename rolled back"])

        self._write_inventory(inv)
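    # Illustrative use (not from the original source): move two versioned
    # files into 'src', which must already be a versioned directory in the
    # working tree; each file keeps its name:
    #
    #     b.move(['foo.c', 'bar.c'], 'src')
    #     # prints:
    #     #   foo.c => src/foo.c
    #     #   bar.c => src/bar.c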
    def revert(self, filenames, old_tree=None, backups=True):
        """Restore selected files to the versions from a previous tree.

        backups
            If true (default) backups are made of files before
            they're renamed.
        """
        from bzrlib.errors import NotVersionedError, BzrError
        from bzrlib.atomicfile import AtomicFile
        from bzrlib.osutils import backup_file
        inv = self.read_working_inventory()
        if old_tree is None:
            old_tree = self.basis_tree()
        old_inv = old_tree.inventory

        nids = []
        for fn in filenames:
            file_id = inv.path2id(fn)
            if file_id is None:
                raise NotVersionedError("not a versioned file", fn)
            if not old_inv.has_id(file_id):
                raise BzrError("file not present in old tree", fn, file_id)
            nids.append((fn, file_id))

        # TODO: Rename back if it was previously at a different location

        # TODO: If given a directory, restore the entire contents from
        # the previous version.

        # TODO: Make a backup to a temporary file.

        # TODO: If the file previously didn't exist, delete it?
        for fn, file_id in nids:
            backup_file(fn)

            f = AtomicFile(fn, 'wb')
            try:
                f.write(old_tree.get_file(file_id).read())
                f.commit()
            finally:
                f.close()
    def pending_merges(self):
        """Return a list of pending merges.

        These are revisions that have been merged into the working
        directory but not yet committed.
        """
        cfn = self.controlfilename('pending-merges')
        if not os.path.exists(cfn):
            return []
        p = []
        for l in self.controlfile('pending-merges', 'r').readlines():
            p.append(l.rstrip('\n'))
        return p
    def add_pending_merge(self, revision_id):
        from bzrlib.revision import validate_revision_id

        validate_revision_id(revision_id)

        p = self.pending_merges()
        if revision_id in p:
            return
        p.append(revision_id)
        self.set_pending_merges(p)
    def set_pending_merges(self, rev_list):
        from bzrlib.atomicfile import AtomicFile

        f = AtomicFile(self.controlfilename('pending-merges'))
        try:
            for l in rev_list:
                print >>f, l
            f.commit()
        finally:
            f.close()
    def show_status(self, show_all=False):
        """Display single-line status for non-ignored working files.

        The list is shown sorted in order by file name.

        >>> b = ScratchBranch(files=['foo', 'foo~'])
        >>> b.show_status()
        ?       foo
        >>> b.add('foo')
        >>> b.show_status()
        A       foo
        >>> b.commit("add foo")
        >>> b.show_status()
        >>> os.unlink(b.abspath('foo'))
        >>> b.show_status()
        D       foo

        TODO: Get state for single files.
        """
        # We have to build everything into a list first so that it can be
        # sorted by name, incorporating all the different sources.

        # FIXME: Rather than getting things in random order and then sorting,
        # just step through in order.

        # Interesting case: the old ID for a file has been removed,
        # but a new file has been created under that name.

        old = self.basis_tree()
        new = self.working_tree()

        for fs, fid, oldname, newname, kind in diff_trees(old, new):
            if fs == 'R':
                show_status(fs, kind,
                            oldname + ' => ' + newname)
            elif fs == 'A' or fs == 'M':
                show_status(fs, kind, newname)
            elif fs == 'D':
                show_status(fs, kind, oldname)
            elif fs == '.':
                if show_all:
                    show_status(fs, kind, newname)
            elif fs == 'I':
                if show_all:
                    show_status(fs, kind, newname)
            elif fs == '?':
                show_status(fs, kind, newname)
            else:
                bailout("weird file state %r" % ((fs, fid),))
class ScratchBranch(Branch):