# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

import sys, os, os.path, random, time, sha, sets, types, re, shutil, tempfile
import traceback, socket, fnmatch, difflib
from binascii import hexlify

from bzrlib.trace import mutter, note
from bzrlib.osutils import isdir, isfile, quotefn, uuid, compact_date, \
     rand_bytes, splitpath, joinpath, appendpath, sha_file, sha_string, \
     file_kind, username, user_email, format_date, pumpfile, \
     local_time_offset
from bzrlib.errors import bailout, BzrError
from bzrlib.inventory import InventoryEntry, Inventory
from bzrlib.tree import Tree, EmptyTree, RevisionTree, WorkingTree
from bzrlib.store import ImmutableStore
from bzrlib.revision import Revision
from bzrlib.textui import show_status
from bzrlib.diff import diff_trees


BZR_BRANCH_FORMAT = "Bazaar-NG branch, format 0.0.4\n"
## TODO: Maybe include checks for common corruption of newlines, etc?


def find_branch(f, **args):
    if f and (f.startswith('http://') or f.startswith('https://')):
        import remotebranch
        return remotebranch.RemoteBranch(f, **args)
    else:
        return Branch(f, **args)


def find_cached_branch(f, cache_root, **args):
    from remotebranch import RemoteBranch
    br = find_branch(f, **args)
    def cacheify(br, store_name):
        from meta_store import CachedStore
        cache_path = os.path.join(cache_root, store_name)
        new_store = CachedStore(getattr(br, store_name), cache_path)
        setattr(br, store_name, new_store)

    if isinstance(br, RemoteBranch):
        cacheify(br, 'inventory_store')
        cacheify(br, 'text_store')
        cacheify(br, 'revision_store')
    return br
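
# A minimal usage sketch (illustrative only, not from the original source):
# a remote branch gets its stores wrapped in local CachedStore objects, while
# a local branch is returned unchanged.
#
#   br = find_cached_branch('http://example.com/branch', '/tmp/bzr-cache')
#   rev_ids = br.revision_history()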


def _relpath(base, path):
    """Return path relative to base, or raise exception.

    The path may be either an absolute path or a path relative to the
    current working directory.

    Lifted out of Branch.relpath for ease of testing.

    os.path.commonprefix (python2.4) has a bad bug that it works just
    on string prefixes, assuming that '/u' is a prefix of '/u2'.  This
    avoids that problem."""
    rp = os.path.abspath(path)

    s = []
    head = rp
    while len(head) >= len(base):
        if head == base:
            break
        head, tail = os.path.split(head)
        if tail:
            s.insert(0, tail)
    else:
        from errors import NotBranchError
        raise NotBranchError("path %r is not within branch %r" % (rp, base))

    return os.sep.join(s)
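
# Illustrative example (assumed behaviour, not taken from the original source):
# with base '/home/user/branch', _relpath('/home/user/branch',
# '/home/user/branch/a/b') returns 'a/b', while a path outside the branch
# raises NotBranchError.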


def find_branch_root(f=None):
    """Find the branch root enclosing f, or pwd.

    f may be a filename or a URL.

    It is not necessary that f exists.

    Basically we keep looking up until we find the control directory or
    run into the root."""
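    # The original body is not reproduced here.  A minimal sketch of the usual
    # walk-upwards approach, assuming the control directory is named '.bzr'
    # (illustrative only, not the original implementation):
    #
    #     if f is None:
    #         f = os.getcwd()
    #     orig_f = f
    #     f = os.path.realpath(f)
    #     while True:
    #         if os.path.exists(os.path.join(f, '.bzr')):
    #             return f
    #         head, tail = os.path.split(f)
    #         if head == f:   # reached the filesystem root without finding .bzr
    #             raise BzrError('%r is not in a branch' % orig_f)
    #         f = head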

    def _check_format(self):
        """Check this branch format is supported."""
        fmt = self.controlfile('branch-format', 'r').read()
        fmt = fmt.replace('\r\n', '\n')
        if fmt != BZR_BRANCH_FORMAT:
            raise BzrError('sorry, branch format %r not supported' % fmt,
                           ['use a different bzr version',
                            'or remove the .bzr directory and "bzr init" again'])


    def get_root_id(self):
        """Return the id of this branch's root."""
        inv = self.read_working_inventory()
        return inv.root.file_id

    def set_root_id(self, file_id):
        inv = self.read_working_inventory()
        orig_root_id = inv.root.file_id
        del inv._byid[inv.root.file_id]
        inv.root.file_id = file_id
        inv._byid[inv.root.file_id] = inv.root
        for fid in inv:
            entry = inv[fid]
            if entry.parent_id in (None, orig_root_id):
                entry.parent_id = inv.root.file_id
        self._write_inventory(inv)

    def read_working_inventory(self):
        """Read the working inventory."""
        from bzrlib.inventory import Inventory
        from bzrlib.xml import unpack_xml
        from time import time
        before = time()
        # ElementTree does its own conversion from UTF-8, so open in
        # binary.
        inv = unpack_xml(Inventory,
                         self.controlfile('inventory', 'rb'))
        mutter("loaded inventory of %d items in %f"
               % (len(inv), time() - before))
        return inv

    def _write_inventory(self, inv):
        """Update the working inventory."""

    def add(self, files, verbose=False, ids=None):
        """Make files versioned.

        This puts the files in the Added state, so that they will be
        recorded by the next commit.

        files
            List of paths to add, relative to the base of the tree.

        ids
            If set, use these instead of automatically generated ids.
            Must be the same length as the list of files, but may
            contain None for ids that are to be autogenerated.

        TODO: Perhaps have an option to add the ids even if the files do
              not (yet) exist.

        TODO: Perhaps return the ids of the files?  But then again it
              is easy to retrieve them if they're needed.

        TODO: Option to specify file id.

        TODO: Adding a directory should optionally recurse down and
              add all non-ignored children.  Perhaps do that in a
              higher-level method.

        >>> b = ScratchBranch(files=['foo'])
        >>> 'foo' in b.unknowns()
        True
        >>> b.add('foo')
        >>> 'foo' in b.unknowns()
        False
        >>> bool(b.inventory.path2id('foo'))
        True

        >>> b.add('foo')
        Traceback (most recent call last):
        ...
        BzrError: ('foo is already versioned', [])

        >>> b.add(['nothere'])
        Traceback (most recent call last):
        BzrError: ('cannot add: not a regular file or directory: nothere', [])
        """
        from bzrlib.textui import show_status

        # TODO: Re-adding a file that is removed in the working copy
        # should probably put it back with the previous ID.
        if isinstance(files, basestring):
            assert(ids is None or isinstance(ids, basestring))
            files = [files]
            if ids is not None:
                ids = [ids]

        if ids is None:
            ids = [None] * len(files)
        else:
            assert(len(ids) == len(files))

        inv = self.read_working_inventory()
        for f, file_id in zip(files, ids):
            if is_control_file(f):
                raise BzrError("cannot add control file %s" % quotefn(f))

            fp = splitpath(f)

            if len(fp) == 0:
                raise BzrError("cannot add top-level %r" % f)

            fullpath = os.path.normpath(self.abspath(f))

            try:
                kind = file_kind(fullpath)
            except OSError:
                # maybe something better?
                raise BzrError('cannot add: not a regular file or directory: %s' % quotefn(f))

            if kind != 'file' and kind != 'directory':
                raise BzrError('cannot add: not a regular file or directory: %s' % quotefn(f))

            if file_id is None:
                file_id = gen_file_id(f)
            inv.add_path(f, kind=kind, file_id=file_id)

            if verbose:
                print 'added', quotefn(f)

            mutter("add file %s file_id:{%s} kind=%r" % (f, file_id, kind))

        self._write_inventory(inv)


    def print_file(self, file, revno):
        """Print `file` to stdout."""
        tree = self.revision_tree(self.lookup_revision(revno))
        # use inventory as it was in that revision
        file_id = tree.inventory.path2id(file)
        if not file_id:
            raise BzrError("%r is not present in revision %s" % (file, revno))
        tree.print_file(file_id)


    def remove(self, files, verbose=False):
        """Mark nominated files for removal from the inventory.
        """

    def unknowns(self):
        """Return all unknown files in the working tree."""
        return self.working_tree().unknowns()


    def append_revision(self, *revision_ids):
        from bzrlib.atomicfile import AtomicFile

        for revision_id in revision_ids:
            mutter("add {%s} to revision-history" % revision_id)

        rev_history = self.revision_history()
        rev_history.extend(revision_ids)

        f = AtomicFile(self.controlfilename('revision-history'))
        try:
            for rev_id in rev_history:
                print >>f, rev_id
            f.commit()
        finally:
            f.close()


    def commit(self, message, timestamp=None, timezone=None,
               committer=None,
               verbose=False):
        """Commit working copy as a new revision.

        The basic approach is to add all the file texts into the
        store, then the inventory, then make a new revision pointing
        to that inventory and store that.

        This is not quite safe if the working copy changes during the
        commit; for the moment that is simply not allowed.  A better
        approach is to make a temporary copy of the files before
        computing their hashes, and then add those hashes in turn to
        the inventory.  This should mean at least that there are no
        broken hash pointers.  There is no way we can get a snapshot
        of the whole directory at an instant.  This would also have to
        be robust against files disappearing, moving, etc.  So the
        whole thing is a bit hard.

        timestamp -- if not None, seconds-since-epoch for a
             postdated/predated commit.
        """
        self._need_writelock()

        ## TODO: Show branch names

        # TODO: Don't commit if there are no changes, unless forced?

        # First walk over the working inventory; and both update that
        # and also build a new revision inventory.  The revision
        # inventory needs to hold the text-id, sha1 and size of the
        # actual file versions committed in the revision.  (These are
        # not present in the working inventory.)  We also need to
        # detect missing/deleted files, and remove them from the
        # working inventory.

        work_inv = self.read_working_inventory()
        inv = Inventory()
        basis = self.basis_tree()
        basis_inv = basis.inventory
        missing_ids = []

        for path, entry in work_inv.iter_entries():
            ## TODO: Cope with files that have gone missing.

            ## TODO: Check that the file kind has not changed from the previous
            ## revision of this file (if any).

            entry = entry.copy()

            p = self.abspath(path)
            file_id = entry.file_id
            mutter('commit prep file %s, id %r ' % (p, file_id))

            if not os.path.exists(p):
                mutter("    file is missing, removing from inventory")
                if verbose:
                    show_status('D', entry.kind, quotefn(path))
                missing_ids.append(file_id)
                continue

            # TODO: Handle files that have been deleted

            # TODO: Maybe a special case for empty files?  Seems a
            # waste to store them many times.

            inv.add(entry)

            if basis_inv.has_id(file_id):
                old_kind = basis_inv[file_id].kind
                if old_kind != entry.kind:
                    bailout("entry %r changed kind from %r to %r"
                            % (file_id, old_kind, entry.kind))

            if entry.kind == 'directory':
                if not isdir(p):
                    bailout("%s is entered as directory but not a directory" % quotefn(p))
            elif entry.kind == 'file':
                if not isfile(p):
                    bailout("%s is entered as file but is not a file" % quotefn(p))

                content = file(p, 'rb').read()

                entry.text_sha1 = sha_string(content)
                entry.text_size = len(content)

                old_ie = basis_inv.has_id(file_id) and basis_inv[file_id]
                if (old_ie
                    and (old_ie.text_size == entry.text_size)
                    and (old_ie.text_sha1 == entry.text_sha1)):
                    ## assert content == basis.get_file(file_id).read()
                    entry.text_id = basis_inv[file_id].text_id
                    mutter('    unchanged from previous text_id {%s}' %
                           entry.text_id)
                else:
                    entry.text_id = gen_file_id(entry.name)
                    self.text_store.add(content, entry.text_id)
                    mutter('    stored with text_id {%s}' % entry.text_id)
                    if verbose:
                        if not old_ie:
                            state = 'A'
                        elif (old_ie.name == entry.name
                              and old_ie.parent_id == entry.parent_id):
                            state = 'M'
                        else:
                            state = 'R'

                        show_status(state, entry.kind, quotefn(path))

        for file_id in missing_ids:
            # have to do this later so we don't mess up the iterator.
            # since parents may be removed before their children we
            # have to test.
            # FIXME: There's probably a better way to do this; perhaps
            # the workingtree should know how to filter itself.
            if work_inv.has_id(file_id):
                del work_inv[file_id]

        inv_id = rev_id = _gen_revision_id(time.time())

        inv_tmp = tempfile.TemporaryFile()
        inv.write_xml(inv_tmp)
        inv_tmp.seek(0)
        self.inventory_store.add(inv_tmp, inv_id)
        mutter('new inventory_id is {%s}' % inv_id)

        self._write_inventory(work_inv)

        if timestamp == None:
            timestamp = time.time()

        if committer == None:
            committer = username()

        if timezone == None:
            timezone = local_time_offset()

        mutter("building commit log message")
        rev = Revision(timestamp=timestamp,
                       timezone=timezone,
                       committer=committer,
                       message=message,
                       precursor=self.last_patch(),
                       inventory_id=inv_id,
                       revision_id=rev_id)

        rev_tmp = tempfile.TemporaryFile()
        rev.write_xml(rev_tmp)
        rev_tmp.seek(0)
        self.revision_store.add(rev_tmp, rev_id)
        mutter("new revision_id is {%s}" % rev_id)

        ## XXX: Everything up to here can simply be orphaned if we abort
        ## the commit; it will leave junk files behind but that doesn't
        ## matter.

        ## TODO: Read back the just-generated changeset, and make sure it
        ## applies and recreates the right state.

        ## TODO: Also calculate and store the inventory SHA1
        mutter("committing patch r%d" % (self.revno() + 1))

        self.append_revision(rev_id)

        if verbose:
            note("committed r%d" % self.revno())


    def get_revision(self, revision_id):
        """Return the Revision object for a named revision."""
        from bzrlib.revision import Revision
        from bzrlib.xml import unpack_xml

        if not revision_id or not isinstance(revision_id, basestring):
            raise ValueError('invalid revision-id: %r' % revision_id)
        r = unpack_xml(Revision, self.revision_store[revision_id])
        assert r.revision_id == revision_id
        return r

    def get_revision_sha1(self, revision_id):
        """Hash the stored value of a revision, and return it."""
        # In the future, revision entries will be signed.  At that
        # point, it is probably best *not* to include the signature
        # in the revision hash.  Because that lets you re-sign
        # the revision, (add signatures/remove signatures) and still
        # have all hash pointers stay consistent.
        # But for now, just hash the contents.
        return sha_file(self.revision_store[revision_id])


    def get_inventory(self, inventory_id):
        """Get Inventory object by hash."""

    def revision_history(self):
        """Return sequence of revision ids on this branch.

        >>> ScratchBranch().revision_history()
        []
        """
        return [l.rstrip('\r\n') for l in
                self.controlfile('revision-history', 'r').readlines()]

    def common_ancestor(self, other, self_revno=None, other_revno=None):
        """
        >>> import commit
        >>> sb = ScratchBranch(files=['foo', 'foo~'])
        >>> sb.common_ancestor(sb) == (None, None)
        True
        >>> commit.commit(sb, "Committing first revision", verbose=False)
        >>> sb.common_ancestor(sb)[0]
        1
        >>> clone = sb.clone()
        >>> commit.commit(sb, "Committing second revision", verbose=False)
        >>> sb.common_ancestor(sb)[0]
        2
        >>> sb.common_ancestor(clone)[0]
        1
        >>> commit.commit(clone, "Committing divergent second revision",
        ...               verbose=False)
        >>> sb.common_ancestor(clone)[0]
        1
        >>> sb.common_ancestor(clone) == clone.common_ancestor(sb)
        True
        >>> sb.common_ancestor(sb) != clone.common_ancestor(clone)
        True
        >>> clone2 = sb.clone()
        >>> sb.common_ancestor(clone2)[0]
        2
        >>> sb.common_ancestor(clone2, self_revno=1)[0]
        1
        >>> sb.common_ancestor(clone2, other_revno=1)[0]
        1
        """
        my_history = self.revision_history()
        other_history = other.revision_history()
        if self_revno is None:
            self_revno = len(my_history)
        if other_revno is None:
            other_revno = len(other_history)
        indices = range(min((self_revno, other_revno)))
        indices.reverse()
        for r in indices:
            if my_history[r] == other_history[r]:
                return r+1, my_history[r]
        return None, None

    def enum_history(self, direction):
        """Return (revno, revision_id) for history of branch.

        direction
            'forward' is from earliest to latest
            'reverse' is from latest to earliest
        """
        rh = self.revision_history()
        if direction == 'forward':
            i = 1
            for rid in rh:
                yield i, rid
                i += 1
        elif direction == 'reverse':
            i = len(rh)
            while i > 0:
                yield i, rh[i-1]
                i -= 1
        else:
            raise ValueError('invalid history direction', direction)
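
    # Example use (illustrative only, not from the original source): printing
    # the branch history oldest-first:
    #
    #   for revno, rev_id in branch.enum_history('forward'):
    #       print revno, rev_id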

    def revno(self):
        """Return current revision number for this branch.

        That is equivalent to the number of revisions committed to
        this branch.

        >>> b = ScratchBranch()
        >>> b.revno()
        0
        >>> b.commit('no foo')
        >>> b.revno()
        1
        """
        return len(self.revision_history())


    def last_patch(self):
        """Return last patch hash, or None if no history.

        >>> ScratchBranch().last_patch() == None
        True
        """
        ph = self.revision_history()
        if ph:
            return ph[-1]
        else:
            return None

    def missing_revisions(self, other, stop_revision=None):
        """
        If self and other have not diverged, return a list of the revisions
        present in other, but missing from self.

        >>> from bzrlib.commit import commit
        >>> bzrlib.trace.silent = True
        >>> br1 = ScratchBranch()
        >>> br2 = ScratchBranch()
        >>> br1.missing_revisions(br2)
        []
        >>> commit(br2, "lala!", rev_id="REVISION-ID-1")
        >>> br1.missing_revisions(br2)
        [u'REVISION-ID-1']
        >>> br2.missing_revisions(br1)
        []
        >>> commit(br1, "lala!", rev_id="REVISION-ID-1")
        >>> br1.missing_revisions(br2)
        []
        >>> commit(br2, "lala!", rev_id="REVISION-ID-2A")
        >>> br1.missing_revisions(br2)
        [u'REVISION-ID-2A']
        >>> commit(br1, "lala!", rev_id="REVISION-ID-2B")
        >>> br1.missing_revisions(br2)
        Traceback (most recent call last):
        DivergedBranches: These branches have diverged.
        """
        self_history = self.revision_history()
        self_len = len(self_history)
        other_history = other.revision_history()
        other_len = len(other_history)
        common_index = min(self_len, other_len) - 1
        if common_index >= 0 and \
            self_history[common_index] != other_history[common_index]:
            raise DivergedBranches(self, other)

        if stop_revision is None:
            stop_revision = other_len
        elif stop_revision > other_len:
            raise NoSuchRevision(self, stop_revision)

        return other_history[self_len:stop_revision]

    def update_revisions(self, other, stop_revision=None):
        """Pull in all new revisions from other branch.

        >>> from bzrlib.commit import commit
        >>> bzrlib.trace.silent = True
        >>> br1 = ScratchBranch(files=['foo', 'bar'])
        >>> br1.add('foo')
        >>> br1.add('bar')
        >>> commit(br1, "lala!", rev_id="REVISION-ID-1", verbose=False)
        >>> br2 = ScratchBranch()
        >>> br2.update_revisions(br1)
        Added 2 texts.
        Added 1 inventories.
        Added 1 revisions.
        >>> br2.revision_history()
        [u'REVISION-ID-1']
        >>> br2.update_revisions(br1)
        Added 0 texts.
        Added 0 inventories.
        Added 0 revisions.
        >>> br1.text_store.total_size() == br2.text_store.total_size()
        True
        """
        from bzrlib.progress import ProgressBar

        pb = ProgressBar()

        pb.update('comparing histories')
        revision_ids = self.missing_revisions(other, stop_revision)

        if hasattr(other.revision_store, "prefetch"):
            other.revision_store.prefetch(revision_ids)
        if hasattr(other.inventory_store, "prefetch"):
            inventory_ids = [other.get_revision(r).inventory_id
                             for r in revision_ids]
            other.inventory_store.prefetch(inventory_ids)

        revisions = []
        needed_texts = sets.Set()
        i = 0
        for rev_id in revision_ids:
            i += 1
            pb.update('fetching revision', i, len(revision_ids))
            rev = other.get_revision(rev_id)
            revisions.append(rev)
            inv = other.get_inventory(str(rev.inventory_id))
            for key, entry in inv.iter_entries():
                if entry.text_id is None:
                    continue
                if entry.text_id not in self.text_store:
                    needed_texts.add(entry.text_id)

        count = self.text_store.copy_multi(other.text_store, needed_texts)
        print "Added %d texts." % count
        inventory_ids = [ f.inventory_id for f in revisions ]
        count = self.inventory_store.copy_multi(other.inventory_store,
                                                inventory_ids)
        print "Added %d inventories." % count
        revision_ids = [ f.revision_id for f in revisions]
        count = self.revision_store.copy_multi(other.revision_store,
                                               revision_ids)
        for revision_id in revision_ids:
            self.append_revision(revision_id)
        print "Added %d revisions." % count


    def commit(self, *args, **kw):
        from bzrlib.commit import commit
        commit(self, *args, **kw)


    def lookup_revision(self, revision):
        """Return the revision identifier for a given revision information."""
        revno, info = self.get_revision_info(revision)
        return info

    def get_revision_info(self, revision):
        """Return (revno, revision id) for revision identifier.

        revision can be an integer, in which case it is assumed to be revno
        (though this will translate negative values into positive ones).
        revision can also be a string, in which case it is parsed for something
        like 'date:' or 'revid:' etc.
        """
        if revision is None:
            return 0, None
        revno = None
        try:  # Convert to int if possible
            revision = int(revision)
        except ValueError:
            pass
        revs = self.revision_history()
        if isinstance(revision, int):
            if revision == 0:
                return 0, None
            # Maybe we should do this first, but we don't need it if revision == 0
            if revision < 0:
                revno = len(revs) + revision + 1
            else:
                revno = revision
        elif isinstance(revision, basestring):
            for prefix, func in Branch.REVISION_NAMESPACES.iteritems():
                if revision.startswith(prefix):
                    revno = func(self, revs, revision)
                    break
            else:
                raise BzrError('No namespace registered for string: %r' % revision)

        if revno is None or revno <= 0 or revno > len(revs):
            raise BzrError("no such revision %s" % revision)
        return revno, revs[revno-1]
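
    # Illustrative examples (assumed behaviour, not from the original source):
    #
    #   branch.get_revision_info(3)          # -> (3, <id of revision 3>)
    #   branch.get_revision_info(-1)         # -> info for the latest revision
    #   branch.get_revision_info('revno:3')  # same as the integer form
    #   branch.get_revision_info('last:1')   # the most recent revision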

    def _namespace_revno(self, revs, revision):
        """Lookup a revision by revision number"""
        assert revision.startswith('revno:')
        return int(revision[6:])
    REVISION_NAMESPACES['revno:'] = _namespace_revno

    def _namespace_revid(self, revs, revision):
        assert revision.startswith('revid:')
        return revs.index(revision[6:]) + 1
    REVISION_NAMESPACES['revid:'] = _namespace_revid

    def _namespace_last(self, revs, revision):
        assert revision.startswith('last:')
        offset = int(revision[5:])
        if offset <= 0:
            raise BzrError('You must supply a positive value for --revision last:XXX')
        return len(revs) - offset + 1
    REVISION_NAMESPACES['last:'] = _namespace_last

    def _namespace_tag(self, revs, revision):
        assert revision.startswith('tag:')
        raise BzrError('tag: namespace registered, but not implemented.')
    REVISION_NAMESPACES['tag:'] = _namespace_tag

    def _namespace_date(self, revs, revision):
        assert revision.startswith('date:')
        import datetime
        val = revision[5:]

        # Spec for date revisions:
        #   date:value
        #   value can be 'yesterday', 'today', 'tomorrow' or a YYYY-MM-DD string.
        #   it can also start with a '+/-/='. '+' says match the first
        #   entry after the given date. '-' is match the first entry before the date
        #   '=' is match the first entry after, but still on the given date.
        #
        #   +2005-05-12 says find the first matching entry after May 12th, 2005 at 0:00
        #   -2005-05-12 says find the first matching entry before May 12th, 2005 at 0:00
        #   =2005-05-12 says find the first match after May 12th, 2005 at 0:00 but before
        #       May 13th, 2005 at 0:00
        #
        #   So the proper way of saying 'give me all entries for today' is:
        #       -r {date:+today}:{date:-tomorrow}
        #   The default is '=' when not supplied
        match_style = '='
        if val[:1] in ('+', '-', '='):
            match_style = val[:1]
            val = val[1:]

        today = datetime.datetime.today().replace(hour=0,minute=0,second=0,microsecond=0)
        if val.lower() == 'yesterday':
            dt = today - datetime.timedelta(days=1)
        elif val.lower() == 'today':
            dt = today
        elif val.lower() == 'tomorrow':
            dt = today + datetime.timedelta(days=1)
        else:
            # This should be done outside the function to avoid recompiling it.
            _date_re = re.compile(
                    r'(?P<date>(?P<year>\d\d\d\d)-(?P<month>\d\d)-(?P<day>\d\d))?'
                    r'\s*'
                    r'(?P<time>(?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d))?)?'
                )
            m = _date_re.match(val)
            if not m or (not m.group('date') and not m.group('time')):
                raise BzrError('Invalid revision date %r' % revision)

            if m.group('date'):
                year, month, day = int(m.group('year')), int(m.group('month')), int(m.group('day'))
            else:
                year, month, day = today.year, today.month, today.day
            if m.group('time'):
                hour = int(m.group('hour'))
                minute = int(m.group('minute'))
                if m.group('second'):
                    second = int(m.group('second'))
                else:
                    second = 0
            else:
                hour, minute, second = 0, 0, 0

            dt = datetime.datetime(year=year, month=month, day=day,
                    hour=hour, minute=minute, second=second)

        first = dt
        last = None
        reversed = False
        if match_style == '-':
            reversed = True
        elif match_style == '=':
            last = dt + datetime.timedelta(days=1)

        if reversed:
            for i in range(len(revs)-1, -1, -1):
                r = self.get_revision(revs[i])
                # TODO: Handle timezone.
                dt = datetime.datetime.fromtimestamp(r.timestamp)
                if first >= dt and (last is None or dt >= last):
                    return i+1
        else:
            for i in range(len(revs)):
                r = self.get_revision(revs[i])
                # TODO: Handle timezone.
                dt = datetime.datetime.fromtimestamp(r.timestamp)
                if first <= dt and (last is None or dt <= last):
                    return i+1
    REVISION_NAMESPACES['date:'] = _namespace_date
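
    # The REVISION_NAMESPACES table above can be extended with further
    # prefixes.  A hypothetical example (not part of the original code):
    #
    #     def _namespace_head(self, revs, revision):
    #         assert revision.startswith('head:')
    #         return len(revs)
    #     REVISION_NAMESPACES['head:'] = _namespace_head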


    def revision_tree(self, revision_id):
        """Return Tree for a revision on this branch.

        `revision_id` may be None for the null revision, in which case
        an `EmptyTree` is returned."""
        from bzrlib.tree import EmptyTree, RevisionTree
        # TODO: refactor this to use an existing revision object
        # so we don't need to read it in twice.
        if revision_id == None:
            return EmptyTree(self.get_root_id())
        else:
            inv = self.get_revision_inventory(revision_id)
            return RevisionTree(self.text_store, inv)

    def basis_tree(self):
        """Return `Tree` object for last revision.

        If there are no revisions yet, return an `EmptyTree`.

        >>> b = ScratchBranch(files=['foo'])
        >>> b.basis_tree().has_filename('foo')
        False
        >>> b.working_tree().has_filename('foo')
        True
        >>> b.add('foo')
        >>> b.commit('add foo')
        >>> b.basis_tree().has_filename('foo')
        True
        """
        from bzrlib.tree import EmptyTree, RevisionTree
        r = self.last_patch()
        if r == None:
            return EmptyTree(self.get_root_id())
        else:
            return RevisionTree(self.text_store, self.get_revision_inventory(r))

    def write_log(self, show_timezone='original', verbose=False):
        """Write out human-readable log of commits to this branch.

        utc -- If true, show dates in universal time, not local time."""
        self._need_readlock()
        ## TODO: Option to choose either original, utc or local timezone
        revno = 1
        precursor = None
        for p in self.revision_history():
            print 'revno:', revno
            ## TODO: Show hash if --id is given.
            ##print 'revision-hash:', p
            rev = self.get_revision(p)
            print 'committer:', rev.committer
            print 'timestamp: %s' % (format_date(rev.timestamp, rev.timezone or 0,
                                                 show_timezone))

            ## opportunistic consistency check, same as check_patch_chaining
            if rev.precursor != precursor:
                bailout("mismatched precursor!")

            print 'message:'
            if not rev.message:
                print '  (no message)'
            else:
                for l in rev.message.split('\n'):
                    print '  ' + l

            if verbose == True and precursor != None:
                print 'changed files:'
                tree = self.revision_tree(p)
                prevtree = self.revision_tree(precursor)

                for file_state, fid, old_name, new_name, kind in \
                                        diff_trees(prevtree, tree, ):
                    if file_state == 'A' or file_state == 'M':
                        show_status(file_state, kind, new_name)
                    elif file_state == 'D':
                        show_status(file_state, kind, old_name)
                    elif file_state == 'R':
                        show_status(file_state, kind,
                                    old_name + ' => ' + new_name)

            revno += 1
            precursor = p


    def rename_one(self, from_rel, to_rel):
        """Rename one file.

        This can change the directory or the filename or both.
        """
        tree = self.working_tree()
        inv = tree.inventory
        if not tree.has_filename(from_rel):
            raise BzrError("can't rename: old working file %r does not exist" % from_rel)
        if tree.has_filename(to_rel):
            raise BzrError("can't rename: new working file %r already exists" % to_rel)

        file_id = inv.path2id(from_rel)
        if file_id == None:
            raise BzrError("can't rename: old name %r is not versioned" % from_rel)

        if inv.path2id(to_rel):
            raise BzrError("can't rename: new name %r is already versioned" % to_rel)

        to_dir, to_tail = os.path.split(to_rel)
        to_dir_id = inv.path2id(to_dir)
        if to_dir_id == None and to_dir != '':
            raise BzrError("can't determine destination directory id for %r" % to_dir)

        mutter("rename_one:")
        mutter("  file_id    {%s}" % file_id)
        mutter("  from_rel   %r" % from_rel)
        mutter("  to_rel     %r" % to_rel)
        mutter("  to_dir     %r" % to_dir)
        mutter("  to_dir_id  {%s}" % to_dir_id)

        inv.rename(file_id, to_dir_id, to_tail)

        print "%s => %s" % (from_rel, to_rel)

        from_abs = self.abspath(from_rel)
        to_abs = self.abspath(to_rel)
        try:
            os.rename(from_abs, to_abs)
        except OSError, e:
            raise BzrError("failed to rename %r to %r: %s"
                    % (from_abs, to_abs, e[1]),
                    ["rename rolled back"])

        self._write_inventory(inv)


    def move(self, from_paths, to_name):
        """Rename files.

        Note that to_name is only the last component of the new name;
        this doesn't change the directory.
        """
        ## TODO: Option to move IDs only
        assert not isinstance(from_paths, basestring)
        tree = self.working_tree()
        inv = tree.inventory
        to_abs = self.abspath(to_name)
        if not isdir(to_abs):
            raise BzrError("destination %r is not a directory" % to_abs)
        if not tree.has_filename(to_name):
            raise BzrError("destination %r not in working directory" % to_abs)
        to_dir_id = inv.path2id(to_name)
        if to_dir_id == None and to_name != '':
            raise BzrError("destination %r is not a versioned directory" % to_name)
        to_dir_ie = inv[to_dir_id]
        if to_dir_ie.kind not in ('directory', 'root_directory'):
            raise BzrError("destination %r is not a directory" % to_abs)

        to_idpath = inv.get_idpath(to_dir_id)

        for f in from_paths:
            if not tree.has_filename(f):
                raise BzrError("%r does not exist in working tree" % f)
            f_id = inv.path2id(f)
            if f_id == None:
                raise BzrError("%r is not versioned" % f)
            name_tail = splitpath(f)[-1]
            dest_path = appendpath(to_name, name_tail)
            if tree.has_filename(dest_path):
                raise BzrError("destination %r already exists" % dest_path)
            if f_id in to_idpath:
                raise BzrError("can't move %r to a subdirectory of itself" % f)

        # OK, so there's a race here, it's possible that someone will
        # create a file in this interval and then the rename might be
        # left half-done.  But we should have caught most problems.

        for f in from_paths:
            name_tail = splitpath(f)[-1]
            dest_path = appendpath(to_name, name_tail)
            print "%s => %s" % (f, dest_path)
            inv.rename(inv.path2id(f), to_dir_id, name_tail)
            try:
                os.rename(self.abspath(f), self.abspath(dest_path))
            except OSError, e:
                raise BzrError("failed to rename %r to %r: %s" % (f, dest_path, e[1]),
                        ["rename rolled back"])

        self._write_inventory(inv)


    def revert(self, filenames, old_tree=None, backups=True):
        """Restore selected files to the versions from a previous tree.

        backups
            If true (default) backups are made of files before
            they are overwritten.
        """
        from bzrlib.errors import NotVersionedError, BzrError
        from bzrlib.atomicfile import AtomicFile
        from bzrlib.osutils import backup_file

        inv = self.read_working_inventory()
        if old_tree is None:
            old_tree = self.basis_tree()
        old_inv = old_tree.inventory

        nids = []
        for fn in filenames:
            file_id = inv.path2id(fn)
            if not file_id:
                raise NotVersionedError("not a versioned file", fn)
            if not old_inv.has_id(file_id):
                raise BzrError("file not present in old tree", fn, file_id)
            nids.append((fn, file_id))

        # TODO: Rename back if it was previously at a different location

        # TODO: If given a directory, restore the entire contents from
        # the previous version.

        # TODO: Make a backup to a temporary file.

        # TODO: If the file previously didn't exist, delete it?
        for fn, file_id in nids:
            if backups:
                backup_file(fn)
            f = AtomicFile(fn, 'wb')
            try:
                f.write(old_tree.get_file(file_id).read())
                f.commit()
            finally:
                f.close()

    def pending_merges(self):
        """Return a list of pending merges.

        These are revisions that have been merged into the working
        directory but not yet committed.
        """
        cfn = self.controlfilename('pending-merges')
        if not os.path.exists(cfn):
            return []
        p = []
        for l in self.controlfile('pending-merges', 'r').readlines():
            p.append(l.rstrip('\n'))
        return p


    def add_pending_merge(self, revision_id):
        from bzrlib.revision import validate_revision_id

        validate_revision_id(revision_id)

        p = self.pending_merges()
        if revision_id in p:
            return
        p.append(revision_id)
        self.set_pending_merges(p)


    def set_pending_merges(self, rev_list):
        from bzrlib.atomicfile import AtomicFile

        f = AtomicFile(self.controlfilename('pending-merges'))
        try:
            for l in rev_list:
                print >>f, l
            f.commit()
        finally:
            f.close()


    def show_status(self, show_all=False):
        """Display single-line status for non-ignored working files.

        The list is shown sorted in order by file name.

        >>> b = ScratchBranch(files=['foo', 'foo~'])
        >>> b.show_status()
        ?       foo
        >>> b.add('foo')
        >>> b.show_status()
        A       foo
        >>> b.commit("add foo")
        >>> b.show_status()
        >>> os.unlink(b.abspath('foo'))
        >>> b.show_status()
        D       foo

        TODO: Get state for single files.
        """
        self._need_readlock()

        # We have to build everything into a list first so that it can be
        # sorted by name, incorporating all the different sources.

        # FIXME: Rather than getting things in random order and then sorting,
        # just step through in order.

        # Interesting case: the old ID for a file has been removed,
        # but a new file has been created under that name.

        old = self.basis_tree()
        new = self.working_tree()

        for fs, fid, oldname, newname, kind in diff_trees(old, new):
            if fs == 'R':
                show_status(fs, kind,
                            oldname + ' => ' + newname)
            elif fs == 'A' or fs == 'M':
                show_status(fs, kind, newname)
            elif fs == 'D':
                show_status(fs, kind, oldname)
            elif fs == '.':
                if show_all:
                    show_status(fs, kind, newname)
            elif fs == 'I':
                if show_all:
                    show_status(fs, kind, newname)
            elif fs == '?':
                show_status(fs, kind, newname)
            else:
                bailout("weird file state %r" % ((fs, fid),))



class ScratchBranch(Branch):