# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA


import sys, os, os.path, random, time, sha, sets, types, re, shutil, tempfile
import traceback, socket, fnmatch, difflib, time
from binascii import hexlify

import bzrlib
import bzrlib.xml
from bzrlib.trace import mutter, note
from bzrlib.osutils import isdir, quotefn, compact_date, rand_bytes, \
     sha_file, appendpath, file_kind
from bzrlib.errors import BzrError, InvalidRevisionNumber, InvalidRevisionId, \
     DivergedBranches, NotBranchError
from bzrlib.textui import show_status
from bzrlib.revision import Revision
from bzrlib.delta import compare_trees
from bzrlib.tree import EmptyTree, RevisionTree

from inventory import InventoryEntry, Inventory
from tree import Tree, EmptyTree, RevisionTree, WorkingTree
from osutils import isdir, quotefn, isfile, uuid, sha_file, username, chomp, \
     format_date, compact_date, pumpfile, user_email, rand_bytes, splitpath, \
     joinpath, sha_string, file_kind, local_time_offset, appendpath
from store import ImmutableStore
from errors import bailout, BzrError
from diff import diff_trees


BZR_BRANCH_FORMAT = "Bazaar-NG branch, format 0.0.4\n"

## TODO: Maybe include checks for common corruption of newlines, etc?

# TODO: Some operations like log might retrieve the same revisions
# repeatedly to calculate deltas.  We could perhaps have a weakref
# cache in memory to make this faster.

# TODO: please move the revision-string syntax stuff out of the branch
# object; it's clutter

def find_branch(f, **args):
    from bzrlib.transport import transport
    from bzrlib.local_transport import LocalTransport

    t = transport(f)
    # FIXME: This is a hack around transport so that
    #        we can search the local directories for
    #        a branch root.
    if args.has_key('init') and args['init']:
        # Don't search if we are init-ing
        return Branch(t, **args)
    if isinstance(t, LocalTransport):
        root = find_branch_root(f)
        if root != f:
            t = transport(root)
    return Branch(t, **args)

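# Illustrative sketch (not part of the original source): find_branch is the
# usual entry point for opening a branch by path or URL, e.g.:
#
#     b = find_branch('.')                           # branch enclosing the cwd
#     b = find_branch('/tmp/newbranch', init=True)   # create control files first
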
def _relpath(base, path):
    """Return path relative to base, or raise exception.

    The path may be either an absolute path or a path relative to the
    current working directory.

    Lifted out of Branch.relpath for ease of testing.

    os.path.commonprefix (python2.4) has a bad bug that it works just
    on string prefixes, assuming that '/u' is a prefix of '/u2'.  This
    avoids that problem."""
    rp = os.path.abspath(path)

    s = []
    head = rp
    while len(head) >= len(base):
        if head == base:
            break
        head, tail = os.path.split(head)
        if tail:
            s.insert(0, tail)
    else:
        raise NotBranchError("path %r is not within branch %r" % (rp, base))

    return os.sep.join(s)

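# Illustrative sketch (not part of the original source): _relpath walks up from
# the absolute path until it reaches the branch base.  Assuming a branch at
# '/home/user/proj':
#
#     _relpath('/home/user/proj', '/home/user/proj/src/foo.c')  # -> 'src/foo.c'
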
def find_branch_root(f=None):
    """Find the branch root enclosing f, or pwd.

    f may be a filename or a URL.

    It is not necessary that f exists.

    Basically we keep looking up until we find the control directory or
    run into the root.  If there isn't one, raises NotBranchError.
    """
    if f == None:
        f = os.getcwd()
    elif hasattr(os.path, 'realpath'):
        f = os.path.realpath(f)
    else:
        f = os.path.abspath(f)
    if not os.path.exists(f):
        raise BzrError('%r does not exist' % f)

    orig_f = f

    while True:
        if os.path.exists(os.path.join(f, bzrlib.BZRDIR)):
            return f
        head, tail = os.path.split(f)
        if head == f:
            # reached the root, whatever that may be
            raise NotBranchError('%s is not in a branch' % orig_f)
        f = head

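# Illustrative sketch (not part of the original source): opening an existing
# branch from somewhere inside its working tree, assuming '/home/user/proj'
# is a bzr branch:
#
#     root = find_branch_root('/home/user/proj/src')   # -> '/home/user/proj'
#     b = find_branch('/home/user/proj/src')           # Branch rooted there
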
######################################################################
class Branch(object):
    """Branch holding a history of revisions.

    base
        Base directory of the branch.

    _lock_count
        If _lock_mode is true, a positive count of the number of times the
        lock has been taken.

    _lock
        Lock object from bzrlib.lock.

    TODO: Perhaps use different stores for different classes of object,
          so that we can keep track of how much space each one uses,
          or garbage-collect them.

    TODO: Add a RemoteBranch subclass.  For the basic case of read-only
          HTTP access this should be very easy, just redirecting
          controlfile access into HTTP requests.
          We would need a RemoteStore working similarly.

    TODO: Keep the on-disk branch locked while the object exists.
    """
    _lock_mode = None
    _lock_count = None
    _lock = None
    cache_root = None

    # Map some sort of prefix into a namespace
    # stuff like "revno:10", "revid:", etc.
    # This should match a prefix with a function which accepts
    REVISION_NAMESPACES = {}

    def __init__(self, transport, init=False):
        """Create new branch object at a particular location.

        transport -- A Transport object, defining how to access files.
                (If a string, transport.transport() will be used to
                create a Transport object)

        init -- If True, create new control files in a previously
             unversioned directory.  If False, the branch must already
             be versioned.

        In the test suite, creation of new trees is tested using the
        `ScratchBranch` class.
        """
        if isinstance(transport, basestring):
            from transport import transport as get_transport
            transport = get_transport(transport)

        self._transport = transport
        if init:
            self._make_control()
        self._check_format()

    def __str__(self):
        return '%s(%r)' % (self.__class__.__name__, self._transport.base)

    __repr__ = __str__

    def __del__(self):
        if self._lock_mode or self._lock:
            from bzrlib.warnings import warn
            warn("branch %r was not explicitly unlocked" % self)
            self._lock.unlock()

        # TODO: It might be best to do this somewhere else,
        # but it is nice for a Branch object to automatically
        # cache its information.
        # Alternatively, we could have the Transport objects cache requests
        # See the earlier discussion about how major objects (like Branch)
        # should never expect their __del__ function to run.
        if self.cache_root is not None:
            #from warnings import warn
            #warn("branch %r auto-cleanup of cache files" % self)
            try:
                shutil.rmtree(self.cache_root)
            except:
                pass
            self.cache_root = None

    def _get_base(self):
        return self._transport.base

    base = property(_get_base)

    def lock_write(self):
        # TODO: Upgrade locking to support using a Transport,
        # and potentially a remote locking protocol
        if self._lock_mode:
            if self._lock_mode != 'w':
                from bzrlib.errors import LockError
                raise LockError("can't upgrade to a write lock from %r" %
                                self._lock_mode)
            self._lock_count += 1
        else:
            self._lock = self._transport.lock_write(
                    self._rel_controlfilename('branch-lock'))
            self._lock_mode = 'w'
            self._lock_count = 1

    def lock_read(self):
        if self._lock_mode:
            assert self._lock_mode in ('r', 'w'), \
                   "invalid lock mode %r" % self._lock_mode
            self._lock_count += 1
        else:
            self._lock = self._transport.lock_read(
                    self._rel_controlfilename('branch-lock'))
            self._lock_mode = 'r'
            self._lock_count = 1

    def unlock(self):
        if not self._lock_mode:
            from bzrlib.errors import LockError
            raise LockError('branch %r is not locked' % (self))

        if self._lock_count > 1:
            self._lock_count -= 1
        else:
            self._lock.unlock()
            self._lock = None
            self._lock_mode = self._lock_count = None

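    # Illustrative sketch (not part of the original source): callers are
    # expected to pair lock calls with unlock, typically:
    #
    #     branch.lock_read()
    #     try:
    #         ...  # read revision history, inventories, etc.
    #     finally:
    #         branch.unlock()
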
    def abspath(self, name):
        """Return absolute filename for something in the branch"""
        return self._transport.abspath(name)

    def relpath(self, path):
        """Return path relative to this branch of something inside it.

        Raises an error if path is not in this branch."""
        return self._transport.relpath(path)

    def _rel_controlfilename(self, file_or_path):
        if isinstance(file_or_path, basestring):
            file_or_path = [file_or_path]
        return [bzrlib.BZRDIR] + file_or_path

    def controlfilename(self, file_or_path):
        """Return location relative to branch."""
        return self._transport.abspath(self._rel_controlfilename(file_or_path))

    def controlfile(self, file_or_path, mode='r'):
        """Open a control file for this branch.

        There are two classes of file in the control directory: text
        and binary.  binary files are untranslated byte streams.  Text
        control files are stored with Unix newlines and in UTF-8, even
        if the platform or locale defaults are different.

        Controlfiles should almost never be opened in write mode but
        rather should be atomically copied and replaced using atomicfile.
        """
        import codecs

        relpath = self._rel_controlfilename(file_or_path)
        #TODO: codecs.open() buffers linewise, so it was overloaded with
        # a much larger buffer, do we need to do the same for getreader/getwriter?
        if mode == 'rb':
            return self._transport.get(relpath)
        elif mode == 'wb':
            raise BzrError("Branch.controlfile(mode='wb') is not supported, use put_controlfiles")
        elif mode == 'r':
            return codecs.getreader('utf-8')(self._transport.get(relpath), errors='replace')
        elif mode == 'w':
            raise BzrError("Branch.controlfile(mode='w') is not supported, use put_controlfiles")
        else:
            raise BzrError("invalid controlfile mode %r" % mode)

    def put_controlfile(self, path, f, encode=True):
        """Write an entry as a controlfile.

        :param path: The path to put the file, relative to the .bzr control
                     directory
        :param f: A file-like or string object whose contents should be copied.
        :param encode: If true, encode the contents as utf-8
        """
        self.put_controlfiles([(path, f)], encode=encode)

    def put_controlfiles(self, files, encode=True):
        """Write several entries as controlfiles.

        :param files: A list of [(path, file)] pairs, where the path is the directory
                      underneath the bzr control directory
        :param encode: If true, encode the contents as utf-8
        """
        import codecs

        ctrl_files = []
        for path, f in files:
            if encode:
                if isinstance(f, basestring):
                    f = f.encode('utf-8', 'replace')
                else:
                    f = codecs.getwriter('utf-8')(f, errors='replace')
            path = self._rel_controlfilename(path)
            ctrl_files.append((path, f))
        self._transport.put_multi(ctrl_files)

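    # Illustrative sketch (not part of the original source): text handed to
    # put_controlfile is encoded as UTF-8 before being written through the
    # transport, e.g.:
    #
    #     branch.put_controlfile('branch-name', 'my-branch\n')
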
    def _make_control(self):
        from bzrlib.inventory import Inventory
        from cStringIO import StringIO

        # Create an empty inventory
        sio = StringIO()
        # if we want per-tree root ids then this is the place to set
        # them; they're not needed for now and so omitted for
        # simplicity.
        bzrlib.xml.serializer_v4.write_inventory(Inventory(), sio)

        dirs = [[], 'text-store', 'inventory-store', 'revision-store']
        files = [('README',
                  "This is a Bazaar-NG control directory.\n"
                  "Do not change any files in this directory.\n"),
                 ('branch-format', BZR_BRANCH_FORMAT),
                 ('revision-history', ''),
                 ('merged-patches', ''),
                 ('pending-merged-patches', ''),
                 ('branch-name', ''),
                 ('branch-lock', ''),
                 ('pending-merges', ''),
                 ('inventory', sio.getvalue())
                 ]
        self._transport.mkdir_multi([self._rel_controlfilename(d) for d in dirs])
        self.put_controlfiles(files)
        mutter('created control directory in ' + self._transport.base)

    def _check_format(self):
        """Check this branch format is supported.

        The current tool only supports the current unstable format.

        In the future, we might need different in-memory Branch
        classes to support downlevel branches.  But not yet.
        """
        # This ignores newlines so that we can open branches created
        # on Windows from Linux and so on.  I think it might be better
        # to always make all internal files in unix format.
        fmt = self.controlfile('branch-format', 'r').read()
        fmt = fmt.replace('\r\n', '\n')
        if fmt != BZR_BRANCH_FORMAT:
            raise BzrError('sorry, branch format %r not supported' % fmt,
                           ['use a different bzr version',
                            'or remove the .bzr directory and "bzr init" again'])

        # We know that the format is the currently supported one.
        # So create the rest of the entries.
        from bzrlib.store.compressed_text import CompressedTextStore

        if self._transport.should_cache():
            self.cache_root = tempfile.mkdtemp(prefix='bzr-cache')
            mutter('Branch %r using caching in %r' % (self, self.cache_root))
        else:
            self.cache_root = None

        def get_store(name):
            relpath = self._rel_controlfilename(name)
            store = CompressedTextStore(self._transport.clone(relpath))
            if self._transport.should_cache():
                from meta_store import CachedStore
                cache_path = os.path.join(self.cache_root, name)
                store = CachedStore(store, cache_path)
            return store

        self.text_store = get_store('text-store')
        self.revision_store = get_store('revision-store')
        self.inventory_store = get_store('inventory-store')

    def get_root_id(self):
        """Return the id of this branch's root"""
        inv = self.read_working_inventory()
        return inv.root.file_id

    def set_root_id(self, file_id):
        inv = self.read_working_inventory()
        orig_root_id = inv.root.file_id
        del inv._byid[inv.root.file_id]
        inv.root.file_id = file_id
        inv._byid[inv.root.file_id] = inv.root
        for fid in inv:
            entry = inv[fid]
            if entry.parent_id in (None, orig_root_id):
                entry.parent_id = inv.root.file_id
        self._write_inventory(inv)

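    # Illustrative sketch (not part of the original source): get_root_id and
    # set_root_id operate on the working inventory's root entry, e.g.:
    #
    #     old_id = branch.get_root_id()
    #     branch.set_root_id('tree_root-12345')   # hypothetical id value
    #     assert branch.get_root_id() == 'tree_root-12345'
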
    def read_working_inventory(self):
        """Read the working inventory."""
        from bzrlib.inventory import Inventory

        # ElementTree does its own conversion from UTF-8, so open in
        # binary.
        f = self.controlfile('inventory', 'rb')
        return bzrlib.xml.serializer_v4.read_inventory(f)

    def _write_inventory(self, inv):
        """Update the working inventory.

        That is to say, the inventory describing changes underway, that
        will be committed to the next revision.
        """
        from cStringIO import StringIO

        sio = StringIO()
        bzrlib.xml.serializer_v4.write_inventory(inv, sio)
        sio.seek(0)
        # Transport handles atomicity
        self.put_controlfile('inventory', sio)

        mutter('wrote working inventory')

    inventory = property(read_working_inventory, _write_inventory, None,
                         """Inventory for the working copy.""")

def add(self, files, ids=None):
246
def add(self, files, verbose=False):
458
247
"""Make files versioned.
460
Note that the command line normally calls smart_add instead,
461
which can automatically recurse.
249
Note that the command line normally calls smart_add instead.
463
251
This puts the files in the Added state, so that they will be
464
252
recorded by the next commit.
467
List of paths to add, relative to the base of the tree.
470
If set, use these instead of automatically generated ids.
471
Must be the same length as the list of files, but may
472
contain None for ids that are to be autogenerated.
474
254
TODO: Perhaps have an option to add the ids even if the files do
477
TODO: Perhaps yield the ids and paths as they're added.
257
TODO: Perhaps return the ids of the files? But then again it
258
is easy to retrieve them if they're needed.
260
TODO: Option to specify file id.
262
TODO: Adding a directory should optionally recurse down and
263
add all non-ignored children. Perhaps do that in a
266
>>> b = ScratchBranch(files=['foo'])
267
>>> 'foo' in b.unknowns()
272
>>> 'foo' in b.unknowns()
274
>>> bool(b.inventory.path2id('foo'))
280
Traceback (most recent call last):
282
BzrError: ('foo is already versioned', [])
284
>>> b.add(['nothere'])
285
Traceback (most recent call last):
286
BzrError: ('cannot add: not a regular file or directory: nothere', [])
479
289
# TODO: Re-adding a file that is removed in the working copy
480
290
# should probably put it back with the previous ID.
481
if isinstance(files, basestring):
482
assert(ids is None or isinstance(ids, basestring))
291
if isinstance(files, types.StringTypes):
488
ids = [None] * len(files)
490
assert(len(ids) == len(files))
494
inv = self.read_working_inventory()
495
for f,file_id in zip(files, ids):
496
if is_control_file(f):
497
raise BzrError("cannot add control file %s" % quotefn(f))
502
raise BzrError("cannot add top-level %r" % f)
504
fullpath = os.path.normpath(self.abspath(f))
507
kind = file_kind(fullpath)
509
# maybe something better?
510
raise BzrError('cannot add: not a regular file or directory: %s' % quotefn(f))
512
if kind != 'file' and kind != 'directory':
513
raise BzrError('cannot add: not a regular file or directory: %s' % quotefn(f))
516
file_id = gen_file_id(f)
517
inv.add_path(f, kind=kind, file_id=file_id)
519
mutter("add file %s file_id:{%s} kind=%r" % (f, file_id, kind))
521
self._write_inventory(inv)
294
inv = self.read_working_inventory()
296
if is_control_file(f):
297
bailout("cannot add control file %s" % quotefn(f))
302
bailout("cannot add top-level %r" % f)
304
fullpath = os.path.normpath(self.abspath(f))
307
kind = file_kind(fullpath)
309
# maybe something better?
310
bailout('cannot add: not a regular file or directory: %s' % quotefn(f))
312
if kind != 'file' and kind != 'directory':
313
bailout('cannot add: not a regular file or directory: %s' % quotefn(f))
315
file_id = gen_file_id(f)
316
inv.add_path(f, kind=kind, file_id=file_id)
319
show_status('A', kind, quotefn(f))
321
mutter("add file %s file_id:{%s} kind=%r" % (f, file_id, kind))
323
self._write_inventory(inv)
526
326
def print_file(self, file, revno):
527
327
"""Print `file` to stdout."""
530
tree = self.revision_tree(self.lookup_revision(revno))
531
# use inventory as it was in that revision
532
file_id = tree.inventory.path2id(file)
534
raise BzrError("%r is not present in revision %s" % (file, revno))
535
tree.print_file(file_id)
328
tree = self.revision_tree(self.lookup_revision(revno))
329
# use inventory as it was in that revision
330
file_id = tree.inventory.path2id(file)
332
bailout("%r is not present in revision %d" % (file, revno))
333
tree.print_file(file_id)
540
336
def remove(self, files, verbose=False):
541
337
"""Mark nominated files for removal from the inventory.
613
415
return self.working_tree().unknowns()
616
def append_revision(self, *revision_ids):
617
for revision_id in revision_ids:
618
mutter("add {%s} to revision-history" % revision_id)
418
def commit(self, message, timestamp=None, timezone=None,
421
"""Commit working copy as a new revision.
423
The basic approach is to add all the file texts into the
424
store, then the inventory, then make a new revision pointing
425
to that inventory and store that.
427
This is not quite safe if the working copy changes during the
428
commit; for the moment that is simply not allowed. A better
429
approach is to make a temporary copy of the files before
430
computing their hashes, and then add those hashes in turn to
431
the inventory. This should mean at least that there are no
432
broken hash pointers. There is no way we can get a snapshot
433
of the whole directory at an instant. This would also have to
434
be robust against files disappearing, moving, etc. So the
435
whole thing is a bit hard.
437
timestamp -- if not None, seconds-since-epoch for a
438
postdated/predated commit.
441
## TODO: Show branch names
443
# TODO: Don't commit if there are no changes, unless forced?
445
# First walk over the working inventory; and both update that
446
# and also build a new revision inventory. The revision
447
# inventory needs to hold the text-id, sha1 and size of the
448
# actual file versions committed in the revision. (These are
449
# not present in the working inventory.) We also need to
450
# detect missing/deleted files, and remove them from the
453
work_inv = self.read_working_inventory()
455
basis = self.basis_tree()
456
basis_inv = basis.inventory
458
for path, entry in work_inv.iter_entries():
459
## TODO: Cope with files that have gone missing.
461
## TODO: Check that the file kind has not changed from the previous
462
## revision of this file (if any).
466
p = self.abspath(path)
467
file_id = entry.file_id
468
mutter('commit prep file %s, id %r ' % (p, file_id))
470
if not os.path.exists(p):
471
mutter(" file is missing, removing from inventory")
473
show_status('D', entry.kind, quotefn(path))
474
missing_ids.append(file_id)
477
# TODO: Handle files that have been deleted
479
# TODO: Maybe a special case for empty files? Seems a
480
# waste to store them many times.
484
if basis_inv.has_id(file_id):
485
old_kind = basis_inv[file_id].kind
486
if old_kind != entry.kind:
487
bailout("entry %r changed kind from %r to %r"
488
% (file_id, old_kind, entry.kind))
490
if entry.kind == 'directory':
492
bailout("%s is entered as directory but not a directory" % quotefn(p))
493
elif entry.kind == 'file':
495
bailout("%s is entered as file but is not a file" % quotefn(p))
497
content = file(p, 'rb').read()
499
entry.text_sha1 = sha_string(content)
500
entry.text_size = len(content)
502
old_ie = basis_inv.has_id(file_id) and basis_inv[file_id]
504
and (old_ie.text_size == entry.text_size)
505
and (old_ie.text_sha1 == entry.text_sha1)):
506
## assert content == basis.get_file(file_id).read()
507
entry.text_id = basis_inv[file_id].text_id
508
mutter(' unchanged from previous text_id {%s}' %
512
entry.text_id = gen_file_id(entry.name)
513
self.text_store.add(content, entry.text_id)
514
mutter(' stored with text_id {%s}' % entry.text_id)
518
elif (old_ie.name == entry.name
519
and old_ie.parent_id == entry.parent_id):
524
show_status(state, entry.kind, quotefn(path))
526
for file_id in missing_ids:
527
# have to do this later so we don't mess up the iterator.
528
# since parents may be removed before their children we
531
# FIXME: There's probably a better way to do this; perhaps
532
# the workingtree should know how to filter itself.
533
if work_inv.has_id(file_id):
534
del work_inv[file_id]
537
inv_id = rev_id = _gen_revision_id(time.time())
539
inv_tmp = tempfile.TemporaryFile()
540
inv.write_xml(inv_tmp)
542
self.inventory_store.add(inv_tmp, inv_id)
543
mutter('new inventory_id is {%s}' % inv_id)
545
self._write_inventory(work_inv)
547
if timestamp == None:
548
timestamp = time.time()
550
if committer == None:
551
committer = username()
554
timezone = local_time_offset()
556
mutter("building commit log message")
557
rev = Revision(timestamp=timestamp,
560
precursor = self.last_patch(),
565
rev_tmp = tempfile.TemporaryFile()
566
rev.write_xml(rev_tmp)
568
self.revision_store.add(rev_tmp, rev_id)
569
mutter("new revision_id is {%s}" % rev_id)
571
## XXX: Everything up to here can simply be orphaned if we abort
572
## the commit; it will leave junk files behind but that doesn't
575
## TODO: Read back the just-generated changeset, and make sure it
576
## applies and recreates the right state.
578
## TODO: Also calculate and store the inventory SHA1
579
mutter("committing patch r%d" % (self.revno() + 1))
582
self.append_revision(rev_id)
585
note("commited r%d" % self.revno())
588
def append_revision(self, revision_id):
589
mutter("add {%s} to revision-history" % revision_id)
620
590
rev_history = self.revision_history()
621
rev_history.extend(revision_ids)
625
self.put_controlfile('revision-history', '\n'.join(rev_history))
630
def get_revision_xml_file(self, revision_id):
631
"""Return XML file object for revision object."""
632
if not revision_id or not isinstance(revision_id, basestring):
633
raise InvalidRevisionId(revision_id)
638
return self.revision_store[revision_id]
639
except (IndexError, KeyError):
640
raise bzrlib.errors.NoSuchRevision(self, revision_id)
646
get_revision_xml = get_revision_xml_file
592
tmprhname = self.controlfilename('revision-history.tmp')
593
rhname = self.controlfilename('revision-history')
595
f = file(tmprhname, 'wt')
596
rev_history.append(revision_id)
597
f.write('\n'.join(rev_history))
601
if sys.platform == 'win32':
603
os.rename(tmprhname, rhname)
649
607
def get_revision(self, revision_id):
650
608
"""Return the Revision object for a named revision"""
651
xml_file = self.get_revision_xml_file(revision_id)
654
r = bzrlib.xml.serializer_v4.read_revision(xml_file)
655
except SyntaxError, e:
656
raise bzrlib.errors.BzrError('failed to unpack revision_xml',
609
r = Revision.read_xml(self.revision_store[revision_id])
660
610
assert r.revision_id == revision_id
664
def get_revision_delta(self, revno):
665
"""Return the delta for one revision.
667
The delta is relative to its mainline predecessor, or the
668
empty tree for revision 1.
670
assert isinstance(revno, int)
671
rh = self.revision_history()
672
if not (1 <= revno <= len(rh)):
673
raise InvalidRevisionNumber(revno)
675
# revno is 1-based; list is 0-based
677
new_tree = self.revision_tree(rh[revno-1])
679
old_tree = EmptyTree()
681
old_tree = self.revision_tree(rh[revno-2])
683
return compare_trees(old_tree, new_tree)
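    # Illustrative sketch (not part of the original source): the delta for a
    # revision is computed against its mainline predecessor (or the empty
    # tree for revision 1), e.g.:
    #
    #     delta = branch.get_revision_delta(1)
    #     print delta.added      # entries new in that revision
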
686
def get_revisions(self, revision_ids, pb=None):
687
"""Return the Revision object for a set of named revisions"""
688
from bzrlib.revision import Revision
689
from bzrlib.xml import unpack_xml
691
# TODO: We need to decide what to do here
692
# we cannot use a generator with a try/finally, because
693
# you cannot guarantee that the caller will iterate through
695
# in the past, get_inventory wasn't even wrapped in a
696
# try/finally locking block.
697
# We could either lock without the try/finally, or just
698
# not lock at all. We are reading entries that should
700
# I prefer locking with no finally, so that if someone
701
# asks for a list of revisions, but doesn't consume them,
702
# that is their problem, and they will suffer the consequences
704
for xml_file in self.revision_store.get(revision_ids, pb=pb):
706
r = bzrlib.xml.serializer_v4.read_revision(xml_file)
707
except SyntaxError, e:
708
raise bzrlib.errors.BzrError('failed to unpack revision_xml',
714
def get_revision_sha1(self, revision_id):
715
"""Hash the stored value of a revision, and return it."""
716
# In the future, revision entries will be signed. At that
717
# point, it is probably best *not* to include the signature
718
# in the revision hash. Because that lets you re-sign
719
# the revision, (add signatures/remove signatures) and still
720
# have all hash pointers stay consistent.
721
# But for now, just hash the contents.
722
return bzrlib.osutils.sha_file(self.get_revision_xml(revision_id))
725
614
def get_inventory(self, inventory_id):
726
615
"""Get Inventory object by hash.
728
617
TODO: Perhaps for this and similar methods, take a revision
729
618
parameter which can be either an integer revno or a
732
f = self.get_inventory_xml_file(inventory_id)
733
return bzrlib.xml.serializer_v4.read_inventory(f)
736
def get_inventory_xml(self, inventory_id):
737
"""Get inventory XML as a file object."""
738
# Shouldn't this have a read-lock around it?
739
# As well as some sort of trap for missing ids?
740
return self.inventory_store[inventory_id]
742
get_inventory_xml_file = get_inventory_xml
744
def get_inventories(self, inventory_ids, pb=None, ignore_missing=False):
745
"""Get Inventory objects by id
747
from bzrlib.inventory import Inventory
749
# See the discussion in get_revisions for why
750
# we don't use a try/finally block here
752
for f in self.inventory_store.get(inventory_ids, pb=pb, ignore_missing=ignore_missing):
754
# TODO: Possibly put a try/except around this to handle
755
# read serialization errors
756
r = bzrlib.xml.serializer_v4.read_inventory(f)
761
raise bzrlib.errors.NoSuchRevision(self, revision_id)
764
def get_inventory_sha1(self, inventory_id):
765
"""Return the sha1 hash of the inventory entry
767
return sha_file(self.get_inventory_xml(inventory_id))
620
i = Inventory.read_xml(self.inventory_store[inventory_id])
770
624
def get_revision_inventory(self, revision_id):
771
625
"""Return inventory of a past revision."""
772
# bzr 0.0.6 imposes the constraint that the inventory_id
773
# must be the same as its revision, so this is trivial.
774
626
if revision_id == None:
775
from bzrlib.inventory import Inventory
776
return Inventory(self.get_root_id())
778
return self.get_inventory(revision_id)
629
return self.get_inventory(self.get_revision(revision_id).inventory_id)
781
632
def revision_history(self):
843
644
That is equivalent to the number of revisions committed to
647
>>> b = ScratchBranch()
650
>>> b.commit('no foo')
846
654
return len(self.revision_history())
849
657
def last_patch(self):
850
658
"""Return last patch hash, or None if no history.
660
>>> ScratchBranch().last_patch() == None
852
663
ph = self.revision_history()
859
def missing_revisions(self, other, stop_revision=None, diverged_ok=False):
861
If self and other have not diverged, return a list of the revisions
862
present in other, but missing from self.
864
>>> from bzrlib.commit import commit
865
>>> bzrlib.trace.silent = True
866
>>> br1 = ScratchBranch()
867
>>> br2 = ScratchBranch()
868
>>> br1.missing_revisions(br2)
870
>>> commit(br2, "lala!", rev_id="REVISION-ID-1")
871
>>> br1.missing_revisions(br2)
873
>>> br2.missing_revisions(br1)
875
>>> commit(br1, "lala!", rev_id="REVISION-ID-1")
876
>>> br1.missing_revisions(br2)
878
>>> commit(br2, "lala!", rev_id="REVISION-ID-2A")
879
>>> br1.missing_revisions(br2)
881
>>> commit(br1, "lala!", rev_id="REVISION-ID-2B")
882
>>> br1.missing_revisions(br2)
883
Traceback (most recent call last):
884
DivergedBranches: These branches have diverged.
886
self_history = self.revision_history()
887
self_len = len(self_history)
888
other_history = other.revision_history()
889
other_len = len(other_history)
890
common_index = min(self_len, other_len) -1
891
if common_index >= 0 and \
892
self_history[common_index] != other_history[common_index]:
893
raise DivergedBranches(self, other)
895
if stop_revision is None:
896
stop_revision = other_len
897
elif stop_revision > other_len:
898
raise bzrlib.errors.NoSuchRevision(self, stop_revision)
900
return other_history[self_len:stop_revision]
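    # Illustrative sketch (not part of the original source): a caller pulling
    # from another branch might use missing_revisions like this:
    #
    #     for rev_id in self_branch.missing_revisions(other_branch):
    #         ...  # fetch and install rev_id from other_branch
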
903
def update_revisions(self, other, stop_revision=None):
904
"""Pull in all new revisions from other branch.
906
from bzrlib.fetch import greedy_fetch
907
from bzrlib.revision import get_intervening_revisions
909
pb = bzrlib.ui.ui_factory.progress_bar()
910
pb.update('comparing histories')
911
if stop_revision is None:
912
other_revision = other.last_patch()
914
other_revision = other.lookup_revision(stop_revision)
915
count = greedy_fetch(self, other, other_revision, pb)[0]
917
revision_ids = self.missing_revisions(other, stop_revision)
918
except DivergedBranches, e:
920
revision_ids = get_intervening_revisions(self.last_patch(),
921
other_revision, self)
922
assert self.last_patch() not in revision_ids
923
except bzrlib.errors.NotAncestor:
926
self.append_revision(*revision_ids)
929
def install_revisions(self, other, revision_ids, pb):
930
# We are going to iterate this many times, so make sure
931
# that it is a list, and not a generator
932
revision_ids = list(revision_ids)
933
if hasattr(other.revision_store, "prefetch"):
934
other.revision_store.prefetch(revision_ids)
935
if hasattr(other.inventory_store, "prefetch"):
936
other.inventory_store.prefetch(inventory_ids)
939
pb = bzrlib.ui.ui_factory.progress_bar()
941
# This entire next section is generally done
942
# with either generators, or bulk updates
943
inventories = other.get_inventories(revision_ids, ignore_missing=True)
947
good_revisions = set()
948
for i, (inv, rev_id) in enumerate(zip(inventories, revision_ids)):
949
pb.update('fetching revision', i+1, len(revision_ids))
951
# We don't really need to get the revision here, because
952
# the only thing we needed was the inventory_id, which now
953
# is (by design) identical to the revision_id
955
# rev = other.get_revision(rev_id)
956
# except bzrlib.errors.NoSuchRevision:
957
# failures.add(rev_id)
964
good_revisions.add(rev_id)
967
for key, entry in inv.iter_entries():
968
if entry.text_id is None:
970
text_ids.append(entry.text_id)
972
has_ids = self.text_store.has(text_ids)
973
for has, text_id in zip(has_ids, text_ids):
975
needed_texts.add(text_id)
979
count, cp_fail = self.text_store.copy_multi(other.text_store,
981
#print "Added %d texts." % count
982
count, cp_fail = self.inventory_store.copy_multi(other.inventory_store,
984
#print "Added %d inventories." % count
985
count, cp_fail = self.revision_store.copy_multi(other.revision_store,
988
assert len(cp_fail) == 0
989
return count, failures
992
def commit(self, *args, **kw):
993
from bzrlib.commit import commit
994
commit(self, *args, **kw)
997
def lookup_revision(self, revision):
998
"""Return the revision identifier for a given revision information."""
999
revno, info = self._get_revision_info(revision)
1003
def revision_id_to_revno(self, revision_id):
1004
"""Given a revision id, return its revno"""
1005
history = self.revision_history()
1007
return history.index(revision_id) + 1
1009
raise bzrlib.errors.NoSuchRevision(self, revision_id)
1012
def get_revision_info(self, revision):
1013
"""Return (revno, revision id) for revision identifier.
1015
revision can be an integer, in which case it is assumed to be revno (though
1016
this will translate negative values into positive ones)
1017
revision can also be a string, in which case it is parsed for something like
1018
'date:' or 'revid:' etc.
1020
revno, rev_id = self._get_revision_info(revision)
1022
raise bzrlib.errors.NoSuchRevision(self, revision)
1023
return revno, rev_id
1025
def get_rev_id(self, revno, history=None):
1026
"""Find the revision id of the specified revno."""
670
def lookup_revision(self, revno):
671
"""Return revision hash for revision number."""
1030
history = self.revision_history()
1031
elif revno <= 0 or revno > len(history):
1032
raise bzrlib.errors.NoSuchRevision(self, revno)
1033
return history[revno - 1]
1035
def _get_revision_info(self, revision):
1036
"""Return (revno, revision id) for revision specifier.
1038
revision can be an integer, in which case it is assumed to be revno
1039
(though this will translate negative values into positive ones)
1040
revision can also be a string, in which case it is parsed for something
1041
like 'date:' or 'revid:' etc.
1043
A revid is always returned. If it is None, the specifier referred to
1044
the null revision. If the revid does not occur in the revision
1045
history, revno will be None.
1048
if revision is None:
1051
try:# Convert to int if possible
1052
revision = int(revision)
1055
revs = self.revision_history()
1056
if isinstance(revision, int):
1058
revno = len(revs) + revision + 1
1061
rev_id = self.get_rev_id(revno, revs)
1062
elif isinstance(revision, basestring):
1063
for prefix, func in Branch.REVISION_NAMESPACES.iteritems():
1064
if revision.startswith(prefix):
1065
result = func(self, revs, revision)
1067
revno, rev_id = result
1070
rev_id = self.get_rev_id(revno, revs)
1073
raise BzrError('No namespace registered for string: %r' %
1076
raise TypeError('Unhandled revision type %s' % revision)
1080
raise bzrlib.errors.NoSuchRevision(self, revision)
1081
return revno, rev_id
1083
def _namespace_revno(self, revs, revision):
1084
"""Lookup a revision by revision number"""
1085
assert revision.startswith('revno:')
1087
return (int(revision[6:]),)
1090
REVISION_NAMESPACES['revno:'] = _namespace_revno
1092
def _namespace_revid(self, revs, revision):
1093
assert revision.startswith('revid:')
1094
rev_id = revision[len('revid:'):]
1096
return revs.index(rev_id) + 1, rev_id
1099
REVISION_NAMESPACES['revid:'] = _namespace_revid
1101
def _namespace_last(self, revs, revision):
1102
assert revision.startswith('last:')
1104
offset = int(revision[5:])
1109
raise BzrError('You must supply a positive value for --revision last:XXX')
1110
return (len(revs) - offset + 1,)
1111
REVISION_NAMESPACES['last:'] = _namespace_last
1113
def _namespace_tag(self, revs, revision):
1114
assert revision.startswith('tag:')
1115
raise BzrError('tag: namespace registered, but not implemented.')
1116
REVISION_NAMESPACES['tag:'] = _namespace_tag
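    # Illustrative sketch (not part of the original source): revision
    # specifier strings are dispatched on their prefix via
    # REVISION_NAMESPACES, e.g.:
    #
    #     branch.get_revision_info('revno:10')    # tenth mainline revision
    #     branch.get_revision_info('last:1')      # most recent revision
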
1118
def _namespace_date(self, revs, revision):
1119
assert revision.startswith('date:')
1121
# Spec for date revisions:
1123
# value can be 'yesterday', 'today', 'tomorrow' or a YYYY-MM-DD string.
1124
        # it can also start with a '+/-/='. '+' says match the first
        # entry after the given date. '-' says match the first entry before the date.
        # '=' says match the first entry after, but still on the given date.
1128
# +2005-05-12 says find the first matching entry after May 12th, 2005 at 0:00
1129
# -2005-05-12 says find the first matching entry before May 12th, 2005 at 0:00
1130
# =2005-05-12 says find the first match after May 12th, 2005 at 0:00 but before
1131
# May 13th, 2005 at 0:00
1133
# So the proper way of saying 'give me all entries for today' is:
1134
# -r {date:+today}:{date:-tomorrow}
1135
# The default is '=' when not supplied
1138
if val[:1] in ('+', '-', '='):
1139
match_style = val[:1]
1142
today = datetime.datetime.today().replace(hour=0,minute=0,second=0,microsecond=0)
1143
if val.lower() == 'yesterday':
1144
dt = today - datetime.timedelta(days=1)
1145
elif val.lower() == 'today':
1147
elif val.lower() == 'tomorrow':
1148
dt = today + datetime.timedelta(days=1)
1151
# This should be done outside the function to avoid recompiling it.
1152
_date_re = re.compile(
1153
r'(?P<date>(?P<year>\d\d\d\d)-(?P<month>\d\d)-(?P<day>\d\d))?'
1155
r'(?P<time>(?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d))?)?'
1157
m = _date_re.match(val)
1158
if not m or (not m.group('date') and not m.group('time')):
1159
raise BzrError('Invalid revision date %r' % revision)
1162
year, month, day = int(m.group('year')), int(m.group('month')), int(m.group('day'))
1164
year, month, day = today.year, today.month, today.day
1166
hour = int(m.group('hour'))
1167
minute = int(m.group('minute'))
1168
if m.group('second'):
1169
second = int(m.group('second'))
1173
hour, minute, second = 0,0,0
1175
dt = datetime.datetime(year=year, month=month, day=day,
1176
hour=hour, minute=minute, second=second)
1180
if match_style == '-':
1182
elif match_style == '=':
1183
last = dt + datetime.timedelta(days=1)
1186
for i in range(len(revs)-1, -1, -1):
1187
r = self.get_revision(revs[i])
1188
# TODO: Handle timezone.
1189
dt = datetime.datetime.fromtimestamp(r.timestamp)
1190
if first >= dt and (last is None or dt >= last):
1193
for i in range(len(revs)):
1194
r = self.get_revision(revs[i])
1195
# TODO: Handle timezone.
1196
dt = datetime.datetime.fromtimestamp(r.timestamp)
1197
if first <= dt and (last is None or dt <= last):
1199
REVISION_NAMESPACES['date:'] = _namespace_date
1202
def _namespace_ancestor(self, revs, revision):
1203
from revision import common_ancestor, MultipleRevisionSources
1204
other_branch = find_branch(_trim_namespace('ancestor', revision))
1205
revision_a = self.last_patch()
1206
revision_b = other_branch.last_patch()
1207
for r, b in ((revision_a, self), (revision_b, other_branch)):
1209
raise bzrlib.errors.NoCommits(b)
1210
revision_source = MultipleRevisionSources(self, other_branch)
1211
result = common_ancestor(revision_a, revision_b, revision_source)
1213
revno = self.revision_id_to_revno(result)
1214
except bzrlib.errors.NoSuchRevision:
1219
REVISION_NAMESPACES['ancestor:'] = _namespace_ancestor
676
# list is 0-based; revisions are 1-based
677
return self.revision_history()[revno-1]
679
raise BzrError("no such revision %s" % revno)
1221
682
def revision_tree(self, revision_id):
1222
683
"""Return Tree for a revision on this branch.
1224
685
`revision_id` may be None for the null revision, in which case
1225
686
an `EmptyTree` is returned."""
1226
# TODO: refactor this to use an existing revision object
1227
# so we don't need to read it in twice.
1228
688
if revision_id == None:
1229
689
return EmptyTree()
723
def write_log(self, show_timezone='original', verbose=False):
724
"""Write out human-readable log of commits to this branch
726
utc -- If true, show dates in universal time, not local time."""
727
## TODO: Option to choose either original, utc or local timezone
730
for p in self.revision_history():
732
print 'revno:', revno
733
## TODO: Show hash if --id is given.
734
##print 'revision-hash:', p
735
rev = self.get_revision(p)
736
print 'committer:', rev.committer
737
print 'timestamp: %s' % (format_date(rev.timestamp, rev.timezone or 0,
740
## opportunistic consistency check, same as check_patch_chaining
741
if rev.precursor != precursor:
742
bailout("mismatched precursor!")
746
print ' (no message)'
748
for l in rev.message.split('\n'):
751
if verbose == True and precursor != None:
752
print 'changed files:'
753
tree = self.revision_tree(p)
754
prevtree = self.revision_tree(precursor)
756
for file_state, fid, old_name, new_name, kind in \
757
diff_trees(prevtree, tree, ):
758
if file_state == 'A' or file_state == 'M':
759
show_status(file_state, kind, new_name)
760
elif file_state == 'D':
761
show_status(file_state, kind, old_name)
762
elif file_state == 'R':
763
show_status(file_state, kind,
764
old_name + ' => ' + new_name)
1255
770
def rename_one(self, from_rel, to_rel):
1258
This can change the directory or the filename or both.
771
tree = self.working_tree()
773
if not tree.has_filename(from_rel):
774
bailout("can't rename: old working file %r does not exist" % from_rel)
775
if tree.has_filename(to_rel):
776
bailout("can't rename: new working file %r already exists" % to_rel)
778
file_id = inv.path2id(from_rel)
780
bailout("can't rename: old name %r is not versioned" % from_rel)
782
if inv.path2id(to_rel):
783
bailout("can't rename: new name %r is already versioned" % to_rel)
785
to_dir, to_tail = os.path.split(to_rel)
786
to_dir_id = inv.path2id(to_dir)
787
if to_dir_id == None and to_dir != '':
788
bailout("can't determine destination directory id for %r" % to_dir)
790
mutter("rename_one:")
791
mutter(" file_id {%s}" % file_id)
792
mutter(" from_rel %r" % from_rel)
793
mutter(" to_rel %r" % to_rel)
794
mutter(" to_dir %r" % to_dir)
795
mutter(" to_dir_id {%s}" % to_dir_id)
797
inv.rename(file_id, to_dir_id, to_tail)
799
print "%s => %s" % (from_rel, to_rel)
801
from_abs = self.abspath(from_rel)
802
to_abs = self.abspath(to_rel)
1262
tree = self.working_tree()
1263
inv = tree.inventory
1264
if not tree.has_filename(from_rel):
1265
raise BzrError("can't rename: old working file %r does not exist" % from_rel)
1266
if tree.has_filename(to_rel):
1267
raise BzrError("can't rename: new working file %r already exists" % to_rel)
1269
file_id = inv.path2id(from_rel)
1271
raise BzrError("can't rename: old name %r is not versioned" % from_rel)
1273
if inv.path2id(to_rel):
1274
raise BzrError("can't rename: new name %r is already versioned" % to_rel)
1276
to_dir, to_tail = os.path.split(to_rel)
1277
to_dir_id = inv.path2id(to_dir)
1278
if to_dir_id == None and to_dir != '':
1279
raise BzrError("can't determine destination directory id for %r" % to_dir)
1281
mutter("rename_one:")
1282
mutter(" file_id {%s}" % file_id)
1283
mutter(" from_rel %r" % from_rel)
1284
mutter(" to_rel %r" % to_rel)
1285
mutter(" to_dir %r" % to_dir)
1286
mutter(" to_dir_id {%s}" % to_dir_id)
1288
inv.rename(file_id, to_dir_id, to_tail)
1290
from_abs = self.abspath(from_rel)
1291
to_abs = self.abspath(to_rel)
1293
os.rename(from_abs, to_abs)
1295
raise BzrError("failed to rename %r to %r: %s"
1296
% (from_abs, to_abs, e[1]),
1297
["rename rolled back"])
1299
self._write_inventory(inv)
804
os.rename(from_abs, to_abs)
806
bailout("failed to rename %r to %r: %s"
807
% (from_abs, to_abs, e[1]),
808
["rename rolled back"])
810
self._write_inventory(inv)
1304
814
def move(self, from_paths, to_name):
1312
822
Note that to_name is only the last component of the new name;
1313
823
this doesn't change the directory.
1315
This returns a list of (from_path, to_path) pairs for each
1316
entry that is moved.
1321
## TODO: Option to move IDs only
1322
assert not isinstance(from_paths, basestring)
1323
tree = self.working_tree()
1324
inv = tree.inventory
1325
to_abs = self.abspath(to_name)
1326
if not isdir(to_abs):
1327
raise BzrError("destination %r is not a directory" % to_abs)
1328
if not tree.has_filename(to_name):
1329
raise BzrError("destination %r not in working directory" % to_abs)
1330
to_dir_id = inv.path2id(to_name)
1331
if to_dir_id == None and to_name != '':
1332
raise BzrError("destination %r is not a versioned directory" % to_name)
1333
to_dir_ie = inv[to_dir_id]
1334
if to_dir_ie.kind not in ('directory', 'root_directory'):
1335
raise BzrError("destination %r is not a directory" % to_abs)
1337
to_idpath = inv.get_idpath(to_dir_id)
1339
for f in from_paths:
1340
if not tree.has_filename(f):
1341
raise BzrError("%r does not exist in working tree" % f)
1342
f_id = inv.path2id(f)
1344
raise BzrError("%r is not versioned" % f)
1345
name_tail = splitpath(f)[-1]
1346
dest_path = appendpath(to_name, name_tail)
1347
if tree.has_filename(dest_path):
1348
raise BzrError("destination %r already exists" % dest_path)
1349
if f_id in to_idpath:
1350
raise BzrError("can't move %r to a subdirectory of itself" % f)
1352
# OK, so there's a race here, it's possible that someone will
1353
# create a file in this interval and then the rename might be
1354
# left half-done. But we should have caught most problems.
1356
for f in from_paths:
1357
name_tail = splitpath(f)[-1]
1358
dest_path = appendpath(to_name, name_tail)
1359
result.append((f, dest_path))
1360
inv.rename(inv.path2id(f), to_dir_id, name_tail)
1362
os.rename(self.abspath(f), self.abspath(dest_path))
1364
raise BzrError("failed to rename %r to %r: %s" % (f, dest_path, e[1]),
1365
["rename rolled back"])
1367
self._write_inventory(inv)
1374
def revert(self, filenames, old_tree=None, backups=True):
1375
"""Restore selected files to the versions from a previous tree.
1378
If true (default) backups are made of files before
1381
from bzrlib.errors import NotVersionedError, BzrError
1382
from bzrlib.atomicfile import AtomicFile
1383
from bzrlib.osutils import backup_file
1385
inv = self.read_working_inventory()
1386
if old_tree is None:
1387
old_tree = self.basis_tree()
1388
old_inv = old_tree.inventory
1391
for fn in filenames:
1392
file_id = inv.path2id(fn)
1394
raise NotVersionedError("not a versioned file", fn)
1395
if not old_inv.has_id(file_id):
1396
raise BzrError("file not present in old tree", fn, file_id)
1397
nids.append((fn, file_id))
1399
# TODO: Rename back if it was previously at a different location
1401
# TODO: If given a directory, restore the entire contents from
1402
# the previous version.
1404
# TODO: Make a backup to a temporary file.
1406
# TODO: If the file previously didn't exist, delete it?
1407
for fn, file_id in nids:
1410
f = AtomicFile(fn, 'wb')
1412
f.write(old_tree.get_file(file_id).read())
1418
def pending_merges(self):
1419
"""Return a list of pending merges.
1421
These are revisions that have been merged into the working
1422
directory but not yet committed.
1424
cfn = self._rel_controlfilename('pending-merges')
1425
if not self._transport.has(cfn):
1428
for l in self.controlfile('pending-merges', 'r').readlines():
1429
p.append(l.rstrip('\n'))
1433
def add_pending_merge(self, *revision_ids):
1434
from bzrlib.revision import validate_revision_id
1436
for rev_id in revision_ids:
1437
validate_revision_id(rev_id)
1439
p = self.pending_merges()
1441
for rev_id in revision_ids:
1447
self.set_pending_merges(p)
1449
def set_pending_merges(self, rev_list):
1452
self.put_controlfile('pending-merges', '\n'.join(rev_list))
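    # Illustrative sketch (not part of the original source): pending merges
    # are stored one revision id per line in the 'pending-merges' control
    # file, e.g.:
    #
    #     branch.add_pending_merge('some-revision-id')   # hypothetical id
    #     print branch.pending_merges()                  # ['some-revision-id']
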
1457
def get_parent(self):
1458
"""Return the parent location of the branch.
1460
This is the default location for push/pull/missing. The usual
1461
pattern is that the user can override it by specifying a
1465
_locs = ['parent', 'pull', 'x-pull']
1468
return self.controlfile(l, 'r').read().strip('\n')
1470
if e.errno != errno.ENOENT:
1475
def set_parent(self, url):
1476
# TODO: Maybe delete old location files?
1477
from bzrlib.atomicfile import AtomicFile
1480
f = AtomicFile(self.controlfilename('parent'))
1489
def check_revno(self, revno):
1491
Check whether a revno corresponds to any revision.
1492
Zero (the NULL revision) is considered valid.
1495
self.check_real_revno(revno)
1497
def check_real_revno(self, revno):
1499
Check whether a revno corresponds to a real revision.
1500
Zero (the NULL revision) is considered invalid
1502
if revno < 1 or revno > self.revno():
1503
raise InvalidRevisionNumber(revno)
825
## TODO: Option to move IDs only
826
assert not isinstance(from_paths, basestring)
827
tree = self.working_tree()
829
to_abs = self.abspath(to_name)
830
if not isdir(to_abs):
831
bailout("destination %r is not a directory" % to_abs)
832
if not tree.has_filename(to_name):
833
bailout("destination %r not in working directory" % to_abs)
834
to_dir_id = inv.path2id(to_name)
835
if to_dir_id == None and to_name != '':
836
bailout("destination %r is not a versioned directory" % to_name)
837
to_dir_ie = inv[to_dir_id]
838
if to_dir_ie.kind not in ('directory', 'root_directory'):
839
bailout("destination %r is not a directory" % to_abs)
841
to_idpath = Set(inv.get_idpath(to_dir_id))
844
if not tree.has_filename(f):
845
bailout("%r does not exist in working tree" % f)
846
f_id = inv.path2id(f)
848
bailout("%r is not versioned" % f)
849
name_tail = splitpath(f)[-1]
850
dest_path = appendpath(to_name, name_tail)
851
if tree.has_filename(dest_path):
852
bailout("destination %r already exists" % dest_path)
853
if f_id in to_idpath:
854
bailout("can't move %r to a subdirectory of itself" % f)
856
# OK, so there's a race here, it's possible that someone will
857
# create a file in this interval and then the rename might be
858
# left half-done. But we should have caught most problems.
861
name_tail = splitpath(f)[-1]
862
dest_path = appendpath(to_name, name_tail)
863
print "%s => %s" % (f, dest_path)
864
inv.rename(inv.path2id(f), to_dir_id, name_tail)
866
os.rename(self.abspath(f), self.abspath(dest_path))
868
bailout("failed to rename %r to %r: %s" % (f, dest_path, e[1]),
869
["rename rolled back"])
871
self._write_inventory(inv)
875
def show_status(self, show_all=False):
876
"""Display single-line status for non-ignored working files.
878
        The list is shown sorted in order by file name.
880
>>> b = ScratchBranch(files=['foo', 'foo~'])
886
>>> b.commit("add foo")
888
>>> os.unlink(b.abspath('foo'))
892
TODO: Get state for single files.
895
# We have to build everything into a list first so that it can
896
# sorted by name, incorporating all the different sources.
898
# FIXME: Rather than getting things in random order and then sorting,
899
# just step through in order.
901
# Interesting case: the old ID for a file has been removed,
902
# but a new file has been created under that name.
904
old = self.basis_tree()
905
new = self.working_tree()
907
for fs, fid, oldname, newname, kind in diff_trees(old, new):
909
show_status(fs, kind,
910
oldname + ' => ' + newname)
911
elif fs == 'A' or fs == 'M':
912
show_status(fs, kind, newname)
914
show_status(fs, kind, oldname)
917
show_status(fs, kind, newname)
920
show_status(fs, kind, newname)
922
show_status(fs, kind, newname)
924
bailout("weird file state %r" % ((fs, fid),))
1508
928
class ScratchBranch(Branch):