                                         ' --revision or a revision_id')
        b = WorkingTree.open_containing(u'.')[0].branch

        revisions = b.repository.revisions
        if revisions is None:
            raise errors.BzrCommandError('Repository %r does not support '
                'access to raw revision texts')

        # TODO: jam 20060112 should cat-revision always output utf-8?
        if revision_id is not None:
            revision_id = osutils.safe_revision_id(revision_id, warn=False)
            try:
                self.outf.write(b.repository.get_revision_xml(revision_id).decode('utf-8'))
            except errors.NoSuchRevision:
                msg = "The repository %s contains no revision %s." % (b.repository.base,
                    revision_id)
                raise errors.BzrCommandError(msg)
        elif revision is not None:
            for rev in revision:
                if rev is None:
                    raise errors.BzrCommandError('You cannot specify a NULL'
                                                 ' revision.')
                rev_id = rev.as_revision_id(b)
                self.outf.write(b.repository.get_revision_xml(rev_id).decode('utf-8'))

        b.repository.lock_read()
        try:
            # TODO: jam 20060112 should cat-revision always output utf-8?
            if revision_id is not None:
                revision_id = osutils.safe_revision_id(revision_id, warn=False)
                try:
                    self.print_revision(revisions, revision_id)
                except errors.NoSuchRevision:
                    msg = "The repository %s contains no revision %s." % (
                        b.repository.base, revision_id)
                    raise errors.BzrCommandError(msg)
            elif revision is not None:
                for rev in revision:
                    if rev is None:
                        raise errors.BzrCommandError(
                            'You cannot specify a NULL revision.')
                    rev_id = rev.as_revision_id(b)
                    self.print_revision(revisions, rev_id)
        finally:
            b.repository.unlock()
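# A minimal sketch (illustrative, not part of cmd_cat_revision itself) of the
# add_cleanup() locking idiom that the newer hunks below use in place of the
# explicit lock_read()/unlock() pairs shown above:
#
#     self.add_cleanup(b.repository.lock_read().unlock)
#     # ... read from the repository; the registered cleanup releases the
#     # lock automatically when the command finishes.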
class cmd_dump_btree(Command):
    __doc__ = """Dump the contents of a btree index file to stdout.

    PATH is a btree index file, it can be any URL. This includes things like
    .bzr/repository/pack-names, or .bzr/repository/indices/a34b3a...ca4a4.iix
479
447
To re-create the working tree, use "bzr checkout".
481
449
_see_also = ['checkout', 'working-trees']
482
takes_args = ['location*']
450
takes_args = ['location?']
483
451
takes_options = [
485
453
help='Remove the working tree even if it has '
486
454
'uncommitted changes.'),
489
def run(self, location_list, force=False):
490
if not location_list:
493
for location in location_list:
494
d = bzrdir.BzrDir.open(location)
497
working = d.open_workingtree()
498
except errors.NoWorkingTree:
499
raise errors.BzrCommandError("No working tree to remove")
500
except errors.NotLocalUrl:
501
raise errors.BzrCommandError("You cannot remove the working tree"
504
if (working.has_changes()):
505
raise errors.UncommittedChanges(working)
507
if working.user_url != working.branch.user_url:
508
raise errors.BzrCommandError("You cannot remove the working tree"
509
" from a lightweight checkout")
511
d.destroy_workingtree()
457
def run(self, location='.', force=False):
458
d = bzrdir.BzrDir.open(location)
461
working = d.open_workingtree()
462
except errors.NoWorkingTree:
463
raise errors.BzrCommandError("No working tree to remove")
464
except errors.NotLocalUrl:
465
raise errors.BzrCommandError("You cannot remove the working tree"
468
if (working.has_changes()):
469
raise errors.UncommittedChanges(working)
471
working_path = working.bzrdir.root_transport.base
472
branch_path = working.branch.bzrdir.root_transport.base
473
if working_path != branch_path:
474
raise errors.BzrCommandError("You cannot remove the working tree"
475
" from a lightweight checkout")
477
d.destroy_workingtree()
class cmd_revno(Command):
    __doc__ = """Show current revision number.

    This is equal to the number of revisions on this branch.
569
541
wt = WorkingTree.open_containing(directory)[0]
571
self.add_cleanup(wt.lock_read().unlock)
572
544
except (errors.NoWorkingTree, errors.NotLocalUrl):
574
546
b = Branch.open_containing(directory)[0]
575
self.add_cleanup(b.lock_read().unlock)
577
if revision is not None:
578
revision_ids.extend(rev.as_revision_id(b) for rev in revision)
579
if revision_info_list is not None:
580
for rev_str in revision_info_list:
581
rev_spec = RevisionSpec.from_string(rev_str)
582
revision_ids.append(rev_spec.as_revision_id(b))
583
# No arguments supplied, default to the last revision
584
if len(revision_ids) == 0:
587
raise errors.NoWorkingTree(directory)
588
revision_ids.append(wt.last_revision())
550
if revision is not None:
551
revision_ids.extend(rev.as_revision_id(b) for rev in revision)
552
if revision_info_list is not None:
553
for rev_str in revision_info_list:
554
rev_spec = RevisionSpec.from_string(rev_str)
555
revision_ids.append(rev_spec.as_revision_id(b))
556
# No arguments supplied, default to the last revision
557
if len(revision_ids) == 0:
560
raise errors.NoWorkingTree(directory)
561
revision_ids.append(wt.last_revision())
563
revision_ids.append(b.last_revision())
567
for revision_id in revision_ids:
569
dotted_revno = b.revision_id_to_dotted_revno(revision_id)
570
revno = '.'.join(str(i) for i in dotted_revno)
571
except errors.NoSuchRevision:
573
maxlen = max(maxlen, len(revno))
574
revinfos.append([revno, revision_id])
590
revision_ids.append(b.last_revision())
594
for revision_id in revision_ids:
596
dotted_revno = b.revision_id_to_dotted_revno(revision_id)
597
revno = '.'.join(str(i) for i in dotted_revno)
598
except errors.NoSuchRevision:
600
maxlen = max(maxlen, len(revno))
601
revinfos.append([revno, revision_id])
604
581
for ri in revinfos:
605
582
self.outf.write('%*s %s\n' % (maxlen, ri[0], ri[1]))
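# Output sketch (values illustrative): each revno is right-aligned in a
# column as wide as the longest revno printed, followed by its revision id:
#
#         85 joe@example.com-20100303012408-abcdef0123456789
#     85.1.2 jane@example.com-20100305100000-0123456789abcdef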
class cmd_add(Command):
    __doc__ = """Add specified files or directories.

    In non-recursive mode, all the named items are added, regardless
    of whether they were previously ignored. A warning is given if
756
731
revision = _get_one_revision('inventory', revision)
757
732
work_tree, file_list = tree_files(file_list)
758
self.add_cleanup(work_tree.lock_read().unlock)
759
if revision is not None:
760
tree = revision.as_tree(work_tree.branch)
762
extra_trees = [work_tree]
763
self.add_cleanup(tree.lock_read().unlock)
768
if file_list is not None:
769
file_ids = tree.paths2ids(file_list, trees=extra_trees,
770
require_versioned=True)
771
# find_ids_across_trees may include some paths that don't
773
entries = sorted((tree.id2path(file_id), tree.inventory[file_id])
774
for file_id in file_ids if file_id in tree)
776
entries = tree.inventory.entries()
733
work_tree.lock_read()
735
if revision is not None:
736
tree = revision.as_tree(work_tree.branch)
738
extra_trees = [work_tree]
744
if file_list is not None:
745
file_ids = tree.paths2ids(file_list, trees=extra_trees,
746
require_versioned=True)
747
# find_ids_across_trees may include some paths that don't
749
entries = sorted((tree.id2path(file_id), tree.inventory[file_id])
750
for file_id in file_ids if file_id in tree)
752
entries = tree.inventory.entries()
755
if tree is not work_tree:
779
758
for path, entry in entries:
780
759
if kind and kind != entry.kind:
1019
1000
branch_from = Branch.open(location,
1020
1001
possible_transports=possible_transports)
1021
self.add_cleanup(branch_from.lock_read().unlock)
1023
1003
if branch_to.get_parent() is None or remember:
1024
1004
branch_to.set_parent(branch_from.base)
1026
if revision is not None:
1027
revision_id = revision.as_revision_id(branch_from)
1029
if tree_to is not None:
1030
view_info = _get_view_info_for_change_reporter(tree_to)
1031
change_reporter = delta._ChangeReporter(
1032
unversioned_filter=tree_to.is_ignored,
1033
view_info=view_info)
1034
result = tree_to.pull(
1035
branch_from, overwrite, revision_id, change_reporter,
1036
possible_transports=possible_transports, local=local)
1038
result = branch_to.pull(
1039
branch_from, overwrite, revision_id, local=local)
1041
result.report(self.outf)
1042
if verbose and result.old_revid != result.new_revid:
1043
log.show_branch_change(
1044
branch_to, self.outf, result.old_revno,
1006
if branch_from is not branch_to:
1007
branch_from.lock_read()
1009
if revision is not None:
1010
revision_id = revision.as_revision_id(branch_from)
1012
branch_to.lock_write()
1014
if tree_to is not None:
1015
view_info = _get_view_info_for_change_reporter(tree_to)
1016
change_reporter = delta._ChangeReporter(
1017
unversioned_filter=tree_to.is_ignored,
1018
view_info=view_info)
1019
result = tree_to.pull(
1020
branch_from, overwrite, revision_id, change_reporter,
1021
possible_transports=possible_transports, local=local)
1023
result = branch_to.pull(
1024
branch_from, overwrite, revision_id, local=local)
1026
result.report(self.outf)
1027
if verbose and result.old_revid != result.new_revid:
1028
log.show_branch_change(
1029
branch_to, self.outf, result.old_revno,
1034
if branch_from is not branch_to:
1035
branch_from.unlock()
class cmd_push(Command):
    __doc__ = """Update a mirror of this branch.

    The target branch will not have its working tree populated because this
    is both expensive, and is not supported on remote file systems.
1113
1103
# Get the source branch
1114
1104
(tree, br_from,
1115
1105
_unused) = bzrdir.BzrDir.open_containing_tree_or_branch(directory)
1107
strict = br_from.get_config().get_user_option_as_bool('push_strict')
1108
if strict is None: strict = True # default value
1116
1109
# Get the tip's revision_id
1117
1110
revision = _get_one_revision('push', revision)
1118
1111
if revision is not None:
1119
1112
revision_id = revision.in_history(br_from).rev_id
1121
1114
revision_id = None
1122
if tree is not None and revision_id is None:
1123
tree.check_changed_or_out_of_date(
1124
strict, 'push_strict',
1125
more_error='Use --no-strict to force the push.',
1126
more_warning='Uncommitted changes will not be pushed.')
1115
if strict and tree is not None and revision_id is None:
1116
if (tree.has_changes()):
1117
raise errors.UncommittedChanges(
1118
tree, more='Use --no-strict to force the push.')
1119
if tree.last_revision() != tree.branch.last_revision():
1120
# The tree has lost sync with its branch, there is little
1121
# chance that the user is aware of it but he can still force
1122
# the push with --no-strict
1123
raise errors.OutOfDateTree(
1124
tree, more='Use --no-strict to force the push.')
1127
1126
# Get the stacked_on branch, if any
1128
1127
if stacked_on is not None:
1129
1128
stacked_on = urlutils.normalize_url(stacked_on)
1194
1193
' directory exists, but does not already'
1195
1194
' have a control directory. This flag will'
1196
1195
' allow branch to proceed.'),
1198
help="Bind new branch to from location."),
1200
1197
aliases = ['get', 'clone']
1202
1199
def run(self, from_location, to_location=None, revision=None,
1203
1200
hardlink=False, stacked=False, standalone=False, no_tree=False,
1204
use_existing_dir=False, switch=False, bind=False):
1201
use_existing_dir=False, switch=False):
1205
1202
from bzrlib import switch as _mod_switch
1206
1203
from bzrlib.tag import _merge_tags_if_possible
1207
1204
accelerator_tree, br_from = bzrdir.BzrDir.open_tree_or_branch(
1206
if (accelerator_tree is not None and
1207
accelerator_tree.supports_content_filtering()):
1208
accelerator_tree = None
1209
1209
revision = _get_one_revision('branch', revision)
1210
self.add_cleanup(br_from.lock_read().unlock)
1211
if revision is not None:
1212
revision_id = revision.as_revision_id(br_from)
1214
# FIXME - wt.last_revision, fallback to branch, fall back to
1215
# None or perhaps NULL_REVISION to mean copy nothing
1217
revision_id = br_from.last_revision()
1218
if to_location is None:
1219
to_location = urlutils.derive_to_location(from_location)
1220
to_transport = transport.get_transport(to_location)
1222
to_transport.mkdir('.')
1223
except errors.FileExists:
1224
if not use_existing_dir:
1225
raise errors.BzrCommandError('Target directory "%s" '
1226
'already exists.' % to_location)
1212
if revision is not None:
1213
revision_id = revision.as_revision_id(br_from)
1229
bzrdir.BzrDir.open_from_transport(to_transport)
1230
except errors.NotBranchError:
1215
# FIXME - wt.last_revision, fallback to branch, fall back to
1216
# None or perhaps NULL_REVISION to mean copy nothing
1218
revision_id = br_from.last_revision()
1219
if to_location is None:
1220
to_location = urlutils.derive_to_location(from_location)
1221
to_transport = transport.get_transport(to_location)
1223
to_transport.mkdir('.')
1224
except errors.FileExists:
1225
if not use_existing_dir:
1226
raise errors.BzrCommandError('Target directory "%s" '
1227
'already exists.' % to_location)
1233
raise errors.AlreadyBranchError(to_location)
1234
except errors.NoSuchFile:
1235
raise errors.BzrCommandError('Parent of "%s" does not exist.'
1238
# preserve whatever source format we have.
1239
dir = br_from.bzrdir.sprout(to_transport.base, revision_id,
1240
possible_transports=[to_transport],
1241
accelerator_tree=accelerator_tree,
1242
hardlink=hardlink, stacked=stacked,
1243
force_new_repo=standalone,
1244
create_tree_if_local=not no_tree,
1245
source_branch=br_from)
1246
branch = dir.open_branch()
1247
except errors.NoSuchRevision:
1248
to_transport.delete_tree('.')
1249
msg = "The branch %s has no revision %s." % (from_location,
1251
raise errors.BzrCommandError(msg)
1252
_merge_tags_if_possible(br_from, branch)
1253
# If the source branch is stacked, the new branch may
1254
# be stacked whether we asked for that explicitly or not.
1255
# We therefore need a try/except here and not just 'if stacked:'
1257
note('Created new stacked branch referring to %s.' %
1258
branch.get_stacked_on_url())
1259
except (errors.NotStacked, errors.UnstackableBranchFormat,
1260
errors.UnstackableRepositoryFormat), e:
1261
note('Branched %d revision(s).' % branch.revno())
1263
# Bind to the parent
1264
parent_branch = Branch.open(from_location)
1265
branch.bind(parent_branch)
1266
note('New branch bound to %s' % from_location)
1268
# Switch to the new branch
1269
wt, _ = WorkingTree.open_containing('.')
1270
_mod_switch.switch(wt.bzrdir, branch)
1271
note('Switched to branch: %s',
1272
urlutils.unescape_for_display(branch.base, 'utf-8'))
1230
bzrdir.BzrDir.open_from_transport(to_transport)
1231
except errors.NotBranchError:
1234
raise errors.AlreadyBranchError(to_location)
1235
except errors.NoSuchFile:
1236
raise errors.BzrCommandError('Parent of "%s" does not exist.'
1239
# preserve whatever source format we have.
1240
dir = br_from.bzrdir.sprout(to_transport.base, revision_id,
1241
possible_transports=[to_transport],
1242
accelerator_tree=accelerator_tree,
1243
hardlink=hardlink, stacked=stacked,
1244
force_new_repo=standalone,
1245
create_tree_if_local=not no_tree,
1246
source_branch=br_from)
1247
branch = dir.open_branch()
1248
except errors.NoSuchRevision:
1249
to_transport.delete_tree('.')
1250
msg = "The branch %s has no revision %s." % (from_location,
1252
raise errors.BzrCommandError(msg)
1253
_merge_tags_if_possible(br_from, branch)
1254
# If the source branch is stacked, the new branch may
1255
# be stacked whether we asked for that explicitly or not.
1256
# We therefore need a try/except here and not just 'if stacked:'
1258
note('Created new stacked branch referring to %s.' %
1259
branch.get_stacked_on_url())
1260
except (errors.NotStacked, errors.UnstackableBranchFormat,
1261
errors.UnstackableRepositoryFormat), e:
1262
note('Branched %d revision(s).' % branch.revno())
1264
# Switch to the new branch
1265
wt, _ = WorkingTree.open_containing('.')
1266
_mod_switch.switch(wt.bzrdir, branch)
1267
note('Switched to branch: %s',
1268
urlutils.unescape_for_display(branch.base, 'utf-8'))
class cmd_checkout(Command):
    __doc__ = """Create a new checkout of an existing branch.

    If BRANCH_LOCATION is omitted, checkout will reconstitute a working tree for
    the branch found in '.'. This is useful if you have removed the working tree
1353
1351
@display_command
1354
1352
def run(self, dir=u'.'):
1355
1353
tree = WorkingTree.open_containing(dir)[0]
1356
self.add_cleanup(tree.lock_read().unlock)
1357
new_inv = tree.inventory
1358
old_tree = tree.basis_tree()
1359
self.add_cleanup(old_tree.lock_read().unlock)
1360
old_inv = old_tree.inventory
1362
iterator = tree.iter_changes(old_tree, include_unchanged=True)
1363
for f, paths, c, v, p, n, k, e in iterator:
1364
if paths[0] == paths[1]:
1368
renames.append(paths)
1370
for old_name, new_name in renames:
1371
self.outf.write("%s => %s\n" % (old_name, new_name))
1356
new_inv = tree.inventory
1357
old_tree = tree.basis_tree()
1358
old_tree.lock_read()
1360
old_inv = old_tree.inventory
1362
iterator = tree.iter_changes(old_tree, include_unchanged=True)
1363
for f, paths, c, v, p, n, k, e in iterator:
1364
if paths[0] == paths[1]:
1368
renames.append(paths)
1370
for old_name, new_name in renames:
1371
self.outf.write("%s => %s\n" % (old_name, new_name))
class cmd_update(Command):
    __doc__ = """Update a tree to have the latest code committed to its branch.

    This will perform a merge into the working tree, and may generate
    conflicts. If you have any local changes, you will still

    If you want to discard your local changes, you can just do a
    'bzr revert' instead of 'bzr commit' after the update.

    If the tree's branch is bound to a master branch, it will also update
    the branch from the master.
    """

    _see_also = ['pull', 'working-trees', 'status-flags']
    takes_args = ['dir?']
    takes_options = ['revision']
    aliases = ['up']
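    # Usage sketch (illustrative): "bzr update" merges the branch tip into the
    # working tree; with the --revision option, e.g. "bzr update -r -2", the
    # tree is moved to that revision instead, provided it is in the branch's
    # history.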
1393
def run(self, dir='.', revision=None):
1394
if revision is not None and len(revision) != 1:
1395
raise errors.BzrCommandError(
1396
"bzr update --revision takes exactly one revision")
1393
def run(self, dir='.'):
1397
1394
tree = WorkingTree.open_containing(dir)[0]
1398
branch = tree.branch
1399
1395
possible_transports = []
1400
master = branch.get_master_branch(
1396
master = tree.branch.get_master_branch(
1401
1397
possible_transports=possible_transports)
1402
1398
if master is not None:
1403
branch_location = master.base
1404
1399
tree.lock_write()
1406
branch_location = tree.branch.base
1407
1401
tree.lock_tree_write()
1408
self.add_cleanup(tree.unlock)
1409
# get rid of the final '/' and be ready for display
1410
branch_location = urlutils.unescape_for_display(
1411
branch_location.rstrip('/'),
1413
existing_pending_merges = tree.get_parent_ids()[1:]
1417
# may need to fetch data into a heavyweight checkout
1418
# XXX: this may take some time, maybe we should display a
1420
old_tip = branch.update(possible_transports)
1421
if revision is not None:
1422
revision_id = revision[0].as_revision_id(branch)
1424
revision_id = branch.last_revision()
1425
if revision_id == _mod_revision.ensure_null(tree.last_revision()):
1426
revno = branch.revision_id_to_dotted_revno(revision_id)
1427
note("Tree is up to date at revision %s of branch %s" %
1428
('.'.join(map(str, revno)), branch_location))
1430
view_info = _get_view_info_for_change_reporter(tree)
1431
change_reporter = delta._ChangeReporter(
1432
unversioned_filter=tree.is_ignored,
1433
view_info=view_info)
1403
existing_pending_merges = tree.get_parent_ids()[1:]
1404
last_rev = _mod_revision.ensure_null(tree.last_revision())
1405
if last_rev == _mod_revision.ensure_null(
1406
tree.branch.last_revision()):
1407
# may be up to date, check master too.
1408
if master is None or last_rev == _mod_revision.ensure_null(
1409
master.last_revision()):
1410
revno = tree.branch.revision_id_to_revno(last_rev)
1411
note("Tree is up to date at revision %d." % (revno,))
1413
view_info = _get_view_info_for_change_reporter(tree)
1435
1414
conflicts = tree.update(
1437
possible_transports=possible_transports,
1438
revision=revision_id,
1440
except errors.NoSuchRevision, e:
1441
raise errors.BzrCommandError(
1442
"branch has no revision %s\n"
1443
"bzr update --revision only works"
1444
" for a revision in the branch history"
1446
revno = tree.branch.revision_id_to_dotted_revno(
1447
_mod_revision.ensure_null(tree.last_revision()))
1448
note('Updated to revision %s of branch %s' %
1449
('.'.join(map(str, revno)), branch_location))
1450
parent_ids = tree.get_parent_ids()
1451
if parent_ids[1:] and parent_ids[1:] != existing_pending_merges:
1452
note('Your local commits will now show as pending merges with '
1453
"'bzr status', and can be committed with 'bzr commit'.")
1415
delta._ChangeReporter(unversioned_filter=tree.is_ignored,
1416
view_info=view_info), possible_transports=possible_transports)
1417
revno = tree.branch.revision_id_to_revno(
1418
_mod_revision.ensure_null(tree.last_revision()))
1419
note('Updated to revision %d.' % (revno,))
1420
if tree.get_parent_ids()[1:] != existing_pending_merges:
1421
note('Your local commits will now show as pending merges with '
1422
"'bzr status', and can be committed with 'bzr commit'.")
class cmd_info(Command):
    __doc__ = """Show information about a working tree, branch or repository.

    This command will show all known locations and formats associated to the
    tree, branch or repository.
1530
1501
if file_list is not None:
1531
1502
file_list = [f for f in file_list]
1533
self.add_cleanup(tree.lock_write().unlock)
1534
# Heuristics should probably all move into tree.remove_smart or
1537
added = tree.changes_from(tree.basis_tree(),
1538
specific_files=file_list).added
1539
file_list = sorted([f[0] for f in added], reverse=True)
1540
if len(file_list) == 0:
1541
raise errors.BzrCommandError('No matching files.')
1542
elif file_list is None:
1543
# missing files show up in iter_changes(basis) as
1544
# versioned-with-no-kind.
1546
for change in tree.iter_changes(tree.basis_tree()):
1547
# Find paths in the working tree that have no kind:
1548
if change[1][1] is not None and change[6][1] is None:
1549
missing.append(change[1][1])
1550
file_list = sorted(missing, reverse=True)
1551
file_deletion_strategy = 'keep'
1552
tree.remove(file_list, verbose=verbose, to_file=self.outf,
1553
keep_files=file_deletion_strategy=='keep',
1554
force=file_deletion_strategy=='force')
1506
# Heuristics should probably all move into tree.remove_smart or
1509
added = tree.changes_from(tree.basis_tree(),
1510
specific_files=file_list).added
1511
file_list = sorted([f[0] for f in added], reverse=True)
1512
if len(file_list) == 0:
1513
raise errors.BzrCommandError('No matching files.')
1514
elif file_list is None:
1515
# missing files show up in iter_changes(basis) as
1516
# versioned-with-no-kind.
1518
for change in tree.iter_changes(tree.basis_tree()):
1519
# Find paths in the working tree that have no kind:
1520
if change[1][1] is not None and change[6][1] is None:
1521
missing.append(change[1][1])
1522
file_list = sorted(missing, reverse=True)
1523
file_deletion_strategy = 'keep'
1524
tree.remove(file_list, verbose=verbose, to_file=self.outf,
1525
keep_files=file_deletion_strategy=='keep',
1526
force=file_deletion_strategy=='force')
class cmd_file_id(Command):
    __doc__ = """Print file_id of a particular file or directory.

    The file_id is assigned when the file is first added and remains the
    same through all revisions where the file exists, even when it is
1961
1924
raise errors.BzrCommandError('bzr diff --revision takes exactly'
1962
1925
' one or two revision specifiers')
1964
if using is not None and format is not None:
1965
raise errors.BzrCommandError('--using and --format are mutually '
1968
1927
(old_tree, new_tree,
1969
1928
old_branch, new_branch,
1970
specific_files, extra_trees) = get_trees_and_branches_to_diff_locked(
1971
file_list, revision, old, new, self.add_cleanup, apply_view=True)
1929
specific_files, extra_trees) = get_trees_and_branches_to_diff(
1930
file_list, revision, old, new, apply_view=True)
1972
1931
return show_diff_trees(old_tree, new_tree, sys.stdout,
1973
1932
specific_files=specific_files,
1974
1933
external_diff_options=diff_options,
1975
1934
old_label=old_label, new_label=new_label,
1976
extra_trees=extra_trees, using=using,
1935
extra_trees=extra_trees, using=using)
class cmd_deleted(Command):
    __doc__ = """List files deleted in the working tree.
    """
    # TODO: Show files deleted since a previous revision, or
    # between two revisions.
2331
2294
filter_by_dir = False
2333
# find the file ids to log and check for directory filtering
2334
b, file_info_list, rev1, rev2 = _get_info_for_log_files(
2335
revision, file_list, self.add_cleanup)
2336
for relpath, file_id, kind in file_info_list:
2338
raise errors.BzrCommandError(
2339
"Path unknown at end or start of revision range: %s" %
2341
# If the relpath is the top of the tree, we log everything
2298
# find the file ids to log and check for directory filtering
2299
b, file_info_list, rev1, rev2 = _get_info_for_log_files(
2300
revision, file_list)
2301
for relpath, file_id, kind in file_info_list:
2303
raise errors.BzrCommandError(
2304
"Path unknown at end or start of revision range: %s" %
2306
# If the relpath is the top of the tree, we log everything
2311
file_ids.append(file_id)
2312
filter_by_dir = filter_by_dir or (
2313
kind in ['directory', 'tree-reference'])
2316
# FIXME ? log the current subdir only RBC 20060203
2317
if revision is not None \
2318
and len(revision) > 0 and revision[0].get_branch():
2319
location = revision[0].get_branch()
2346
file_ids.append(file_id)
2347
filter_by_dir = filter_by_dir or (
2348
kind in ['directory', 'tree-reference'])
2351
# FIXME ? log the current subdir only RBC 20060203
2352
if revision is not None \
2353
and len(revision) > 0 and revision[0].get_branch():
2354
location = revision[0].get_branch()
2357
dir, relpath = bzrdir.BzrDir.open_containing(location)
2358
b = dir.open_branch()
2359
self.add_cleanup(b.lock_read().unlock)
2360
rev1, rev2 = _get_revision_range(revision, b, self.name())
2362
# Decide on the type of delta & diff filtering to use
2363
# TODO: add an --all-files option to make this configurable & consistent
2371
diff_type = 'partial'
2375
# Build the log formatter
2376
if log_format is None:
2377
log_format = log.log_formatter_registry.get_default(b)
2378
# Make a non-encoding output to include the diffs - bug 328007
2379
unencoded_output = ui.ui_factory.make_output_stream(encoding_type='exact')
2380
lf = log_format(show_ids=show_ids, to_file=self.outf,
2381
to_exact_file=unencoded_output,
2382
show_timezone=timezone,
2383
delta_format=get_verbosity_level(),
2385
show_advice=levels is None)
2387
# Choose the algorithm for doing the logging. It's annoying
2388
# having multiple code paths like this but necessary until
2389
# the underlying repository format is faster at generating
2390
# deltas or can provide everything we need from the indices.
2391
# The default algorithm - match-using-deltas - works for
2392
# multiple files and directories and is faster for small
2393
# amounts of history (200 revisions say). However, it's too
2394
# slow for logging a single file in a repository with deep
2395
# history, i.e. > 10K revisions. In the spirit of "do no
2396
# evil when adding features", we continue to use the
2397
# original algorithm - per-file-graph - for the "single
2398
# file that isn't a directory without showing a delta" case.
2399
partial_history = revision and b.repository._format.supports_chks
2400
match_using_deltas = (len(file_ids) != 1 or filter_by_dir
2401
or delta_type or partial_history)
2403
# Build the LogRequest and execute it
2404
if len(file_ids) == 0:
2406
rqst = make_log_request_dict(
2407
direction=direction, specific_fileids=file_ids,
2408
start_revision=rev1, end_revision=rev2, limit=limit,
2409
message_search=message, delta_type=delta_type,
2410
diff_type=diff_type, _match_using_deltas=match_using_deltas,
2411
exclude_common_ancestry=exclude_common_ancestry,
2413
Logger(b, rqst).show(lf)
2322
dir, relpath = bzrdir.BzrDir.open_containing(location)
2323
b = dir.open_branch()
2325
rev1, rev2 = _get_revision_range(revision, b, self.name())
2327
# Decide on the type of delta & diff filtering to use
2328
# TODO: add an --all-files option to make this configurable & consistent
2336
diff_type = 'partial'
2340
# Build the log formatter
2341
if log_format is None:
2342
log_format = log.log_formatter_registry.get_default(b)
2343
lf = log_format(show_ids=show_ids, to_file=self.outf,
2344
show_timezone=timezone,
2345
delta_format=get_verbosity_level(),
2347
show_advice=levels is None)
2349
# Choose the algorithm for doing the logging. It's annoying
2350
# having multiple code paths like this but necessary until
2351
# the underlying repository format is faster at generating
2352
# deltas or can provide everything we need from the indices.
2353
# The default algorithm - match-using-deltas - works for
2354
# multiple files and directories and is faster for small
2355
# amounts of history (200 revisions say). However, it's too
2356
# slow for logging a single file in a repository with deep
2357
# history, i.e. > 10K revisions. In the spirit of "do no
2358
# evil when adding features", we continue to use the
2359
# original algorithm - per-file-graph - for the "single
2360
# file that isn't a directory without showing a delta" case.
2361
partial_history = revision and b.repository._format.supports_chks
2362
match_using_deltas = (len(file_ids) != 1 or filter_by_dir
2363
or delta_type or partial_history)
2365
# Build the LogRequest and execute it
2366
if len(file_ids) == 0:
2368
rqst = make_log_request_dict(
2369
direction=direction, specific_fileids=file_ids,
2370
start_revision=rev1, end_revision=rev2, limit=limit,
2371
message_search=message, delta_type=delta_type,
2372
diff_type=diff_type, _match_using_deltas=match_using_deltas)
2373
Logger(b, rqst).show(lf)
2416
2379
def _get_revision_range(revisionspec_list, branch, command_name):
2561
2523
view_str = views.view_display_str(view_files)
2562
2524
note("Ignoring files outside view. View is %s" % view_str)
2564
self.add_cleanup(tree.lock_read().unlock)
2565
for fp, fc, fkind, fid, entry in tree.list_files(include_root=False,
2566
from_dir=relpath, recursive=recursive):
2567
# Apply additional masking
2568
if not all and not selection[fc]:
2570
if kind is not None and fkind != kind:
2575
fullpath = osutils.pathjoin(relpath, fp)
2578
views.check_path_in_view(tree, fullpath)
2579
except errors.FileOutsideView:
2528
for fp, fc, fkind, fid, entry in tree.list_files(include_root=False,
2529
from_dir=relpath, recursive=recursive):
2530
# Apply additional masking
2531
if not all and not selection[fc]:
2533
if kind is not None and fkind != kind:
2538
fullpath = osutils.pathjoin(relpath, fp)
2541
views.check_path_in_view(tree, fullpath)
2542
except errors.FileOutsideView:
2584
fp = osutils.pathjoin(prefix, fp)
2585
kindch = entry.kind_character()
2586
outstring = fp + kindch
2587
ui.ui_factory.clear_term()
2589
outstring = '%-8s %s' % (fc, outstring)
2590
if show_ids and fid is not None:
2591
outstring = "%-50s %s" % (outstring, fid)
2592
self.outf.write(outstring + '\n')
2594
self.outf.write(fp + '\0')
2597
self.outf.write(fid)
2598
self.outf.write('\0')
2606
self.outf.write('%-50s %s\n' % (outstring, my_id))
2547
fp = osutils.pathjoin(prefix, fp)
2548
kindch = entry.kind_character()
2549
outstring = fp + kindch
2550
ui.ui_factory.clear_term()
2552
outstring = '%-8s %s' % (fc, outstring)
2553
if show_ids and fid is not None:
2554
outstring = "%-50s %s" % (outstring, fid)
2608
2555
self.outf.write(outstring + '\n')
2557
self.outf.write(fp + '\0')
2560
self.outf.write(fid)
2561
self.outf.write('\0')
2569
self.outf.write('%-50s %s\n' % (outstring, my_id))
2571
self.outf.write(outstring + '\n')
class cmd_unknowns(Command):
    __doc__ = """List unknown files.
    Ignore everything but the "debian" toplevel directory::

        bzr ignore "RE:(?!debian/).*"

    Ignore everything except the "local" toplevel directory,
    but always ignore "*~" autosave files, even under local/::

        bzr ignore "*"
        bzr ignore "!./local"
        bzr ignore "!!*~"
2630
_see_also = ['status', 'ignored', 'patterns']
2690
2631
takes_args = ['name_pattern*']
2691
2632
takes_options = [
2692
Option('default-rules',
2693
help='Display the default ignore rules that bzr uses.')
2633
Option('old-default-rules',
2634
help='Write out the ignore rules bzr < 0.9 always used.')
2696
def run(self, name_pattern_list=None, default_rules=None):
2637
def run(self, name_pattern_list=None, old_default_rules=None):
2697
2638
from bzrlib import ignores
2698
if default_rules is not None:
2699
# dump the default rules and exit
2700
for pattern in ignores.USER_DEFAULTS:
2701
self.outf.write("%s\n" % pattern)
2639
if old_default_rules is not None:
2640
# dump the rules and exit
2641
for pattern in ignores.OLD_DEFAULTS:
2703
2644
if not name_pattern_list:
2704
2645
raise errors.BzrCommandError("ignore requires at least one "
2705
"NAME_PATTERN or --default-rules.")
2646
"NAME_PATTERN or --old-default-rules")
2706
2647
name_pattern_list = [globbing.normalize_pattern(p)
2707
2648
for p in name_pattern_list]
2708
2649
for name_pattern in name_pattern_list:
3118
3057
if local and not tree.branch.get_bound_location():
3119
3058
raise errors.LocalRequiresBoundBranch()
3121
if message is not None:
3123
file_exists = osutils.lexists(message)
3124
except UnicodeError:
3125
# The commit message contains unicode characters that can't be
3126
# represented in the filesystem encoding, so that can't be a
3131
'The commit message is a file name: "%(f)s".\n'
3132
'(use --file "%(f)s" to take commit message from that file)'
3134
ui.ui_factory.show_warning(warning_msg)
3136
message = message.replace('\r\n', '\n')
3137
message = message.replace('\r', '\n')
3139
raise errors.BzrCommandError(
3140
"please specify either --message or --file")
3142
3060
def get_message(commit_obj):
3143
3061
"""Callback to get commit message"""
3145
my_message = codecs.open(
3146
file, 'rt', osutils.get_user_encoding()).read()
3147
elif message is not None:
3148
my_message = message
3150
# No message supplied: make one up.
3151
# text is the status of the tree
3152
text = make_commit_message_template_encoded(tree,
3062
my_message = message
3063
if my_message is not None and '\r' in my_message:
3064
my_message = my_message.replace('\r\n', '\n')
3065
my_message = my_message.replace('\r', '\n')
3066
if my_message is None and not file:
3067
t = make_commit_message_template_encoded(tree,
3153
3068
selected_list, diff=show_diff,
3154
3069
output_encoding=osutils.get_user_encoding())
3155
# start_message is the template generated from hooks
3156
# XXX: Warning - looks like hooks return unicode,
3157
# make_commit_message_template_encoded returns user encoding.
3158
# We probably want to be using edit_commit_message instead to
3160
3070
start_message = generate_commit_message_template(commit_obj)
3161
my_message = edit_commit_message_encoded(text,
3071
my_message = edit_commit_message_encoded(t,
3162
3072
start_message=start_message)
3163
3073
if my_message is None:
3164
3074
raise errors.BzrCommandError("please specify a commit"
3165
3075
" message with either --message or --file")
3076
elif my_message and file:
3077
raise errors.BzrCommandError(
3078
"please specify either --message or --file")
3080
my_message = codecs.open(file, 'rt',
3081
osutils.get_user_encoding()).read()
3166
3082
if my_message == "":
3167
3083
raise errors.BzrCommandError("empty commit message specified")
3168
3084
return my_message
3597
3509
verbose = not is_quiet()
3598
3510
# TODO: should possibly lock the history file...
3599
3511
benchfile = open(".perf_history", "at", buffering=1)
3600
self.add_cleanup(benchfile.close)
3602
3513
test_suite_factory = None
3603
3514
benchfile = None
3604
selftest_kwargs = {"verbose": verbose,
3606
"stop_on_failure": one,
3607
"transport": transport,
3608
"test_suite_factory": test_suite_factory,
3609
"lsprof_timed": lsprof_timed,
3610
"lsprof_tests": lsprof_tests,
3611
"bench_history": benchfile,
3612
"matching_tests_first": first,
3613
"list_only": list_only,
3614
"random_seed": randomize,
3615
"exclude_pattern": exclude,
3617
"load_list": load_list,
3618
"debug_flags": debugflag,
3619
"starting_with": starting_with
3621
selftest_kwargs.update(self.additional_selftest_args)
3622
result = selftest(**selftest_kwargs)
3516
selftest_kwargs = {"verbose": verbose,
3518
"stop_on_failure": one,
3519
"transport": transport,
3520
"test_suite_factory": test_suite_factory,
3521
"lsprof_timed": lsprof_timed,
3522
"lsprof_tests": lsprof_tests,
3523
"bench_history": benchfile,
3524
"matching_tests_first": first,
3525
"list_only": list_only,
3526
"random_seed": randomize,
3527
"exclude_pattern": exclude,
3529
"load_list": load_list,
3530
"debug_flags": debugflag,
3531
"starting_with": starting_with
3533
selftest_kwargs.update(self.additional_selftest_args)
3534
result = selftest(**selftest_kwargs)
3536
if benchfile is not None:
3623
3538
return int(not result)
class cmd_version(Command):
    __doc__ = """Show version of bzr."""

    encoding_type = 'replace'
    takes_options = [
3806
3715
view_info = _get_view_info_for_change_reporter(tree)
3807
3716
change_reporter = delta._ChangeReporter(
3808
3717
unversioned_filter=tree.is_ignored, view_info=view_info)
3809
pb = ui.ui_factory.nested_progress_bar()
3810
self.add_cleanup(pb.finished)
3811
self.add_cleanup(tree.lock_write().unlock)
3812
if location is not None:
3814
mergeable = bundle.read_mergeable_from_url(location,
3815
possible_transports=possible_transports)
3816
except errors.NotABundle:
3720
pb = ui.ui_factory.nested_progress_bar()
3721
cleanups.append(pb.finished)
3723
cleanups.append(tree.unlock)
3724
if location is not None:
3726
mergeable = bundle.read_mergeable_from_url(location,
3727
possible_transports=possible_transports)
3728
except errors.NotABundle:
3732
raise errors.BzrCommandError('Cannot use --uncommitted'
3733
' with bundles or merge directives.')
3735
if revision is not None:
3736
raise errors.BzrCommandError(
3737
'Cannot use -r with merge directives or bundles')
3738
merger, verified = _mod_merge.Merger.from_mergeable(tree,
3741
if merger is None and uncommitted:
3742
if revision is not None and len(revision) > 0:
3743
raise errors.BzrCommandError('Cannot use --uncommitted and'
3744
' --revision at the same time.')
3745
merger = self.get_merger_from_uncommitted(tree, location, pb,
3747
allow_pending = False
3750
merger, allow_pending = self._get_merger_from_branch(tree,
3751
location, revision, remember, possible_transports, pb)
3753
merger.merge_type = merge_type
3754
merger.reprocess = reprocess
3755
merger.show_base = show_base
3756
self.sanity_check_merger(merger)
3757
if (merger.base_rev_id == merger.other_rev_id and
3758
merger.other_rev_id is not None):
3759
note('Nothing to do.')
3762
if merger.interesting_files is not None:
3763
raise errors.BzrCommandError('Cannot pull individual files')
3764
if (merger.base_rev_id == tree.last_revision()):
3765
result = tree.pull(merger.other_branch, False,
3766
merger.other_rev_id)
3767
result.report(self.outf)
3769
if merger.this_basis is None:
3770
raise errors.BzrCommandError(
3771
"This branch has no commits."
3772
" (perhaps you would prefer 'bzr pull')")
3774
return self._do_preview(merger, cleanups)
3776
return self._do_interactive(merger, cleanups)
3820
raise errors.BzrCommandError('Cannot use --uncommitted'
3821
' with bundles or merge directives.')
3823
if revision is not None:
3824
raise errors.BzrCommandError(
3825
'Cannot use -r with merge directives or bundles')
3826
merger, verified = _mod_merge.Merger.from_mergeable(tree,
3829
if merger is None and uncommitted:
3830
if revision is not None and len(revision) > 0:
3831
raise errors.BzrCommandError('Cannot use --uncommitted and'
3832
' --revision at the same time.')
3833
merger = self.get_merger_from_uncommitted(tree, location, None)
3834
allow_pending = False
3837
merger, allow_pending = self._get_merger_from_branch(tree,
3838
location, revision, remember, possible_transports, None)
3840
merger.merge_type = merge_type
3841
merger.reprocess = reprocess
3842
merger.show_base = show_base
3843
self.sanity_check_merger(merger)
3844
if (merger.base_rev_id == merger.other_rev_id and
3845
merger.other_rev_id is not None):
3846
note('Nothing to do.')
3849
if merger.interesting_files is not None:
3850
raise errors.BzrCommandError('Cannot pull individual files')
3851
if (merger.base_rev_id == tree.last_revision()):
3852
result = tree.pull(merger.other_branch, False,
3853
merger.other_rev_id)
3854
result.report(self.outf)
3856
if merger.this_basis is None:
3857
raise errors.BzrCommandError(
3858
"This branch has no commits."
3859
" (perhaps you would prefer 'bzr pull')")
3861
return self._do_preview(merger)
3863
return self._do_interactive(merger)
3865
return self._do_merge(merger, change_reporter, allow_pending,
3868
def _get_preview(self, merger):
3778
return self._do_merge(merger, change_reporter, allow_pending,
3781
for cleanup in reversed(cleanups):
3784
def _get_preview(self, merger, cleanups):
3869
3785
tree_merger = merger.make_merger()
3870
3786
tt = tree_merger.make_preview_transform()
3871
self.add_cleanup(tt.finalize)
3787
cleanups.append(tt.finalize)
3872
3788
result_tree = tt.get_preview_tree()
3873
3789
return result_tree
3875
def _do_preview(self, merger):
3791
def _do_preview(self, merger, cleanups):
3876
3792
from bzrlib.diff import show_diff_trees
3877
result_tree = self._get_preview(merger)
3793
result_tree = self._get_preview(merger, cleanups)
3878
3794
show_diff_trees(merger.this_tree, result_tree, self.outf,
3879
3795
old_label='', new_label='')
4067
3982
def run(self, file_list=None, merge_type=None, show_base=False,
4068
3983
reprocess=False):
4069
from bzrlib.conflicts import restore
4070
3984
if merge_type is None:
4071
3985
merge_type = _mod_merge.Merge3Merger
4072
3986
tree, file_list = tree_files(file_list)
4073
self.add_cleanup(tree.lock_write().unlock)
4074
parents = tree.get_parent_ids()
4075
if len(parents) != 2:
4076
raise errors.BzrCommandError("Sorry, remerge only works after normal"
4077
" merges. Not cherrypicking or"
4079
repository = tree.branch.repository
4080
interesting_ids = None
4082
conflicts = tree.conflicts()
4083
if file_list is not None:
4084
interesting_ids = set()
4085
for filename in file_list:
4086
file_id = tree.path2id(filename)
4088
raise errors.NotVersionedError(filename)
4089
interesting_ids.add(file_id)
4090
if tree.kind(file_id) != "directory":
3989
parents = tree.get_parent_ids()
3990
if len(parents) != 2:
3991
raise errors.BzrCommandError("Sorry, remerge only works after normal"
3992
" merges. Not cherrypicking or"
3994
repository = tree.branch.repository
3995
interesting_ids = None
3997
conflicts = tree.conflicts()
3998
if file_list is not None:
3999
interesting_ids = set()
4000
for filename in file_list:
4001
file_id = tree.path2id(filename)
4003
raise errors.NotVersionedError(filename)
4004
interesting_ids.add(file_id)
4005
if tree.kind(file_id) != "directory":
4093
for name, ie in tree.inventory.iter_entries(file_id):
4094
interesting_ids.add(ie.file_id)
4095
new_conflicts = conflicts.select_conflicts(tree, file_list)[0]
4097
# Remerge only supports resolving contents conflicts
4098
allowed_conflicts = ('text conflict', 'contents conflict')
4099
restore_files = [c.path for c in conflicts
4100
if c.typestring in allowed_conflicts]
4101
_mod_merge.transform_tree(tree, tree.basis_tree(), interesting_ids)
4102
tree.set_conflicts(ConflictList(new_conflicts))
4103
if file_list is not None:
4104
restore_files = file_list
4105
for filename in restore_files:
4008
for name, ie in tree.inventory.iter_entries(file_id):
4009
interesting_ids.add(ie.file_id)
4010
new_conflicts = conflicts.select_conflicts(tree, file_list)[0]
4012
# Remerge only supports resolving contents conflicts
4013
allowed_conflicts = ('text conflict', 'contents conflict')
4014
restore_files = [c.path for c in conflicts
4015
if c.typestring in allowed_conflicts]
4016
_mod_merge.transform_tree(tree, tree.basis_tree(), interesting_ids)
4017
tree.set_conflicts(ConflictList(new_conflicts))
4018
if file_list is not None:
4019
restore_files = file_list
4020
for filename in restore_files:
4022
restore(tree.abspath(filename))
4023
except errors.NotConflicted:
4025
# Disable pending merges, because the file texts we are remerging
4026
# have not had those merges performed. If we use the wrong parents
4027
# list, we imply that the working tree text has seen and rejected
4028
# all the changes from the other tree, when in fact those changes
4029
# have not yet been seen.
4030
pb = ui.ui_factory.nested_progress_bar()
4031
tree.set_parent_ids(parents[:1])
4107
restore(tree.abspath(filename))
4108
except errors.NotConflicted:
4110
# Disable pending merges, because the file texts we are remerging
4111
# have not had those merges performed. If we use the wrong parents
4112
# list, we imply that the working tree text has seen and rejected
4113
# all the changes from the other tree, when in fact those changes
4114
# have not yet been seen.
4115
tree.set_parent_ids(parents[:1])
4117
merger = _mod_merge.Merger.from_revision_ids(None, tree, parents[1])
4118
merger.interesting_ids = interesting_ids
4119
merger.merge_type = merge_type
4120
merger.show_base = show_base
4121
merger.reprocess = reprocess
4122
conflicts = merger.do_merge()
4033
merger = _mod_merge.Merger.from_revision_ids(pb,
4035
merger.interesting_ids = interesting_ids
4036
merger.merge_type = merge_type
4037
merger.show_base = show_base
4038
merger.reprocess = reprocess
4039
conflicts = merger.do_merge()
4041
tree.set_parent_ids(parents)
4124
tree.set_parent_ids(parents)
4125
4045
if conflicts > 0:
    created as above. Directories containing unknown files will not be
    deleted.

    The working tree contains a list of revisions that have been merged but
    not yet committed. These revisions will be included as additional parents
    of the next commit. Normally, using revert clears that list as well as
    reverting the files. If any files are specified, revert leaves the list
    of uncommitted merges alone and reverts only the files. Use ``bzr revert
    .`` in the tree root to revert all files but keep the recorded merges,
    and ``bzr revert --forget-merges`` to clear the pending merge list without
    reverting any files.

    Using "bzr revert --forget-merges", it is possible to apply all of the
    changes from a branch in a single revision. To do this, perform the merge
    as desired. Then doing revert with the "--forget-merges" option will keep
    the content of the tree as it was, but it will clear the list of pending
    merges. The next commit will then contain all of the changes that are
    present in the other branch, but without any other parent revisions.
    Because this technique forgets where these changes originated, it may
    cause additional conflicts on later merges involving the same source and
    target branches.
    """

    _see_also = ['cat', 'export']
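    # Workflow sketch for the --forget-merges technique described in the
    # docstring above (commands illustrative):
    #   bzr merge ../other-branch
    #   bzr revert --forget-merges
    #   bzr commit -m "Other branch's changes as a single revision"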
4186
4095
def run(self, revision=None, no_backup=False, file_list=None,
4187
4096
forget_merges=None):
4188
4097
tree, file_list = tree_files(file_list)
4189
self.add_cleanup(tree.lock_tree_write().unlock)
4191
tree.set_parent_ids(tree.get_parent_ids()[:1])
4193
self._revert_tree_to_revision(tree, revision, file_list, no_backup)
4101
tree.set_parent_ids(tree.get_parent_ids()[:1])
4103
self._revert_tree_to_revision(tree, revision, file_list, no_backup)
4196
4108
def _revert_tree_to_revision(tree, revision, file_list, no_backup):
4197
4109
rev_tree = _get_one_revision_tree('revert', revision, tree=tree)
4198
tree.revert(file_list, rev_tree, not no_backup, None,
4199
report_changes=True)
4110
pb = ui.ui_factory.nested_progress_bar()
4112
tree.revert(file_list, rev_tree, not no_backup, pb,
4113
report_changes=True)
class cmd_assert_fail(Command):
    __doc__ = """Test reporting of assertion failures"""
    # intended just for use in testing
4355
4263
_get_revision_range(revision,
4356
4264
remote_branch, self.name()))
4358
local_extra, remote_extra = find_unmerged(
4359
local_branch, remote_branch, restrict,
4360
backward=not reverse,
4361
include_merges=include_merges,
4362
local_revid_range=local_revid_range,
4363
remote_revid_range=remote_revid_range)
4365
if log_format is None:
4366
registry = log.log_formatter_registry
4367
log_format = registry.get_default(local_branch)
4368
lf = log_format(to_file=self.outf,
4370
show_timezone='original')
4373
if local_extra and not theirs_only:
4374
message("You have %d extra revision(s):\n" %
4376
for revision in iter_log_revisions(local_extra,
4377
local_branch.repository,
4379
lf.log_revision(revision)
4380
printed_local = True
4383
printed_local = False
4385
if remote_extra and not mine_only:
4386
if printed_local is True:
4388
message("You are missing %d revision(s):\n" %
4390
for revision in iter_log_revisions(remote_extra,
4391
remote_branch.repository,
4393
lf.log_revision(revision)
4396
if mine_only and not local_extra:
4397
# We checked local, and found nothing extra
4398
message('This branch is up to date.\n')
4399
elif theirs_only and not remote_extra:
4400
# We checked remote, and found nothing extra
4401
message('Other branch is up to date.\n')
4402
elif not (mine_only or theirs_only or local_extra or
4404
# We checked both branches, and neither one had extra
4406
message("Branches are up to date.\n")
4266
local_branch.lock_read()
4268
remote_branch.lock_read()
4270
local_extra, remote_extra = find_unmerged(
4271
local_branch, remote_branch, restrict,
4272
backward=not reverse,
4273
include_merges=include_merges,
4274
local_revid_range=local_revid_range,
4275
remote_revid_range=remote_revid_range)
4277
if log_format is None:
4278
registry = log.log_formatter_registry
4279
log_format = registry.get_default(local_branch)
4280
lf = log_format(to_file=self.outf,
4282
show_timezone='original')
4285
if local_extra and not theirs_only:
4286
message("You have %d extra revision(s):\n" %
4288
for revision in iter_log_revisions(local_extra,
4289
local_branch.repository,
4291
lf.log_revision(revision)
4292
printed_local = True
4295
printed_local = False
4297
if remote_extra and not mine_only:
4298
if printed_local is True:
4300
message("You are missing %d revision(s):\n" %
4302
for revision in iter_log_revisions(remote_extra,
4303
remote_branch.repository,
4305
lf.log_revision(revision)
4308
if mine_only and not local_extra:
4309
# We checked local, and found nothing extra
4310
message('This branch is up to date.\n')
4311
elif theirs_only and not remote_extra:
4312
# We checked remote, and found nothing extra
4313
message('Other branch is up to date.\n')
4314
elif not (mine_only or theirs_only or local_extra or
4316
# We checked both branches, and neither one had extra
4318
message("Branches are up to date.\n")
4320
remote_branch.unlock()
4322
local_branch.unlock()
4408
4323
if not status_code and parent is None and other_branch is not None:
4409
self.add_cleanup(local_branch.lock_write().unlock)
4410
# handle race conditions - a parent might be set while we run.
4411
if local_branch.get_parent() is None:
4412
local_branch.set_parent(remote_branch.base)
4324
local_branch.lock_write()
4326
# handle race conditions - a parent might be set while we run.
4327
if local_branch.get_parent() is None:
4328
local_branch.set_parent(remote_branch.base)
4330
local_branch.unlock()
4413
4331
return status_code
class cmd_pack(Command):
    __doc__ = """Compress the data within a repository.

    This operation compresses the data within a bazaar repository. As
    bazaar supports automatic packing of repositories, this operation is
    normally not required to be done manually.

    During the pack operation, bazaar takes a backup of existing repository
    data, i.e. pack files. This backup is eventually removed by bazaar
    automatically when it is safe to do so. To save disk space by removing
    the backed up pack files, the --clean-obsolete-packs option may be used.

    Warning: If you use --clean-obsolete-packs and your machine crashes
    during or immediately after repacking, you may be left with a state
    where the deletion has been written to disk but the new packs have not
    been. In this case the repository may be unusable.
    """

    _see_also = ['repositories']
    takes_args = ['branch_or_repo?']
    takes_options = [
        Option('clean-obsolete-packs', 'Delete obsolete packs to save disk space.'),
        ]

    def run(self, branch_or_repo='.', clean_obsolete_packs=False):
        dir = bzrdir.BzrDir.open_containing(branch_or_repo)[0]
        try:
            branch = dir.open_branch()
            repository = branch.repository
        except errors.NotBranchError:
            repository = dir.open_repository()
        repository.pack(clean_obsolete_packs=clean_obsolete_packs)
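    # Usage sketch (illustrative): "bzr pack" repacks the repository at or
    # containing '.'; "bzr pack --clean-obsolete-packs" additionally deletes
    # the backed-up pack files once the repack has completed.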
class cmd_plugins(Command):
    __doc__ = """List the installed plugins.

    This command displays the list of installed plugins, including each
    plugin's version and a short description.
4552
4454
wt, branch, relpath = \
4553
4455
bzrdir.BzrDir.open_containing_tree_or_branch(filename)
4554
4456
if wt is not None:
4555
self.add_cleanup(wt.lock_read().unlock)
4557
self.add_cleanup(branch.lock_read().unlock)
4558
tree = _get_one_revision_tree('annotate', revision, branch=branch)
4559
self.add_cleanup(tree.lock_read().unlock)
4561
file_id = wt.path2id(relpath)
4563
file_id = tree.path2id(relpath)
4565
raise errors.NotVersionedError(filename)
4566
file_version = tree.inventory[file_id].revision
4567
if wt is not None and revision is None:
4568
# If there is a tree and we're not annotating historical
4569
# versions, annotate the working tree's content.
4570
annotate_file_tree(wt, file_id, self.outf, long, all,
4573
annotate_file(branch, file_version, file_id, long, all, self.outf,
4461
tree = _get_one_revision_tree('annotate', revision, branch=branch)
4463
file_id = wt.path2id(relpath)
4465
file_id = tree.path2id(relpath)
4467
raise errors.NotVersionedError(filename)
4468
file_version = tree.inventory[file_id].revision
4469
if wt is not None and revision is None:
4470
# If there is a tree and we're not annotating historical
4471
# versions, annotate the working tree's content.
4472
annotate_file_tree(wt, file_id, self.outf, long, all,
4475
annotate_file(branch, file_version, file_id, long, all, self.outf,
class cmd_re_sign(Command):
    __doc__ = """Create a digital signature for an existing revision."""
    # TODO be able to replace existing ones.

    hidden = True # is this right ?
4785
4698
end_revision=last_revno)
4788
self.outf.write('Dry-run, pretending to remove'
4789
' the above revisions.\n')
4701
print 'Dry-run, pretending to remove the above revisions.'
4703
val = raw_input('Press <enter> to continue')
4791
self.outf.write('The above revision(s) will be removed.\n')
4794
if not ui.ui_factory.get_boolean('Are you sure'):
4795
self.outf.write('Canceled')
4705
print 'The above revision(s) will be removed.'
4707
val = raw_input('Are you sure [y/N]? ')
4708
if val.lower() not in ('y', 'yes'):
4798
4712
mutter('Uncommitting from {%s} to {%s}',
4799
4713
last_rev_id, rev_id)
4800
4714
uncommit(b, tree=tree, dry_run=dry_run, verbose=verbose,
4801
4715
revno=revno, local=local)
4802
self.outf.write('You can restore the old tip by running:\n'
4803
' bzr pull . -r revid:%s\n' % last_rev_id)
4716
note('You can restore the old tip by running:\n'
4717
' bzr pull . -r revid:%s', last_rev_id)
class cmd_break_lock(Command):
    __doc__ = """Break a dead lock on a repository, branch or working directory.

    CAUTION: Locks should only be broken when you are sure that the process
    holding the lock has been stopped.

    You can get information on what locks are open via the 'bzr info
    [location]' command.

    :Examples:
        bzr break-lock bzr+ssh://example.com/bzr/foo
    """
    takes_args = ['location?']
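# A cautious sketch of the same operation from a script, reusing the
# module-level bzrdir import; the URL is a placeholder and BzrDir.break_lock
# is assumed to confirm via the UI factory before breaking anything.
def _break_dead_lock(location='bzr+ssh://example.com/bzr/foo'):
    control = bzrdir.BzrDir.open_containing(location)[0]
    try:
        control.break_lock()
    except NotImplementedError:
        pass  # some control formats have nothing to break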
    directly from the merge directive, without retrieving data from a
    branch.

    `bzr send` creates a compact data set that, when applied using bzr
    merge, has the same effect as merging from the source branch.

    By default the merge directive is self-contained and can be applied to any
    branch containing submit_branch in its ancestry without needing access to
    the source branch.

    If --no-bundle is specified, then Bazaar doesn't send the contents of the
    revisions, but only a structured request to merge from the
    public_location. In that case the public_branch is needed and it must be
    up-to-date and accessible to the recipient. The public_branch is always
    included if known, so that people can check it later.

    The submit branch defaults to the parent of the source branch, but can be
    overridden. Both submit branch and public branch will be remembered in
    branch.conf the first time they are used for a particular branch. The
    source branch defaults to that containing the working directory, but can
    be changed using --from.

    In order to calculate those changes, bzr must analyse the submit branch.
    Therefore it is most efficient for the submit branch to be a local mirror.
    If a public location is known for the submit_branch, that location is used
    in the merge directive.

    The default behaviour is to send the merge directive by mail, unless -o is
    given, in which case it is sent to a file.

    Mail is sent using your preferred mail program. This should be transparent
    on Windows (it uses MAPI). On Linux, it requires the xdg-email utility.
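# Hedged usage sketch for the behaviour described above (file names are
# placeholders; check 'bzr help send' for the full option list):
#
#   bzr send -o ../feature-x.directive    # write a self-contained directive to a file
#   bzr send --no-bundle                  # request-only; needs an up-to-date public_branch
#   bzr merge ../feature-x.directive      # how a recipient applies the directive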
    def run(self, tag_name=None, delete=None, directory='.', force=None,
            revision=None):
        branch, relpath = Branch.open_containing(directory)
        self.add_cleanup(branch.lock_write().unlock)
        if delete:
            if tag_name is None:
                raise errors.BzrCommandError("No tag specified to delete.")
            branch.tags.delete_tag(tag_name)
            self.outf.write('Deleted tag %s.\n' % tag_name)
        else:
            if revision:
                if len(revision) != 1:
                    raise errors.BzrCommandError(
                        "Tags can only be placed on a single revision, "
                        "not on a range")
                revision_id = revision[0].as_revision_id(branch)
            else:
                revision_id = branch.last_revision()
            if tag_name is None:
                tag_name = branch.automatic_tag_name(revision_id)
                if tag_name is None:
                    raise errors.BzrCommandError(
                        "Please specify a tag name.")
            if (not force) and branch.tags.has_tag(tag_name):
                raise errors.TagAlreadyExists(tag_name)
            branch.tags.set_tag(tag_name, revision_id)
            self.outf.write('Created tag %s.\n' % tag_name)
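# A small sketch built on the same tag API used above (branch.tags.has_tag /
# set_tag); the location and tag name are placeholders, and Branch/errors are
# the module-level imports already used in this file.
def _tag_branch_tip(location='.', tag_name='release-1.0', force=False):
    branch = Branch.open_containing(location)[0]
    branch.lock_write()
    try:
        revision_id = branch.last_revision()
        if not force and branch.tags.has_tag(tag_name):
            raise errors.TagAlreadyExists(tag_name)
        branch.tags.set_tag(tag_name, revision_id)
        return revision_id
    finally:
        branch.unlock()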
class cmd_tags(Command):
    __doc__ = """List tags.

    This command shows a table of tag names and the revisions they reference.
        self.add_cleanup(branch.lock_read().unlock)
        if revision:
            graph = branch.repository.get_graph()
            rev1, rev2 = _get_revision_range(revision, branch, self.name())
            revid1, revid2 = rev1.rev_id, rev2.rev_id
            # only show revisions between revid1 and revid2 (inclusive)
            tags = [(tag, revid) for tag, revid in tags if
                graph.is_between(revid, revid1, revid2)]
        if sort == 'alpha':
            tags.sort()
        elif sort == 'time':
            timestamps = {}
            for tag, revid in tags:
                try:
                    revobj = branch.repository.get_revision(revid)
                except errors.NoSuchRevision:
                    timestamp = sys.maxint # place them at the end
                else:
                    timestamp = revobj.timestamp
                timestamps[revid] = timestamp
            tags.sort(key=lambda x: timestamps[x[1]])
        if not show_ids:
            # [ (tag, revid), ... ] -> [ (tag, dotted_revno), ... ]
            for index, (tag, revid) in enumerate(tags):
                try:
                    revno = branch.revision_id_to_dotted_revno(revid)
                    if isinstance(revno, tuple):
                        revno = '.'.join(map(str, revno))
                except errors.NoSuchRevision:
                    # Bad tag data/merges can lead to tagged revisions
                    # which are not in this branch. Fail gracefully ...
                    revno = '?'
                tags[index] = (tag, revno)
        for tag, revspec in tags:
            self.outf.write('%-20s %s\n' % (tag, revspec))
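# A compact sketch of the listing above: sort a branch's tags by commit
# timestamp and print the same two-column table.  get_tag_dict() is assumed
# to return {tag_name: revision_id}; verify against your bzrlib version.
# Branch, errors and sys are the module-level imports already used here.
def _print_tags_by_time(location='.'):
    branch = Branch.open_containing(location)[0]
    branch.lock_read()
    try:
        tags = branch.tags.get_tag_dict().items()
        def _tag_time(item):
            try:
                return branch.repository.get_revision(item[1]).timestamp
            except errors.NoSuchRevision:
                return sys.maxint  # unknown revisions sort last
        tags.sort(key=_tag_time)
        for tag, revid in tags:
            print '%-20s %s' % (tag, revid)
    finally:
        branch.unlock()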
class cmd_reconfigure(Command):
    __doc__ = """Reconfigure the type of a bzr directory.

    A target configuration must be specified.
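# Hedged usage sketch for the reconfigurations this command covers (the flags
# shown are a subset; consult 'bzr help reconfigure' on your version):
#
#   bzr reconfigure --branch        # turn a checkout into an ordinary branch
#   bzr reconfigure --tree          # add a working tree to a treeless branch
#   bzr reconfigure --use-shared    # store revisions in a shared repository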
        self.outf.write('%s %s\n' % (path, location))
def _register_lazy_builtins():
    # register lazy builtins from other modules; called at startup and
    # should only be called once.
    for (name, aliases, module_name) in [
        ('cmd_bundle_info', [], 'bzrlib.bundle.commands'),
        ('cmd_dpush', [], 'bzrlib.foreign'),
        ('cmd_version_info', [], 'bzrlib.cmd_version_info'),
        ('cmd_resolve', ['resolved'], 'bzrlib.conflicts'),
        ('cmd_conflicts', [], 'bzrlib.conflicts'),
        ('cmd_sign_my_commits', [], 'bzrlib.sign_my_commits'),
        ]:
        builtin_command_registry.register_lazy(name, aliases, module_name)
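# A sketch of the same lazy-registration pattern as used from outside this
# module, e.g. by a plugin; 'cmd_frobnicate' and 'myplugin.commands' are
# hypothetical names.  The implementation module is only imported when the
# command is first looked up, which is the point of registering lazily.
from bzrlib.commands import plugin_cmds

plugin_cmds.register_lazy('cmd_frobnicate', ['frob'], 'myplugin.commands')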
# these get imported and then picked up by the scan for cmd_*
# TODO: Some more consistent way to split command definitions across files;
# we do need to load at least some information about them to know of
# aliases.  Ideally we would avoid loading the implementation until the
# details were needed.
from bzrlib.cmd_version_info import cmd_version_info
from bzrlib.conflicts import cmd_resolve, cmd_conflicts, restore
from bzrlib.bundle.commands import (
    cmd_bundle_info,
    )
from bzrlib.foreign import cmd_dpush
from bzrlib.sign_my_commits import cmd_sign_my_commits
from bzrlib.weave_commands import cmd_versionedfile_list, \
    cmd_weave_plan_merge, cmd_weave_merge_text