542
540
wt = WorkingTree.open_containing(directory)[0]
545
self.add_cleanup(wt.unlock)
546
543
except (errors.NoWorkingTree, errors.NotLocalUrl):
548
545
b = Branch.open_containing(directory)[0]
550
self.add_cleanup(b.unlock)
552
if revision is not None:
553
revision_ids.extend(rev.as_revision_id(b) for rev in revision)
554
if revision_info_list is not None:
555
for rev_str in revision_info_list:
556
rev_spec = RevisionSpec.from_string(rev_str)
557
revision_ids.append(rev_spec.as_revision_id(b))
558
# No arguments supplied, default to the last revision
559
if len(revision_ids) == 0:
562
raise errors.NoWorkingTree(directory)
563
revision_ids.append(wt.last_revision())
549
if revision is not None:
550
revision_ids.extend(rev.as_revision_id(b) for rev in revision)
551
if revision_info_list is not None:
552
for rev_str in revision_info_list:
553
rev_spec = RevisionSpec.from_string(rev_str)
554
revision_ids.append(rev_spec.as_revision_id(b))
555
# No arguments supplied, default to the last revision
556
if len(revision_ids) == 0:
559
raise errors.NoWorkingTree(directory)
560
revision_ids.append(wt.last_revision())
562
revision_ids.append(b.last_revision())
566
for revision_id in revision_ids:
568
dotted_revno = b.revision_id_to_dotted_revno(revision_id)
569
revno = '.'.join(str(i) for i in dotted_revno)
570
except errors.NoSuchRevision:
572
maxlen = max(maxlen, len(revno))
573
revinfos.append([revno, revision_id])
565
revision_ids.append(b.last_revision())
569
for revision_id in revision_ids:
571
dotted_revno = b.revision_id_to_dotted_revno(revision_id)
572
revno = '.'.join(str(i) for i in dotted_revno)
573
except errors.NoSuchRevision:
575
maxlen = max(maxlen, len(revno))
576
revinfos.append([revno, revision_id])
579
580
for ri in revinfos:
580
581
self.outf.write('%*s %s\n' % (maxlen, ri[0], ri[1]))
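
# Illustrative sketch, not part of the original patch: the add_cleanup() calls
# above register lock releases to run when the command finishes, replacing the
# older nested try/finally blocks. A minimal stand-in (real bzrlib Command
# behaviour may differ) could look like this:
class CommandCleanupSketch(object):

    def __init__(self):
        self._cleanups = []

    def add_cleanup(self, func, *args, **kwargs):
        # Remember the callable; it should run even if the command body raises.
        self._cleanups.append((func, args, kwargs))

    def run_cleanups(self):
        # Last registered runs first, matching nested try/finally blocks.
        while self._cleanups:
            func, args, kwargs = self._cleanups.pop()
            func(*args, **kwargs)

# Example: releasing a lock registered earlier.
cmd = CommandCleanupSketch()
cmd.add_cleanup(lambda: None)   # stands in for wt.unlock
cmd.run_cleanups()
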
727
730
revision = _get_one_revision('inventory', revision)
728
731
work_tree, file_list = tree_files(file_list)
729
732
work_tree.lock_read()
730
self.add_cleanup(work_tree.unlock)
731
if revision is not None:
732
tree = revision.as_tree(work_tree.branch)
734
extra_trees = [work_tree]
736
self.add_cleanup(tree.unlock)
741
if file_list is not None:
742
file_ids = tree.paths2ids(file_list, trees=extra_trees,
743
require_versioned=True)
744
# find_ids_across_trees may include some paths that don't
746
entries = sorted((tree.id2path(file_id), tree.inventory[file_id])
747
for file_id in file_ids if file_id in tree)
749
entries = tree.inventory.entries()
734
if revision is not None:
735
tree = revision.as_tree(work_tree.branch)
737
extra_trees = [work_tree]
743
if file_list is not None:
744
file_ids = tree.paths2ids(file_list, trees=extra_trees,
745
require_versioned=True)
746
# find_ids_across_trees may include some paths that don't
748
entries = sorted((tree.id2path(file_id), tree.inventory[file_id])
749
for file_id in file_ids if file_id in tree)
751
entries = tree.inventory.entries()
754
if tree is not work_tree:
752
757
for path, entry in entries:
753
758
if kind and kind != entry.kind:
892
900
dest = osutils.pathjoin(dest_parent, dest_tail)
893
901
mutter("attempting to move %s => %s", src, dest)
894
902
tree.rename_one(src, dest, after=after)
896
self.outf.write("%s => %s\n" % (src, dest))
903
self.outf.write("%s => %s\n" % (src, dest))
899
906
class cmd_pull(Command):
"""Turn this branch into a mirror of another branch.

By default, this command only works on branches that have not diverged.
Branches are considered diverged if the destination branch's most recent
commit is one that has not been merged (directly or indirectly) into the
parent.

This command only works on branches that have not diverged. Branches are
considered diverged if the destination branch's most recent commit is one
that has not been merged (directly or indirectly) into the parent.

If branches have diverged, you can use 'bzr merge' to integrate the changes
from one into the other. Once one branch has merged, the other should
be able to pull it again.

If you want to replace your local changes and just want your branch to
match the remote one, use pull --overwrite. This will work even if the two
branches have diverged.

If you want to forget your local changes and just update your branch to
match the remote one, use pull --overwrite.

If there is no default location set, the first pull will set it. After
that, you can omit the location to use the default. To change the
998
1003
if branch_from is not branch_to:
999
1004
branch_from.lock_read()
1000
self.add_cleanup(branch_from.unlock)
1001
if revision is not None:
1002
revision_id = revision.as_revision_id(branch_from)
1004
branch_to.lock_write()
1005
self.add_cleanup(branch_to.unlock)
1006
if tree_to is not None:
1007
view_info = _get_view_info_for_change_reporter(tree_to)
1008
change_reporter = delta._ChangeReporter(
1009
unversioned_filter=tree_to.is_ignored,
1010
view_info=view_info)
1011
result = tree_to.pull(
1012
branch_from, overwrite, revision_id, change_reporter,
1013
possible_transports=possible_transports, local=local)
1015
result = branch_to.pull(
1016
branch_from, overwrite, revision_id, local=local)
1018
result.report(self.outf)
1019
if verbose and result.old_revid != result.new_revid:
1020
log.show_branch_change(
1021
branch_to, self.outf, result.old_revno,
1006
if revision is not None:
1007
revision_id = revision.as_revision_id(branch_from)
1009
branch_to.lock_write()
1011
if tree_to is not None:
1012
view_info = _get_view_info_for_change_reporter(tree_to)
1013
change_reporter = delta._ChangeReporter(
1014
unversioned_filter=tree_to.is_ignored,
1015
view_info=view_info)
1016
result = tree_to.pull(
1017
branch_from, overwrite, revision_id, change_reporter,
1018
possible_transports=possible_transports, local=local)
1020
result = branch_to.pull(
1021
branch_from, overwrite, revision_id, local=local)
1023
result.report(self.outf)
1024
if verbose and result.old_revid != result.new_revid:
1025
log.show_branch_change(
1026
branch_to, self.outf, result.old_revno,
1031
if branch_from is not branch_to:
1032
branch_from.unlock()
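
# Illustrative sketch, not part of the original patch: a minimal pull between
# two existing branches, using only calls that appear above (Branch.open_containing,
# lock_write/unlock, pull, result.report). Assumes bzrlib is importable and that
# both locations already hold branches; the paths are examples.
import sys
from bzrlib.branch import Branch

branch_from = Branch.open_containing('../upstream')[0]
branch_to = Branch.open_containing('.')[0]
branch_to.lock_write()
try:
    result = branch_to.pull(branch_from, False, None)   # overwrite=False, revision_id=None
    result.report(sys.stdout)
finally:
    branch_to.unlock()
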
1025
1035
class cmd_push(Command):
1180
1190
' directory exists, but does not already'
1181
1191
' have a control directory. This flag will'
1182
1192
' allow branch to proceed.'),
1184
help="Bind new branch to from location."),
1186
1194
aliases = ['get', 'clone']
1188
1196
def run(self, from_location, to_location=None, revision=None,
1189
1197
hardlink=False, stacked=False, standalone=False, no_tree=False,
1190
use_existing_dir=False, switch=False, bind=False):
1198
use_existing_dir=False, switch=False):
1191
1199
from bzrlib import switch as _mod_switch
1192
1200
from bzrlib.tag import _merge_tags_if_possible
1193
1201
accelerator_tree, br_from = bzrdir.BzrDir.open_tree_or_branch(
1203
if (accelerator_tree is not None and
1204
accelerator_tree.supports_content_filtering()):
1205
accelerator_tree = None
1195
1206
revision = _get_one_revision('branch', revision)
1196
1207
br_from.lock_read()
1197
self.add_cleanup(br_from.unlock)
1198
if revision is not None:
1199
revision_id = revision.as_revision_id(br_from)
1201
# FIXME - wt.last_revision, fallback to branch, fall back to
1202
# None or perhaps NULL_REVISION to mean copy nothing
1204
revision_id = br_from.last_revision()
1205
if to_location is None:
1206
to_location = urlutils.derive_to_location(from_location)
1207
to_transport = transport.get_transport(to_location)
1209
to_transport.mkdir('.')
1210
except errors.FileExists:
1211
if not use_existing_dir:
1212
raise errors.BzrCommandError('Target directory "%s" '
1213
'already exists.' % to_location)
1209
if revision is not None:
1210
revision_id = revision.as_revision_id(br_from)
1216
bzrdir.BzrDir.open_from_transport(to_transport)
1217
except errors.NotBranchError:
1212
# FIXME - wt.last_revision, fallback to branch, fall back to
1213
# None or perhaps NULL_REVISION to mean copy nothing
1215
revision_id = br_from.last_revision()
1216
if to_location is None:
1217
to_location = urlutils.derive_to_location(from_location)
1218
to_transport = transport.get_transport(to_location)
1220
to_transport.mkdir('.')
1221
except errors.FileExists:
1222
if not use_existing_dir:
1223
raise errors.BzrCommandError('Target directory "%s" '
1224
'already exists.' % to_location)
1220
raise errors.AlreadyBranchError(to_location)
1221
except errors.NoSuchFile:
1222
raise errors.BzrCommandError('Parent of "%s" does not exist.'
1225
# preserve whatever source format we have.
1226
dir = br_from.bzrdir.sprout(to_transport.base, revision_id,
1227
possible_transports=[to_transport],
1228
accelerator_tree=accelerator_tree,
1229
hardlink=hardlink, stacked=stacked,
1230
force_new_repo=standalone,
1231
create_tree_if_local=not no_tree,
1232
source_branch=br_from)
1233
branch = dir.open_branch()
1234
except errors.NoSuchRevision:
1235
to_transport.delete_tree('.')
1236
msg = "The branch %s has no revision %s." % (from_location,
1238
raise errors.BzrCommandError(msg)
1239
_merge_tags_if_possible(br_from, branch)
1240
# If the source branch is stacked, the new branch may
1241
# be stacked whether we asked for that explicitly or not.
1242
# We therefore need a try/except here and not just 'if stacked:'
1244
note('Created new stacked branch referring to %s.' %
1245
branch.get_stacked_on_url())
1246
except (errors.NotStacked, errors.UnstackableBranchFormat,
1247
errors.UnstackableRepositoryFormat), e:
1248
note('Branched %d revision(s).' % branch.revno())
1250
# Bind to the parent
1251
parent_branch = Branch.open(from_location)
1252
branch.bind(parent_branch)
1253
note('New branch bound to %s' % from_location)
1255
# Switch to the new branch
1256
wt, _ = WorkingTree.open_containing('.')
1257
_mod_switch.switch(wt.bzrdir, branch)
1258
note('Switched to branch: %s',
1259
urlutils.unescape_for_display(branch.base, 'utf-8'))
1227
bzrdir.BzrDir.open_from_transport(to_transport)
1228
except errors.NotBranchError:
1231
raise errors.AlreadyBranchError(to_location)
1232
except errors.NoSuchFile:
1233
raise errors.BzrCommandError('Parent of "%s" does not exist.'
1236
# preserve whatever source format we have.
1237
dir = br_from.bzrdir.sprout(to_transport.base, revision_id,
1238
possible_transports=[to_transport],
1239
accelerator_tree=accelerator_tree,
1240
hardlink=hardlink, stacked=stacked,
1241
force_new_repo=standalone,
1242
create_tree_if_local=not no_tree,
1243
source_branch=br_from)
1244
branch = dir.open_branch()
1245
except errors.NoSuchRevision:
1246
to_transport.delete_tree('.')
1247
msg = "The branch %s has no revision %s." % (from_location,
1249
raise errors.BzrCommandError(msg)
1250
_merge_tags_if_possible(br_from, branch)
1251
# If the source branch is stacked, the new branch may
1252
# be stacked whether we asked for that explicitly or not.
1253
# We therefore need a try/except here and not just 'if stacked:'
1255
note('Created new stacked branch referring to %s.' %
1256
branch.get_stacked_on_url())
1257
except (errors.NotStacked, errors.UnstackableBranchFormat,
1258
errors.UnstackableRepositoryFormat), e:
1259
note('Branched %d revision(s).' % branch.revno())
1261
# Switch to the new branch
1262
wt, _ = WorkingTree.open_containing('.')
1263
_mod_switch.switch(wt.bzrdir, branch)
1264
note('Switched to branch: %s',
1265
urlutils.unescape_for_display(branch.base, 'utf-8'))
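
# Illustrative sketch, not part of the original patch: the target-directory
# handling above (derive_to_location plus mkdir / FileExists), redone with
# plain os calls so it runs without bzrlib transports. Names are examples.
import errno
import os

def derive_to_location(from_location):
    # 'http://host/project/trunk' -> 'trunk', '../repo/feature' -> 'feature'
    return from_location.rstrip('/').split('/')[-1]

def make_target_dir(to_location, use_existing_dir=False):
    try:
        os.mkdir(to_location)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
        if not use_existing_dir:
            raise ValueError('Target directory "%s" already exists.'
                             % to_location)

print(derive_to_location('http://example.com/project/trunk'))   # trunk
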
1262
1270
class cmd_checkout(Command):
1341
1349
def run(self, dir=u'.'):
1342
1350
tree = WorkingTree.open_containing(dir)[0]
1343
1351
tree.lock_read()
1344
self.add_cleanup(tree.unlock)
1345
new_inv = tree.inventory
1346
old_tree = tree.basis_tree()
1347
old_tree.lock_read()
1348
self.add_cleanup(old_tree.unlock)
1349
old_inv = old_tree.inventory
1351
iterator = tree.iter_changes(old_tree, include_unchanged=True)
1352
for f, paths, c, v, p, n, k, e in iterator:
1353
if paths[0] == paths[1]:
1357
renames.append(paths)
1359
for old_name, new_name in renames:
1360
self.outf.write("%s => %s\n" % (old_name, new_name))
1353
new_inv = tree.inventory
1354
old_tree = tree.basis_tree()
1355
old_tree.lock_read()
1357
old_inv = old_tree.inventory
1359
iterator = tree.iter_changes(old_tree, include_unchanged=True)
1360
for f, paths, c, v, p, n, k, e in iterator:
1361
if paths[0] == paths[1]:
1365
renames.append(paths)
1367
for old_name, new_name in renames:
1368
self.outf.write("%s => %s\n" % (old_name, new_name))
1363
1375
class cmd_update(Command):
If you want to discard your local changes, you can just do a
'bzr revert' instead of 'bzr commit' after the update.

If the tree's branch is bound to a master branch, it will also update
the branch from the master.
1377
1386
_see_also = ['pull', 'working-trees', 'status-flags']
1378
1387
takes_args = ['dir?']
1379
takes_options = ['revision']
1380
1388
aliases = ['up']
1382
def run(self, dir='.', revision=None):
1383
if revision is not None and len(revision) != 1:
1384
raise errors.BzrCommandError(
1385
"bzr update --revision takes exactly one revision")
1390
def run(self, dir='.'):
1386
1391
tree = WorkingTree.open_containing(dir)[0]
1387
branch = tree.branch
1388
1392
possible_transports = []
1389
master = branch.get_master_branch(
1393
master = tree.branch.get_master_branch(
1390
1394
possible_transports=possible_transports)
1391
1395
if master is not None:
1392
1396
tree.lock_write()
1393
branch_location = master.base
1395
1398
tree.lock_tree_write()
1396
branch_location = tree.branch.base
1397
self.add_cleanup(tree.unlock)
1398
# get rid of the final '/' and be ready for display
1399
branch_location = urlutils.unescape_for_display(branch_location[:-1],
1401
existing_pending_merges = tree.get_parent_ids()[1:]
1405
# may need to fetch data into a heavyweight checkout
1406
# XXX: this may take some time, maybe we should display a
1408
old_tip = branch.update(possible_transports)
1409
if revision is not None:
1410
revision_id = revision[0].as_revision_id(branch)
1412
revision_id = branch.last_revision()
1413
if revision_id == _mod_revision.ensure_null(tree.last_revision()):
1414
revno = branch.revision_id_to_revno(revision_id)
1415
note("Tree is up to date at revision %d of branch %s" %
1416
(revno, branch_location))
1418
view_info = _get_view_info_for_change_reporter(tree)
1419
change_reporter = delta._ChangeReporter(
1420
unversioned_filter=tree.is_ignored,
1421
view_info=view_info)
1400
existing_pending_merges = tree.get_parent_ids()[1:]
1401
last_rev = _mod_revision.ensure_null(tree.last_revision())
1402
if last_rev == _mod_revision.ensure_null(
1403
tree.branch.last_revision()):
1404
# may be up to date, check master too.
1405
if master is None or last_rev == _mod_revision.ensure_null(
1406
master.last_revision()):
1407
revno = tree.branch.revision_id_to_revno(last_rev)
1408
note("Tree is up to date at revision %d." % (revno,))
1410
view_info = _get_view_info_for_change_reporter(tree)
1423
1411
conflicts = tree.update(
1425
possible_transports=possible_transports,
1426
revision=revision_id,
1428
except errors.NoSuchRevision, e:
1429
raise errors.BzrCommandError(
1430
"branch has no revision %s\n"
1431
"bzr update --revision only works"
1432
" for a revision in the branch history"
1434
revno = tree.branch.revision_id_to_revno(
1435
_mod_revision.ensure_null(tree.last_revision()))
1436
note('Updated to revision %d of branch %s' %
1437
(revno, branch_location))
1438
if tree.get_parent_ids()[1:] != existing_pending_merges:
1439
note('Your local commits will now show as pending merges with '
1440
"'bzr status', and can be committed with 'bzr commit'.")
1412
delta._ChangeReporter(unversioned_filter=tree.is_ignored,
1413
view_info=view_info), possible_transports=possible_transports)
1414
revno = tree.branch.revision_id_to_revno(
1415
_mod_revision.ensure_null(tree.last_revision()))
1416
note('Updated to revision %d.' % (revno,))
1417
if tree.get_parent_ids()[1:] != existing_pending_merges:
1418
note('Your local commits will now show as pending merges with '
1419
"'bzr status', and can be committed with 'bzr commit'.")
1447
1428
class cmd_info(Command):
1518
1499
file_list = [f for f in file_list]
1520
1501
tree.lock_write()
1521
self.add_cleanup(tree.unlock)
1522
# Heuristics should probably all move into tree.remove_smart or
1525
added = tree.changes_from(tree.basis_tree(),
1526
specific_files=file_list).added
1527
file_list = sorted([f[0] for f in added], reverse=True)
1528
if len(file_list) == 0:
1529
raise errors.BzrCommandError('No matching files.')
1530
elif file_list is None:
1531
# missing files show up in iter_changes(basis) as
1532
# versioned-with-no-kind.
1534
for change in tree.iter_changes(tree.basis_tree()):
1535
# Find paths in the working tree that have no kind:
1536
if change[1][1] is not None and change[6][1] is None:
1537
missing.append(change[1][1])
1538
file_list = sorted(missing, reverse=True)
1539
file_deletion_strategy = 'keep'
1540
tree.remove(file_list, verbose=verbose, to_file=self.outf,
1541
keep_files=file_deletion_strategy=='keep',
1542
force=file_deletion_strategy=='force')
1503
# Heuristics should probably all move into tree.remove_smart or
1506
added = tree.changes_from(tree.basis_tree(),
1507
specific_files=file_list).added
1508
file_list = sorted([f[0] for f in added], reverse=True)
1509
if len(file_list) == 0:
1510
raise errors.BzrCommandError('No matching files.')
1511
elif file_list is None:
1512
# missing files show up in iter_changes(basis) as
1513
# versioned-with-no-kind.
1515
for change in tree.iter_changes(tree.basis_tree()):
1516
# Find paths in the working tree that have no kind:
1517
if change[1][1] is not None and change[6][1] is None:
1518
missing.append(change[1][1])
1519
file_list = sorted(missing, reverse=True)
1520
file_deletion_strategy = 'keep'
1521
tree.remove(file_list, verbose=verbose, to_file=self.outf,
1522
keep_files=file_deletion_strategy=='keep',
1523
force=file_deletion_strategy=='force')
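
# Illustrative sketch, not part of the original patch: picking out paths that
# are versioned but gone from disk, as in the change[1][1] / change[6][1] test
# above. Index 1 holds (old_path, new_path) and index 6 holds (old_kind,
# new_kind); the other tuple fields are filler here.
changes = [
    ('id-a', ('a.txt', 'a.txt'), False, None, None, None, ('file', 'file'), None),
    ('id-b', ('dir/b.txt', 'dir/b.txt'), True, None, None, None, ('file', None), None),
]
missing = []
for change in changes:
    if change[1][1] is not None and change[6][1] is None:
        missing.append(change[1][1])
# Reverse sort so children come before their parent directories when removing.
file_list = sorted(missing, reverse=True)
print(file_list)
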
1545
1528
class cmd_file_id(Command):
1767
1750
class cmd_init_repository(Command):
"""Create a shared repository for branches to share storage space.

"""Create a shared repository to hold branches.

New branches created under the repository directory will store their
revisions in the repository, not in the branch directory. For branches
with shared history, this reduces the amount of storage needed and
speeds up the creation of new branches.

revisions in the repository, not in the branch directory.

If the --no-trees option is given then the branches in the repository
will not have working trees by default. They will still exist as
directories on disk, but they will not have separate copies of the
files at a certain revision. This can be useful for repositories that
store branches which are interacted with through checkouts or remote
branches, such as on a server.

If the --no-trees option is used then the branches in the repository
will not have working trees by default.

Create a shared repository holding just branches::

    bzr init-repo --no-trees repo
    bzr init repo/trunk
2306
2277
filter_by_dir = False
2308
# find the file ids to log and check for directory filtering
2309
b, file_info_list, rev1, rev2 = _get_info_for_log_files(
2310
revision, file_list)
2311
self.add_cleanup(b.unlock)
2312
for relpath, file_id, kind in file_info_list:
2314
raise errors.BzrCommandError(
2315
"Path unknown at end or start of revision range: %s" %
2317
# If the relpath is the top of the tree, we log everything
2281
# find the file ids to log and check for directory filtering
2282
b, file_info_list, rev1, rev2 = _get_info_for_log_files(
2283
revision, file_list)
2284
for relpath, file_id, kind in file_info_list:
2286
raise errors.BzrCommandError(
2287
"Path unknown at end or start of revision range: %s" %
2289
# If the relpath is the top of the tree, we log everything
2294
file_ids.append(file_id)
2295
filter_by_dir = filter_by_dir or (
2296
kind in ['directory', 'tree-reference'])
2299
# FIXME ? log the current subdir only RBC 20060203
2300
if revision is not None \
2301
and len(revision) > 0 and revision[0].get_branch():
2302
location = revision[0].get_branch()
2322
file_ids.append(file_id)
2323
filter_by_dir = filter_by_dir or (
2324
kind in ['directory', 'tree-reference'])
2327
# FIXME ? log the current subdir only RBC 20060203
2328
if revision is not None \
2329
and len(revision) > 0 and revision[0].get_branch():
2330
location = revision[0].get_branch()
2333
dir, relpath = bzrdir.BzrDir.open_containing(location)
2334
b = dir.open_branch()
2336
self.add_cleanup(b.unlock)
2337
rev1, rev2 = _get_revision_range(revision, b, self.name())
2339
# Decide on the type of delta & diff filtering to use
2340
# TODO: add an --all-files option to make this configurable & consistent
2348
diff_type = 'partial'
2352
# Build the log formatter
2353
if log_format is None:
2354
log_format = log.log_formatter_registry.get_default(b)
2355
# Make a non-encoding output to include the diffs - bug 328007
2356
unencoded_output = ui.ui_factory.make_output_stream(encoding_type='exact')
2357
lf = log_format(show_ids=show_ids, to_file=self.outf,
2358
to_exact_file=unencoded_output,
2359
show_timezone=timezone,
2360
delta_format=get_verbosity_level(),
2362
show_advice=levels is None)
2364
# Choose the algorithm for doing the logging. It's annoying
2365
# having multiple code paths like this but necessary until
2366
# the underlying repository format is faster at generating
2367
# deltas or can provide everything we need from the indices.
2368
# The default algorithm - match-using-deltas - works for
2369
# multiple files and directories and is faster for small
2370
# amounts of history (200 revisions say). However, it's too
2371
# slow for logging a single file in a repository with deep
2372
# history, i.e. > 10K revisions. In the spirit of "do no
2373
# evil when adding features", we continue to use the
2374
# original algorithm - per-file-graph - for the "single
2375
# file that isn't a directory without showing a delta" case.
2376
partial_history = revision and b.repository._format.supports_chks
2377
match_using_deltas = (len(file_ids) != 1 or filter_by_dir
2378
or delta_type or partial_history)
2380
# Build the LogRequest and execute it
2381
if len(file_ids) == 0:
2383
rqst = make_log_request_dict(
2384
direction=direction, specific_fileids=file_ids,
2385
start_revision=rev1, end_revision=rev2, limit=limit,
2386
message_search=message, delta_type=delta_type,
2387
diff_type=diff_type, _match_using_deltas=match_using_deltas)
2388
Logger(b, rqst).show(lf)
2305
dir, relpath = bzrdir.BzrDir.open_containing(location)
2306
b = dir.open_branch()
2308
rev1, rev2 = _get_revision_range(revision, b, self.name())
2310
# Decide on the type of delta & diff filtering to use
2311
# TODO: add an --all-files option to make this configurable & consistent
2319
diff_type = 'partial'
2323
# Build the log formatter
2324
if log_format is None:
2325
log_format = log.log_formatter_registry.get_default(b)
2326
lf = log_format(show_ids=show_ids, to_file=self.outf,
2327
show_timezone=timezone,
2328
delta_format=get_verbosity_level(),
2330
show_advice=levels is None)
2332
# Choose the algorithm for doing the logging. It's annoying
2333
# having multiple code paths like this but necessary until
2334
# the underlying repository format is faster at generating
2335
# deltas or can provide everything we need from the indices.
2336
# The default algorithm - match-using-deltas - works for
2337
# multiple files and directories and is faster for small
2338
# amounts of history (200 revisions say). However, it's too
2339
# slow for logging a single file in a repository with deep
2340
# history, i.e. > 10K revisions. In the spirit of "do no
2341
# evil when adding features", we continue to use the
2342
# original algorithm - per-file-graph - for the "single
2343
# file that isn't a directory without showing a delta" case.
2344
partial_history = revision and b.repository._format.supports_chks
2345
match_using_deltas = (len(file_ids) != 1 or filter_by_dir
2346
or delta_type or partial_history)
2348
# Build the LogRequest and execute it
2349
if len(file_ids) == 0:
2351
rqst = make_log_request_dict(
2352
direction=direction, specific_fileids=file_ids,
2353
start_revision=rev1, end_revision=rev2, limit=limit,
2354
message_search=message, delta_type=delta_type,
2355
diff_type=diff_type, _match_using_deltas=match_using_deltas)
2356
Logger(b, rqst).show(lf)
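
# Illustrative sketch, not part of the original patch: the algorithm choice
# described in the comment block above, written out as a small function with
# example inputs standing in for the values computed by cmd_log.run().
def choose_log_algorithm(file_ids, filter_by_dir, delta_type, partial_history):
    match_using_deltas = (len(file_ids) != 1 or filter_by_dir
                          or delta_type or partial_history)
    return 'match-using-deltas' if match_using_deltas else 'per-file-graph'

print(choose_log_algorithm(['one-file-id'], False, None, False))   # per-file-graph
print(choose_log_algorithm(['one-file-id'], True, None, False))    # match-using-deltas
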
2391
2362
def _get_revision_range(revisionspec_list, branch, command_name):
2534
2507
note("Ignoring files outside view. View is %s" % view_str)
2536
2509
tree.lock_read()
2537
self.add_cleanup(tree.unlock)
2538
for fp, fc, fkind, fid, entry in tree.list_files(include_root=False,
2539
from_dir=relpath, recursive=recursive):
2540
# Apply additional masking
2541
if not all and not selection[fc]:
2543
if kind is not None and fkind != kind:
2548
fullpath = osutils.pathjoin(relpath, fp)
2551
views.check_path_in_view(tree, fullpath)
2552
except errors.FileOutsideView:
2511
for fp, fc, fkind, fid, entry in tree.list_files(include_root=False,
2512
from_dir=relpath, recursive=recursive):
2513
# Apply additional masking
2514
if not all and not selection[fc]:
2516
if kind is not None and fkind != kind:
2521
fullpath = osutils.pathjoin(relpath, fp)
2524
views.check_path_in_view(tree, fullpath)
2525
except errors.FileOutsideView:
2557
fp = osutils.pathjoin(prefix, fp)
2558
kindch = entry.kind_character()
2559
outstring = fp + kindch
2560
ui.ui_factory.clear_term()
2562
outstring = '%-8s %s' % (fc, outstring)
2563
if show_ids and fid is not None:
2564
outstring = "%-50s %s" % (outstring, fid)
2565
self.outf.write(outstring + '\n')
2567
self.outf.write(fp + '\0')
2570
self.outf.write(fid)
2571
self.outf.write('\0')
2579
self.outf.write('%-50s %s\n' % (outstring, my_id))
2530
fp = osutils.pathjoin(prefix, fp)
2531
kindch = entry.kind_character()
2532
outstring = fp + kindch
2533
ui.ui_factory.clear_term()
2535
outstring = '%-8s %s' % (fc, outstring)
2536
if show_ids and fid is not None:
2537
outstring = "%-50s %s" % (outstring, fid)
2581
2538
self.outf.write(outstring + '\n')
2540
self.outf.write(fp + '\0')
2543
self.outf.write(fid)
2544
self.outf.write('\0')
2552
self.outf.write('%-50s %s\n' % (outstring, my_id))
2554
self.outf.write(outstring + '\n')
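
# Illustrative sketch, not part of the original patch: assembling one "bzr ls"
# output line as in the loop above - path plus kind character, with an optional
# verbose status column and an optional file-id column. Values are examples.
def format_ls_line(path, kind_char, status=None, file_id=None):
    outstring = path + kind_char                     # e.g. 'src/' for a directory
    if status is not None:
        outstring = '%-8s %s' % (status, outstring)
    if file_id is not None:
        outstring = '%-50s %s' % (outstring, file_id)
    return outstring

print(format_ls_line('src', '/', status='V', file_id='src-20081117-example-id'))
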
2584
2559
class cmd_unknowns(Command):
3532
3474
verbose = not is_quiet()
3533
3475
# TODO: should possibly lock the history file...
3534
3476
benchfile = open(".perf_history", "at", buffering=1)
3535
self.add_cleanup(benchfile.close)
3537
3478
test_suite_factory = None
3538
3479
benchfile = None
3539
selftest_kwargs = {"verbose": verbose,
3541
"stop_on_failure": one,
3542
"transport": transport,
3543
"test_suite_factory": test_suite_factory,
3544
"lsprof_timed": lsprof_timed,
3545
"lsprof_tests": lsprof_tests,
3546
"bench_history": benchfile,
3547
"matching_tests_first": first,
3548
"list_only": list_only,
3549
"random_seed": randomize,
3550
"exclude_pattern": exclude,
3552
"load_list": load_list,
3553
"debug_flags": debugflag,
3554
"starting_with": starting_with
3556
selftest_kwargs.update(self.additional_selftest_args)
3557
result = selftest(**selftest_kwargs)
3481
selftest_kwargs = {"verbose": verbose,
3483
"stop_on_failure": one,
3484
"transport": transport,
3485
"test_suite_factory": test_suite_factory,
3486
"lsprof_timed": lsprof_timed,
3487
"lsprof_tests": lsprof_tests,
3488
"bench_history": benchfile,
3489
"matching_tests_first": first,
3490
"list_only": list_only,
3491
"random_seed": randomize,
3492
"exclude_pattern": exclude,
3494
"load_list": load_list,
3495
"debug_flags": debugflag,
3496
"starting_with": starting_with
3498
selftest_kwargs.update(self.additional_selftest_args)
3499
result = selftest(**selftest_kwargs)
3501
if benchfile is not None:
3558
3503
return int(not result)
3731
3680
view_info = _get_view_info_for_change_reporter(tree)
3732
3681
change_reporter = delta._ChangeReporter(
3733
3682
unversioned_filter=tree.is_ignored, view_info=view_info)
3734
pb = ui.ui_factory.nested_progress_bar()
3735
self.add_cleanup(pb.finished)
3737
self.add_cleanup(tree.unlock)
3738
if location is not None:
3740
mergeable = bundle.read_mergeable_from_url(location,
3741
possible_transports=possible_transports)
3742
except errors.NotABundle:
3685
pb = ui.ui_factory.nested_progress_bar()
3686
cleanups.append(pb.finished)
3688
cleanups.append(tree.unlock)
3689
if location is not None:
3691
mergeable = bundle.read_mergeable_from_url(location,
3692
possible_transports=possible_transports)
3693
except errors.NotABundle:
3697
raise errors.BzrCommandError('Cannot use --uncommitted'
3698
' with bundles or merge directives.')
3700
if revision is not None:
3701
raise errors.BzrCommandError(
3702
'Cannot use -r with merge directives or bundles')
3703
merger, verified = _mod_merge.Merger.from_mergeable(tree,
3706
if merger is None and uncommitted:
3707
if revision is not None and len(revision) > 0:
3708
raise errors.BzrCommandError('Cannot use --uncommitted and'
3709
' --revision at the same time.')
3710
merger = self.get_merger_from_uncommitted(tree, location, pb,
3712
allow_pending = False
3715
merger, allow_pending = self._get_merger_from_branch(tree,
3716
location, revision, remember, possible_transports, pb)
3718
merger.merge_type = merge_type
3719
merger.reprocess = reprocess
3720
merger.show_base = show_base
3721
self.sanity_check_merger(merger)
3722
if (merger.base_rev_id == merger.other_rev_id and
3723
merger.other_rev_id is not None):
3724
note('Nothing to do.')
3727
if merger.interesting_files is not None:
3728
raise errors.BzrCommandError('Cannot pull individual files')
3729
if (merger.base_rev_id == tree.last_revision()):
3730
result = tree.pull(merger.other_branch, False,
3731
merger.other_rev_id)
3732
result.report(self.outf)
3734
if merger.this_basis is None:
3735
raise errors.BzrCommandError(
3736
"This branch has no commits."
3737
" (perhaps you would prefer 'bzr pull')")
3739
return self._do_preview(merger, cleanups)
3741
return self._do_interactive(merger, cleanups)
3746
raise errors.BzrCommandError('Cannot use --uncommitted'
3747
' with bundles or merge directives.')
3749
if revision is not None:
3750
raise errors.BzrCommandError(
3751
'Cannot use -r with merge directives or bundles')
3752
merger, verified = _mod_merge.Merger.from_mergeable(tree,
3755
if merger is None and uncommitted:
3756
if revision is not None and len(revision) > 0:
3757
raise errors.BzrCommandError('Cannot use --uncommitted and'
3758
' --revision at the same time.')
3759
merger = self.get_merger_from_uncommitted(tree, location, pb)
3760
allow_pending = False
3763
merger, allow_pending = self._get_merger_from_branch(tree,
3764
location, revision, remember, possible_transports, pb)
3766
merger.merge_type = merge_type
3767
merger.reprocess = reprocess
3768
merger.show_base = show_base
3769
self.sanity_check_merger(merger)
3770
if (merger.base_rev_id == merger.other_rev_id and
3771
merger.other_rev_id is not None):
3772
note('Nothing to do.')
3775
if merger.interesting_files is not None:
3776
raise errors.BzrCommandError('Cannot pull individual files')
3777
if (merger.base_rev_id == tree.last_revision()):
3778
result = tree.pull(merger.other_branch, False,
3779
merger.other_rev_id)
3780
result.report(self.outf)
3782
if merger.this_basis is None:
3783
raise errors.BzrCommandError(
3784
"This branch has no commits."
3785
" (perhaps you would prefer 'bzr pull')")
3787
return self._do_preview(merger)
3789
return self._do_interactive(merger)
3791
return self._do_merge(merger, change_reporter, allow_pending,
3794
def _get_preview(self, merger):
3743
return self._do_merge(merger, change_reporter, allow_pending,
3746
for cleanup in reversed(cleanups):
3749
def _get_preview(self, merger, cleanups):
3795
3750
tree_merger = merger.make_merger()
3796
3751
tt = tree_merger.make_preview_transform()
3797
self.add_cleanup(tt.finalize)
3752
cleanups.append(tt.finalize)
3798
3753
result_tree = tt.get_preview_tree()
3799
3754
return result_tree
3801
def _do_preview(self, merger):
3756
def _do_preview(self, merger, cleanups):
3802
3757
from bzrlib.diff import show_diff_trees
3803
result_tree = self._get_preview(merger)
3758
result_tree = self._get_preview(merger, cleanups)
3804
3759
show_diff_trees(merger.this_tree, result_tree, self.outf,
3805
3760
old_label='', new_label='')
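
# Illustrative sketch, not part of the original patch: the mutually exclusive
# option checks above, as plain validation. Argument names mirror the command
# options; the error strings are the ones used in the hunks above.
def check_merge_options(mergeable, uncommitted, revision):
    if mergeable is not None:
        if uncommitted:
            raise ValueError('Cannot use --uncommitted with bundles or'
                             ' merge directives.')
        if revision is not None:
            raise ValueError('Cannot use -r with merge directives or bundles')
    elif uncommitted and revision:
        raise ValueError('Cannot use --uncommitted and --revision at the'
                         ' same time.')

check_merge_options(mergeable=None, uncommitted=True, revision=None)   # accepted
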
3996
3950
merge_type = _mod_merge.Merge3Merger
3997
3951
tree, file_list = tree_files(file_list)
3998
3952
tree.lock_write()
3999
self.add_cleanup(tree.unlock)
4000
parents = tree.get_parent_ids()
4001
if len(parents) != 2:
4002
raise errors.BzrCommandError("Sorry, remerge only works after normal"
4003
" merges. Not cherrypicking or"
4005
repository = tree.branch.repository
4006
interesting_ids = None
4008
conflicts = tree.conflicts()
4009
if file_list is not None:
4010
interesting_ids = set()
4011
for filename in file_list:
4012
file_id = tree.path2id(filename)
4014
raise errors.NotVersionedError(filename)
4015
interesting_ids.add(file_id)
4016
if tree.kind(file_id) != "directory":
3954
parents = tree.get_parent_ids()
3955
if len(parents) != 2:
3956
raise errors.BzrCommandError("Sorry, remerge only works after normal"
3957
" merges. Not cherrypicking or"
3959
repository = tree.branch.repository
3960
interesting_ids = None
3962
conflicts = tree.conflicts()
3963
if file_list is not None:
3964
interesting_ids = set()
3965
for filename in file_list:
3966
file_id = tree.path2id(filename)
3968
raise errors.NotVersionedError(filename)
3969
interesting_ids.add(file_id)
3970
if tree.kind(file_id) != "directory":
4019
for name, ie in tree.inventory.iter_entries(file_id):
4020
interesting_ids.add(ie.file_id)
4021
new_conflicts = conflicts.select_conflicts(tree, file_list)[0]
4023
# Remerge only supports resolving contents conflicts
4024
allowed_conflicts = ('text conflict', 'contents conflict')
4025
restore_files = [c.path for c in conflicts
4026
if c.typestring in allowed_conflicts]
4027
_mod_merge.transform_tree(tree, tree.basis_tree(), interesting_ids)
4028
tree.set_conflicts(ConflictList(new_conflicts))
4029
if file_list is not None:
4030
restore_files = file_list
4031
for filename in restore_files:
3973
for name, ie in tree.inventory.iter_entries(file_id):
3974
interesting_ids.add(ie.file_id)
3975
new_conflicts = conflicts.select_conflicts(tree, file_list)[0]
3977
# Remerge only supports resolving contents conflicts
3978
allowed_conflicts = ('text conflict', 'contents conflict')
3979
restore_files = [c.path for c in conflicts
3980
if c.typestring in allowed_conflicts]
3981
_mod_merge.transform_tree(tree, tree.basis_tree(), interesting_ids)
3982
tree.set_conflicts(ConflictList(new_conflicts))
3983
if file_list is not None:
3984
restore_files = file_list
3985
for filename in restore_files:
3987
restore(tree.abspath(filename))
3988
except errors.NotConflicted:
3990
# Disable pending merges, because the file texts we are remerging
3991
# have not had those merges performed. If we use the wrong parents
3992
# list, we imply that the working tree text has seen and rejected
3993
# all the changes from the other tree, when in fact those changes
3994
# have not yet been seen.
3995
pb = ui.ui_factory.nested_progress_bar()
3996
tree.set_parent_ids(parents[:1])
4033
restore(tree.abspath(filename))
4034
except errors.NotConflicted:
4036
# Disable pending merges, because the file texts we are remerging
4037
# have not had those merges performed. If we use the wrong parents
4038
# list, we imply that the working tree text has seen and rejected
4039
# all the changes from the other tree, when in fact those changes
4040
# have not yet been seen.
4041
pb = ui.ui_factory.nested_progress_bar()
4042
tree.set_parent_ids(parents[:1])
4044
merger = _mod_merge.Merger.from_revision_ids(pb,
4046
merger.interesting_ids = interesting_ids
4047
merger.merge_type = merge_type
4048
merger.show_base = show_base
4049
merger.reprocess = reprocess
4050
conflicts = merger.do_merge()
3998
merger = _mod_merge.Merger.from_revision_ids(pb,
4000
merger.interesting_ids = interesting_ids
4001
merger.merge_type = merge_type
4002
merger.show_base = show_base
4003
merger.reprocess = reprocess
4004
conflicts = merger.do_merge()
4006
tree.set_parent_ids(parents)
4052
tree.set_parent_ids(parents)
4054
4010
if conflicts > 0:
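
# Illustrative sketch, not part of the original patch: the parent-id handling
# above - drop the pending-merge parents while remerging, then always restore
# them - shown against a small stub tree instead of a real WorkingTree.
class StubTree(object):
    def __init__(self, parents):
        self._parents = list(parents)
    def get_parent_ids(self):
        return list(self._parents)
    def set_parent_ids(self, parents):
        self._parents = list(parents)

tree = StubTree(['this-revision', 'merged-revision'])
parents = tree.get_parent_ids()
tree.set_parent_ids(parents[:1])        # pretend the merge has not happened yet
try:
    conflicts = 0                       # merger.do_merge() would run here
finally:
    tree.set_parent_ids(parents)        # restore the real parents
print(tree.get_parent_ids())
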
4276
4219
if remote_branch.base == local_branch.base:
4277
4220
remote_branch = local_branch
4279
local_branch.lock_read()
4280
self.add_cleanup(local_branch.unlock)
4281
4222
local_revid_range = _revision_range_to_revid_range(
4282
4223
_get_revision_range(my_revision, local_branch,
4285
remote_branch.lock_read()
4286
self.add_cleanup(remote_branch.unlock)
4287
4226
remote_revid_range = _revision_range_to_revid_range(
4288
4227
_get_revision_range(revision,
4289
4228
remote_branch, self.name()))
4291
local_extra, remote_extra = find_unmerged(
4292
local_branch, remote_branch, restrict,
4293
backward=not reverse,
4294
include_merges=include_merges,
4295
local_revid_range=local_revid_range,
4296
remote_revid_range=remote_revid_range)
4298
if log_format is None:
4299
registry = log.log_formatter_registry
4300
log_format = registry.get_default(local_branch)
4301
lf = log_format(to_file=self.outf,
4303
show_timezone='original')
4306
if local_extra and not theirs_only:
4307
message("You have %d extra revision(s):\n" %
4309
for revision in iter_log_revisions(local_extra,
4310
local_branch.repository,
4312
lf.log_revision(revision)
4313
printed_local = True
4316
printed_local = False
4318
if remote_extra and not mine_only:
4319
if printed_local is True:
4321
message("You are missing %d revision(s):\n" %
4323
for revision in iter_log_revisions(remote_extra,
4324
remote_branch.repository,
4326
lf.log_revision(revision)
4329
if mine_only and not local_extra:
4330
# We checked local, and found nothing extra
4331
message('This branch is up to date.\n')
4332
elif theirs_only and not remote_extra:
4333
# We checked remote, and found nothing extra
4334
message('Other branch is up to date.\n')
4335
elif not (mine_only or theirs_only or local_extra or
4337
# We checked both branches, and neither one had extra
4339
message("Branches are up to date.\n")
4230
local_branch.lock_read()
4232
remote_branch.lock_read()
4234
local_extra, remote_extra = find_unmerged(
4235
local_branch, remote_branch, restrict,
4236
backward=not reverse,
4237
include_merges=include_merges,
4238
local_revid_range=local_revid_range,
4239
remote_revid_range=remote_revid_range)
4241
if log_format is None:
4242
registry = log.log_formatter_registry
4243
log_format = registry.get_default(local_branch)
4244
lf = log_format(to_file=self.outf,
4246
show_timezone='original')
4249
if local_extra and not theirs_only:
4250
message("You have %d extra revision(s):\n" %
4252
for revision in iter_log_revisions(local_extra,
4253
local_branch.repository,
4255
lf.log_revision(revision)
4256
printed_local = True
4259
printed_local = False
4261
if remote_extra and not mine_only:
4262
if printed_local is True:
4264
message("You are missing %d revision(s):\n" %
4266
for revision in iter_log_revisions(remote_extra,
4267
remote_branch.repository,
4269
lf.log_revision(revision)
4272
if mine_only and not local_extra:
4273
# We checked local, and found nothing extra
4274
message('This branch is up to date.\n')
4275
elif theirs_only and not remote_extra:
4276
# We checked remote, and found nothing extra
4277
message('Other branch is up to date.\n')
4278
elif not (mine_only or theirs_only or local_extra or
4280
# We checked both branches, and neither one had extra
4282
message("Branches are up to date.\n")
4284
remote_branch.unlock()
4286
local_branch.unlock()
4341
4287
if not status_code and parent is None and other_branch is not None:
4342
4288
local_branch.lock_write()
4343
self.add_cleanup(local_branch.unlock)
4344
# handle race conditions - a parent might be set while we run.
4345
if local_branch.get_parent() is None:
4346
local_branch.set_parent(remote_branch.base)
4290
# handle race conditions - a parent might be set while we run.
4291
if local_branch.get_parent() is None:
4292
local_branch.set_parent(remote_branch.base)
4294
local_branch.unlock()
4347
4295
return status_code
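
# Illustrative sketch, not part of the original patch: the end-of-run reporting
# above, with find_unmerged() replaced by two example revision lists. The
# status_code convention (non-zero when either side has extra revisions) is an
# assumption, not taken from the hunks above.
local_extra = ['local-only-rev']
remote_extra = []
mine_only, theirs_only = False, False

if mine_only and not local_extra:
    print('This branch is up to date.')
elif theirs_only and not remote_extra:
    print('Other branch is up to date.')
elif not (mine_only or theirs_only or local_extra or remote_extra):
    print('Branches are up to date.')
status_code = 1 if (local_extra or remote_extra) else 0
print(status_code)
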
4469
4419
bzrdir.BzrDir.open_containing_tree_or_branch(filename)
4470
4420
if wt is not None:
4472
self.add_cleanup(wt.unlock)
4474
4423
branch.lock_read()
4475
self.add_cleanup(branch.unlock)
4476
tree = _get_one_revision_tree('annotate', revision, branch=branch)
4478
self.add_cleanup(tree.unlock)
4480
file_id = wt.path2id(relpath)
4482
file_id = tree.path2id(relpath)
4484
raise errors.NotVersionedError(filename)
4485
file_version = tree.inventory[file_id].revision
4486
if wt is not None and revision is None:
4487
# If there is a tree and we're not annotating historical
4488
# versions, annotate the working tree's content.
4489
annotate_file_tree(wt, file_id, self.outf, long, all,
4492
annotate_file(branch, file_version, file_id, long, all, self.outf,
4425
tree = _get_one_revision_tree('annotate', revision, branch=branch)
4427
file_id = wt.path2id(relpath)
4429
file_id = tree.path2id(relpath)
4431
raise errors.NotVersionedError(filename)
4432
file_version = tree.inventory[file_id].revision
4433
if wt is not None and revision is None:
4434
# If there is a tree and we're not annotating historical
4435
# versions, annotate the working tree's content.
4436
annotate_file_tree(wt, file_id, self.outf, long, all,
4439
annotate_file(branch, file_version, file_id, long, all, self.outf,
4496
4448
class cmd_re_sign(Command):
directly from the merge directive, without retrieving data from a

`bzr send` creates a compact data set that, when applied using bzr
merge, has the same effect as merging from the source branch.

By default the merge directive is self-contained and can be applied to any
branch containing submit_branch in its ancestry without needing access to

If --no-bundle is specified, then Bazaar doesn't send the contents of the
revisions, but only a structured request to merge from the
public_location. In that case the public_branch is needed and it must be
up-to-date and accessible to the recipient. The public_branch is always
included if known, so that people can check it later.

The submit branch defaults to the parent of the source branch, but can be
overridden. Both submit branch and public branch will be remembered in
branch.conf the first time they are used for a particular branch. The
source branch defaults to that containing the working directory, but can
be changed using --from.

In order to calculate those changes, bzr must analyse the submit branch.
Therefore it is most efficient for the submit branch to be a local mirror.
If a public location is known for the submit_branch, that location is used
in the merge directive.

The default behaviour is to send the merge directive by mail, unless -o is
given, in which case it is sent to a file.

If --no-bundle is specified, then public_branch is needed (and must be
up-to-date), so that the receiver can perform the merge using the
public_branch. The public_branch is always included if known, so that
people can check it later.

The submit branch defaults to the parent, but can be overridden. Both
submit branch and public branch will be remembered if supplied.

If a public_branch is known for the submit_branch, that public submit
branch is used in the merge instructions. This means that a local mirror
can be used as your actual submit branch, once you have set public_branch

Mail is sent using your preferred mail program. This should be transparent
on Windows (it uses MAPI). On Linux, it requires the xdg-email utility.
5231
5166
branch, relpath = Branch.open_containing(directory)
5232
5167
branch.lock_write()
5233
self.add_cleanup(branch.unlock)
5235
branch.tags.delete_tag(tag_name)
5236
self.outf.write('Deleted tag %s.\n' % tag_name)
5239
if len(revision) != 1:
5240
raise errors.BzrCommandError(
5241
"Tags can only be placed on a single revision, "
5243
revision_id = revision[0].as_revision_id(branch)
5170
branch.tags.delete_tag(tag_name)
5171
self.outf.write('Deleted tag %s.\n' % tag_name)
5245
revision_id = branch.last_revision()
5246
if (not force) and branch.tags.has_tag(tag_name):
5247
raise errors.TagAlreadyExists(tag_name)
5248
branch.tags.set_tag(tag_name, revision_id)
5249
self.outf.write('Created tag %s.\n' % tag_name)
5174
if len(revision) != 1:
5175
raise errors.BzrCommandError(
5176
"Tags can only be placed on a single revision, "
5178
revision_id = revision[0].as_revision_id(branch)
5180
revision_id = branch.last_revision()
5181
if (not force) and branch.tags.has_tag(tag_name):
5182
raise errors.TagAlreadyExists(tag_name)
5183
branch.tags.set_tag(tag_name, revision_id)
5184
self.outf.write('Created tag %s.\n' % tag_name)
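
# Illustrative sketch, not part of the original patch: placing a tag with the
# calls used above (last_revision, tags.has_tag, tags.set_tag). Assumes bzrlib
# is importable and '.' already contains a branch; the tag name is an example.
from bzrlib.branch import Branch

branch = Branch.open_containing('.')[0]
branch.lock_write()
try:
    tag_name = 'example-tag'
    revision_id = branch.last_revision()
    if not branch.tags.has_tag(tag_name):
        branch.tags.set_tag(tag_name, revision_id)
finally:
    branch.unlock()
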
5252
5189
class cmd_tags(Command):
5287
5224
branch.lock_read()
5288
self.add_cleanup(branch.unlock)
5290
graph = branch.repository.get_graph()
5291
rev1, rev2 = _get_revision_range(revision, branch, self.name())
5292
revid1, revid2 = rev1.rev_id, rev2.rev_id
5293
# only show revisions between revid1 and revid2 (inclusive)
5294
tags = [(tag, revid) for tag, revid in tags if
5295
graph.is_between(revid, revid1, revid2)]
5298
elif sort == 'time':
5300
for tag, revid in tags:
5302
revobj = branch.repository.get_revision(revid)
5303
except errors.NoSuchRevision:
5304
timestamp = sys.maxint # place them at the end
5306
timestamp = revobj.timestamp
5307
timestamps[revid] = timestamp
5308
tags.sort(key=lambda x: timestamps[x[1]])
5310
# [ (tag, revid), ... ] -> [ (tag, dotted_revno), ... ]
5311
for index, (tag, revid) in enumerate(tags):
5313
revno = branch.revision_id_to_dotted_revno(revid)
5314
if isinstance(revno, tuple):
5315
revno = '.'.join(map(str, revno))
5316
except errors.NoSuchRevision:
5317
# Bad tag data/merges can lead to tagged revisions
5318
# which are not in this branch. Fail gracefully ...
5320
tags[index] = (tag, revno)
5227
graph = branch.repository.get_graph()
5228
rev1, rev2 = _get_revision_range(revision, branch, self.name())
5229
revid1, revid2 = rev1.rev_id, rev2.rev_id
5230
# only show revisions between revid1 and revid2 (inclusive)
5231
tags = [(tag, revid) for tag, revid in tags if
5232
graph.is_between(revid, revid1, revid2)]
5235
elif sort == 'time':
5237
for tag, revid in tags:
5239
revobj = branch.repository.get_revision(revid)
5240
except errors.NoSuchRevision:
5241
timestamp = sys.maxint # place them at the end
5243
timestamp = revobj.timestamp
5244
timestamps[revid] = timestamp
5245
tags.sort(key=lambda x: timestamps[x[1]])
5247
# [ (tag, revid), ... ] -> [ (tag, dotted_revno), ... ]
5248
for index, (tag, revid) in enumerate(tags):
5250
revno = branch.revision_id_to_dotted_revno(revid)
5251
if isinstance(revno, tuple):
5252
revno = '.'.join(map(str, revno))
5253
except errors.NoSuchRevision:
5254
# Bad tag data/merges can lead to tagged revisions
5255
# which are not in this branch. Fail gracefully ...
5257
tags[index] = (tag, revno)
5322
5260
for tag, revspec in tags:
5323
5261
self.outf.write('%-20s %s\n' % (tag, revspec))
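
# Illustrative sketch, not part of the original patch: sorting (tag, revid)
# pairs by commit time as in the 'time' branch above, with unknown revisions
# pushed to the end. sys.maxsize stands in for the sys.maxint used in the
# Python 2 code; the commit_times dict replaces repository lookups.
import sys

tags = [('v2.0', 'rev-b'), ('v1.0', 'rev-a'), ('ghost', 'rev-unknown')]
commit_times = {'rev-a': 100.0, 'rev-b': 200.0}

timestamps = {}
for tag, revid in tags:
    timestamps[revid] = commit_times.get(revid, sys.maxsize)   # missing -> last
tags.sort(key=lambda x: timestamps[x[1]])
for tag, revid in tags:
    print('%-20s %s' % (tag, revid))
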