# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

import sys, os, os.path, random, time, sha, sets, types, re, shutil, tempfile
import traceback, socket, fnmatch, difflib, time
from binascii import hexlify

import bzrlib
import bzrlib.transport
import bzrlib.ui
import bzrlib.xml
from bzrlib.trace import mutter, note
from bzrlib.osutils import isdir, isfile, quotefn, compact_date, rand_bytes, \
     sha_file, sha_string, appendpath, splitpath, file_kind, format_date, \
     username, local_time_offset
from bzrlib.errors import BzrError, InvalidRevisionNumber, InvalidRevisionId, \
     DivergedBranches, NotBranchError, NoSuchFile
from bzrlib.textui import show_status
from bzrlib.revision import Revision
from bzrlib.delta import compare_trees
from bzrlib.tree import EmptyTree, RevisionTree
from bzrlib.inventory import Inventory
from bzrlib.diff import diff_trees


BZR_BRANCH_FORMAT = "Bazaar-NG branch, format 0.0.4\n"
## TODO: Maybe include checks for common corruption of newlines, etc?

# TODO: Some operations like log might retrieve the same revisions
# repeatedly to calculate deltas.  We could perhaps have a weakref
# cache in memory to make this faster.


def find_branch(*ignored, **ignored_too):
    # XXX: leave this here for about one release, then remove it
    raise NotImplementedError('find_branch() is not supported anymore, '
                              'please use one of the new branch constructors')


def _relpath(base, path):
    """Return path relative to base, or raise exception.

    The path may be either an absolute path or a path relative to the
    current working directory.

    Lifted out of Branch.relpath for ease of testing.

    os.path.commonprefix (python2.4) has a bad bug that it works just
    on string prefixes, assuming that '/u' is a prefix of '/u2'.  This
    avoids that problem."""
    rp = os.path.abspath(path)

    s = []
    head = rp
    while len(head) >= len(base):
        if head == base:
            return os.sep.join(s)
        head, tail = os.path.split(head)
        if tail:
            s.insert(0, tail)

    raise NotBranchError("path %r is not within branch %r" % (rp, base))

def find_branch_root(t):
    """Find the branch root enclosing the transport's base.

    t is a Transport object.

    It is not necessary that the base of t exists.

    Basically we keep looking up until we find the control directory or
    run into the root.  If there isn't one, raises NotBranchError.
    """
    orig_base = t.base
    while True:
        if t.has(bzrlib.BZRDIR):
            return t
        new_t = t.clone('..')
        if new_t.base == t.base:
            # reached the root, whatever that may be
            raise NotBranchError('%s is not in a branch' % orig_base)
        t = new_t


######################################################################

class Branch(object):
    """Branch holding a history of revisions.

    base
        Base directory/url of the branch.

    TODO: Perhaps use different stores for different classes of object,
          so that we can keep track of how much space each one uses,
          or garbage-collect them.

    TODO: Add a RemoteBranch subclass.  For the basic case of read-only
          HTTP access this should be very easy by just redirecting
          controlfile access into HTTP requests.
          We would need a RemoteStore working similarly.

    TODO: Keep the on-disk branch locked while the object exists.
    """
    def __init__(self, *ignored, **ignored_too):
        raise NotImplementedError('The Branch class is abstract')

    def open(base):
        """Open an existing branch, rooted at 'base' (url)"""
        t = bzrlib.transport.transport(base)
        return _Branch(t)

    open = staticmethod(open)

    def open_containing(base):
        """Open an existing branch, containing url (search upwards for the root)
        """
        t = bzrlib.transport.transport(base)
        t = find_branch_root(t)
        return _Branch(t)

    open_containing = staticmethod(open_containing)

    def initialize(base):
        """Create a new branch, rooted at 'base' (url)"""
        t = bzrlib.transport.transport(base)
        return _Branch(t, init=True)

    initialize = staticmethod(initialize)

    def setup_caching(self, cache_root):
        """Subclasses that care about caching should override this, and set
        up cached stores located under cache_root.
        """
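
    # Illustrative usage sketch (hedged: '/tmp/demo' is just a placeholder
    # path and a writable local disk is assumed):
    #
    #     b = Branch.initialize('/tmp/demo')            # create a new branch
    #     b = Branch.open('/tmp/demo')                  # open its exact root
    #     b = Branch.open_containing('/tmp/demo/sub')   # search upwards for .bzr
    #
    # find_branch() from older releases is intentionally gone; these three
    # constructors (plus setup_caching for caching transports) replace it.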


class _Branch(Branch):
    """A branch stored in the actual filesystem.

    Note that it's "local" in the context of the filesystem; it doesn't
    really matter if it's on an nfs/smb/afs/coda/... share, as long as
    it's writable, and can be accessed via the normal filesystem API.

    _lock_mode
        None, or 'r' or 'w'

    _lock_count
        If _lock_mode is true, a positive count of the number of times the
        lock has been taken.

    _lock
        Lock object from bzrlib.lock.
    """
    # We actually expect this class to be somewhat short-lived; part of its
    # purpose is to try to isolate what bits of the branch logic are tied to
    # filesystem access, so that in a later step, we can extricate them to
    # a separate ("storage") class.
    _lock_mode = None
    _lock_count = None
    _lock = None

    def __init__(self, transport, init=False):
        """Create new branch object at a particular location.

        transport -- A Transport object, defining how to access files.
                (If a string, transport.transport() will be used to
                create a Transport object)

        init -- If True, create new control files in a previously
             unversioned directory.  If False, the branch must already
             be versioned.

        In the test suite, creation of new trees is tested using the
        `ScratchBranch` class.
        """
        if isinstance(transport, basestring):
            from bzrlib.transport import transport as get_transport
            transport = get_transport(transport)

        self._transport = transport
        if init:
            self._make_control()
        self._check_format()

    def __str__(self):
        return '%s(%r)' % (self.__class__.__name__, self._transport.base)

    __repr__ = __str__

    def __del__(self):
        if self._lock_mode or self._lock:
            from bzrlib.trace import warning
            warning("branch %r was not explicitly unlocked" % self)
            self._lock.unlock()

        # TODO: It might be best to do this somewhere else,
        # but it is nice for a Branch object to automatically
        # cache its information.
        # Alternatively, we could have the Transport objects cache requests
        # See the earlier discussion about how major objects (like Branch)
        # should never expect their __del__ function to run.
        if hasattr(self, 'cache_root') and self.cache_root is not None:
            try:
                shutil.rmtree(self.cache_root)
            except:
                pass
            self.cache_root = None

    def _get_base(self):
        return self._transport.base

    base = property(_get_base)

    def lock_write(self):
        # TODO: Upgrade locking to support using a Transport,
        # and potentially a remote locking protocol
        if self._lock_mode:
            if self._lock_mode != 'w':
                from bzrlib.errors import LockError
                raise LockError("can't upgrade to a write lock from %r" %
                                self._lock_mode)
            self._lock_count += 1
        else:
            self._lock = self._transport.lock_write(
                    self._rel_controlfilename('branch-lock'))
            self._lock_mode = 'w'
            self._lock_count = 1

    def lock_read(self):
        if self._lock_mode:
            assert self._lock_mode in ('r', 'w'), \
                   "invalid lock mode %r" % self._lock_mode
            self._lock_count += 1
        else:
            self._lock = self._transport.lock_read(
                    self._rel_controlfilename('branch-lock'))
            self._lock_mode = 'r'
            self._lock_count = 1

    def unlock(self):
        if not self._lock_mode:
            from bzrlib.errors import LockError
            raise LockError('branch %r is not locked' % (self))

        if self._lock_count > 1:
            self._lock_count -= 1
        else:
            self._lock.unlock()
            self._lock = None
            self._lock_mode = self._lock_count = None
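
    # Sketch of the intended lock discipline (illustration only): locks are
    # re-entrant counters on top of a single transport-level 'branch-lock'
    # file, so calls must be paired and a read lock cannot be upgraded:
    #
    #     b.lock_read()
    #     b.lock_read()      # nested: only bumps _lock_count
    #     b.unlock()
    #     b.unlock()         # last unlock releases the transport lock
    #     b.lock_write()     # fine now; raises LockError while 'r' is held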

    def abspath(self, name):
        """Return absolute filename for something in the branch"""
        return self._transport.abspath(name)

    def relpath(self, path):
        """Return path relative to this branch of something inside it.

        Raises an error if path is not in this branch."""
        return self._transport.relpath(path)

    def _rel_controlfilename(self, file_or_path):
        if isinstance(file_or_path, basestring):
            file_or_path = [file_or_path]
        return [bzrlib.BZRDIR] + file_or_path

    def controlfilename(self, file_or_path):
        """Return location relative to branch."""
        return self._transport.abspath(self._rel_controlfilename(file_or_path))

    def controlfile(self, file_or_path, mode='r'):
        """Open a control file for this branch.

        There are two classes of file in the control directory: text
        and binary.  binary files are untranslated byte streams.  Text
        control files are stored with Unix newlines and in UTF-8, even
        if the platform or locale defaults are different.

        Controlfiles should almost never be opened in write mode but
        rather should be atomically copied and replaced using atomicfile.
        """
        import codecs

        relpath = self._rel_controlfilename(file_or_path)
        #TODO: codecs.open() buffers linewise, so it was overloaded with
        # a much larger buffer, do we need to do the same for getreader/getwriter?
        if mode == 'rb':
            return self._transport.get(relpath)
        elif mode == 'wb':
            raise BzrError("Branch.controlfile(mode='wb') is not supported, use put_controlfiles")
        elif mode == 'r':
            return codecs.getreader('utf-8')(self._transport.get(relpath), errors='replace')
        elif mode == 'w':
            raise BzrError("Branch.controlfile(mode='w') is not supported, use put_controlfiles")
        else:
            raise BzrError("invalid controlfile mode %r" % mode)

    def put_controlfile(self, path, f, encode=True):
        """Write an entry as a controlfile.

        :param path: The path to put the file, relative to the .bzr control
                     directory
        :param f: A file-like or string object whose contents should be copied.
        :param encode:  If true, encode the contents as utf-8
        """
        self.put_controlfiles([(path, f)], encode=encode)

    def put_controlfiles(self, files, encode=True):
        """Write several entries as controlfiles.

        :param files: A list of [(path, file)] pairs, where the path is the directory
                      underneath the bzr control directory
        :param encode:  If true, encode the contents as utf-8
        """
        import codecs
        ctrl_files = []
        for path, f in files:
            if encode:
                if isinstance(f, basestring):
                    f = f.encode('utf-8', 'replace')
                else:
                    f = codecs.getwriter('utf-8')(f, errors='replace')
            path = self._rel_controlfilename(path)
            ctrl_files.append((path, f))
        self._transport.put_multi(ctrl_files)
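
    # Illustration (not a doctest): control files are read through
    # controlfile() and written through put_controlfile(s); the 'w'/'wb'
    # modes are deliberately rejected so that all writes go through the
    # transport layer.  'branch-name', 'trunk' and the 'a'/'b' entries
    # below are made-up example values:
    #
    #     fmt = self.controlfile('branch-format', 'r').read()
    #     self.put_controlfile('branch-name', 'trunk\n')
    #     self.put_controlfiles([('a', 'text'), ('b', 'more text')])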

    def _make_control(self):
        from bzrlib.inventory import Inventory
        from cStringIO import StringIO

        # Create an empty inventory
        sio = StringIO()

        # if we want per-tree root ids then this is the place to set
        # them; they're not needed for now and so omitted for
        # simplicity.
        bzrlib.xml.serializer_v4.write_inventory(Inventory(), sio)

        dirs = [[], 'text-store', 'inventory-store', 'revision-store']
        files = [('README',
            "This is a Bazaar-NG control directory.\n"
            "Do not change any files in this directory.\n"),
            ('branch-format', BZR_BRANCH_FORMAT),
            ('revision-history', ''),
            ('merged-patches', ''),
            ('pending-merged-patches', ''),
            ('branch-name', ''),
            ('branch-lock', ''),
            ('pending-merges', ''),
            ('inventory', sio.getvalue())
        ]
        self._transport.mkdir_multi([self._rel_controlfilename(d) for d in dirs])
        self.put_controlfiles(files)
        mutter('created control directory in ' + self._transport.base)

    def _check_format(self):
        """Check this branch format is supported.

        The current tool only supports the current unstable format.

        In the future, we might need different in-memory Branch
        classes to support downlevel branches.  But not yet.
        """
        # This ignores newlines so that we can open branches created
        # on Windows from Linux and so on.  I think it might be better
        # to always make all internal files in unix format.
        try:
            fmt = self.controlfile('branch-format', 'r').read()
        except NoSuchFile:
            raise NotBranchError('Could not find .bzr/branch-format in %s'
                                 % self._transport.base)
        fmt = fmt.replace('\r\n', '\n')
        if fmt != BZR_BRANCH_FORMAT:
            raise BzrError('sorry, branch format %r not supported' % fmt,
                           ['use a different bzr version',
                            'or remove the .bzr directory and "bzr init" again'])

        # We know that the format is the currently supported one.
        # So create the rest of the entries.
        from bzrlib.store.compressed_text import CompressedTextStore

        if self._transport.should_cache():
            self.cache_root = tempfile.mkdtemp(prefix='bzr-cache')
            mutter('Branch %r using caching in %r' % (self, self.cache_root))
        else:
            self.cache_root = None

        def get_store(name):
            relpath = self._rel_controlfilename(name)
            store = CompressedTextStore(self._transport.clone(relpath))
            if self._transport.should_cache():
                from meta_store import CachedStore
                cache_path = os.path.join(self.cache_root, name)
                store = CachedStore(store, cache_path)
            return store

        self.text_store = get_store('text-store')
        self.revision_store = get_store('revision-store')
        self.inventory_store = get_store('inventory-store')

    def get_root_id(self):
        """Return the id of this branch's root"""
        inv = self.read_working_inventory()
        return inv.root.file_id

    def set_root_id(self, file_id):
        inv = self.read_working_inventory()
        orig_root_id = inv.root.file_id
        del inv._byid[inv.root.file_id]
        inv.root.file_id = file_id
        inv._byid[inv.root.file_id] = inv.root
        for fid in inv:
            entry = inv[fid]
            if entry.parent_id in (None, orig_root_id):
                entry.parent_id = inv.root.file_id
        self._write_inventory(inv)

    def read_working_inventory(self):
        """Read the working inventory."""
        from bzrlib.inventory import Inventory

        # ElementTree does its own conversion from UTF-8, so open in
        # binary.
        f = self.controlfile('inventory', 'rb')
        return bzrlib.xml.serializer_v4.read_inventory(f)

    def _write_inventory(self, inv):
        """Update the working inventory.

        That is to say, the inventory describing changes underway, that
        will be committed to the next revision.
        """
        from cStringIO import StringIO
        sio = StringIO()
        bzrlib.xml.serializer_v4.write_inventory(inv, sio)
        sio.seek(0)
        # Transport handles atomicity
        self.put_controlfile('inventory', sio)

        mutter('wrote working inventory')

    inventory = property(read_working_inventory, _write_inventory, None,
                         """Inventory for the working copy.""")

    def add(self, files, ids=None):
        """Make files versioned.

        Note that the command line normally calls smart_add instead,
        which can automatically recurse.

        This puts the files in the Added state, so that they will be
        recorded by the next commit.

        files
            List of paths to add, relative to the base of the tree.

        ids
            If set, use these instead of automatically generated ids.
            Must be the same length as the list of files, but may
            contain None for ids that are to be autogenerated.

        TODO: Perhaps have an option to add the ids even if the files do
              not (yet) exist.

        TODO: Perhaps yield the ids and paths as they're added.
        """
        # TODO: Re-adding a file that is removed in the working copy
        # should probably put it back with the previous ID.
        if isinstance(files, basestring):
            assert(ids is None or isinstance(ids, basestring))
            files = [files]
            if ids is not None:
                ids = [ids]

        if ids is None:
            ids = [None] * len(files)
        else:
            assert(len(ids) == len(files))

        inv = self.read_working_inventory()
        for f, file_id in zip(files, ids):
            if is_control_file(f):
                raise BzrError("cannot add control file %s" % quotefn(f))

            fp = splitpath(f)

            if len(fp) == 0:
                raise BzrError("cannot add top-level %r" % f)

            fullpath = os.path.normpath(self.abspath(f))

            try:
                kind = file_kind(fullpath)
            except OSError:
                # maybe something better?
                raise BzrError('cannot add: not a regular file or directory: %s' % quotefn(f))

            if kind != 'file' and kind != 'directory':
                raise BzrError('cannot add: not a regular file or directory: %s' % quotefn(f))

            if file_id is None:
                file_id = gen_file_id(f)
            inv.add_path(f, kind=kind, file_id=file_id)

            mutter("add file %s file_id:{%s} kind=%r" % (f, file_id, kind))

        self._write_inventory(inv)
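
    # Example of the ids parameter (sketch only; the file names and ids are
    # hypothetical):
    #
    #     b.add('hello.txt')                             # id is autogenerated
    #     b.add(['a.txt', 'b.txt'], ids=['a-id', None])
    #
    # The second call pins 'a.txt' to 'a-id' and still autogenerates an id
    # for 'b.txt', since None entries fall back to gen_file_id().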

    def print_file(self, file, revno):
        """Print `file` to stdout."""
        tree = self.revision_tree(self.get_rev_id(revno))
        # use inventory as it was in that revision
        file_id = tree.inventory.path2id(file)
        if not file_id:
            raise BzrError("%r is not present in revision %s" % (file, revno))
        tree.print_file(file_id)

    def remove(self, files, verbose=False):
        """Mark nominated files for removal from the inventory."""

    def unknowns(self):
        """Return all unknown files."""
        return self.working_tree().unknowns()

    def commit(self, message, timestamp=None, timezone=None,
               committer=None, verbose=False):
        """Commit working copy as a new revision.

        The basic approach is to add all the file texts into the
        store, then the inventory, then make a new revision pointing
        to that inventory and store that.

        This is not quite safe if the working copy changes during the
        commit; for the moment that is simply not allowed.  A better
        approach is to make a temporary copy of the files before
        computing their hashes, and then add those hashes in turn to
        the inventory.  This should mean at least that there are no
        broken hash pointers.  There is no way we can get a snapshot
        of the whole directory at an instant.  This would also have to
        be robust against files disappearing, moving, etc.  So the
        whole thing is a bit hard.

        timestamp -- if not None, seconds-since-epoch for a
             postdated/predated commit.
        """
        ## TODO: Show branch names

        # TODO: Don't commit if there are no changes, unless forced?

        # First walk over the working inventory; and both update that
        # and also build a new revision inventory.  The revision
        # inventory needs to hold the text-id, sha1 and size of the
        # actual file versions committed in the revision.  (These are
        # not present in the working inventory.)  We also need to
        # detect missing/deleted files, and remove them from the
        # working inventory.

        work_inv = self.read_working_inventory()
        inv = Inventory()
        basis = self.basis_tree()
        basis_inv = basis.inventory
        missing_ids = []

        for path, entry in work_inv.iter_entries():
            ## TODO: Cope with files that have gone missing.

            ## TODO: Check that the file kind has not changed from the previous
            ## revision of this file (if any).

            entry = entry.copy()

            p = self.abspath(path)
            file_id = entry.file_id
            mutter('commit prep file %s, id %r ' % (p, file_id))

            if not os.path.exists(p):
                mutter("    file is missing, removing from inventory")
                if verbose:
                    show_status('D', entry.kind, quotefn(path))
                missing_ids.append(file_id)
                continue

            # TODO: Handle files that have been deleted

            # TODO: Maybe a special case for empty files?  Seems a
            # waste to store them many times.

            inv.add(entry)

            if basis_inv.has_id(file_id):
                old_kind = basis_inv[file_id].kind
                if old_kind != entry.kind:
                    raise BzrError("entry %r changed kind from %r to %r"
                                   % (file_id, old_kind, entry.kind))

            old_ie = None
            if entry.kind == 'directory':
                if not isdir(p):
                    raise BzrError("%s is entered as directory but not a directory"
                                   % quotefn(p))
            elif entry.kind == 'file':
                if not isfile(p):
                    raise BzrError("%s is entered as file but is not a file" % quotefn(p))

                content = file(p, 'rb').read()

                entry.text_sha1 = sha_string(content)
                entry.text_size = len(content)

                old_ie = basis_inv.has_id(file_id) and basis_inv[file_id]
                if (old_ie
                    and (old_ie.text_size == entry.text_size)
                    and (old_ie.text_sha1 == entry.text_sha1)):
                    ## assert content == basis.get_file(file_id).read()
                    entry.text_id = basis_inv[file_id].text_id
                    mutter('    unchanged from previous text_id {%s}' %
                           entry.text_id)
                else:
                    entry.text_id = gen_file_id(entry.name)
                    self.text_store.add(content, entry.text_id)
                    mutter('    stored with text_id {%s}' % entry.text_id)

            if verbose:
                if not old_ie:
                    state = 'A'
                elif (old_ie.name == entry.name
                      and old_ie.parent_id == entry.parent_id):
                    state = 'M'
                else:
                    state = 'R'
                show_status(state, entry.kind, quotefn(path))

        for file_id in missing_ids:
            # have to do this later so we don't mess up the iterator.
            # since parents may be removed before their children we
            # have to test.
            # FIXME: There's probably a better way to do this; perhaps
            # the workingtree should know how to filter itself.
            if work_inv.has_id(file_id):
                del work_inv[file_id]

        inv_id = rev_id = _gen_revision_id(time.time())

        inv_tmp = tempfile.TemporaryFile()
        inv.write_xml(inv_tmp)
        inv_tmp.seek(0)
        self.inventory_store.add(inv_tmp, inv_id)
        mutter('new inventory_id is {%s}' % inv_id)

        self._write_inventory(work_inv)

        if timestamp is None:
            timestamp = time.time()

        if committer is None:
            committer = username()

        if timezone is None:
            timezone = local_time_offset()

        mutter("building commit log message")
        rev = Revision(timestamp=timestamp,
                       timezone=timezone,
                       committer=committer,
                       message=message,
                       inventory_id=inv_id,
                       precursor=self.last_patch(),
                       revision_id=rev_id)

        rev_tmp = tempfile.TemporaryFile()
        rev.write_xml(rev_tmp)
        rev_tmp.seek(0)
        self.revision_store.add(rev_tmp, rev_id)
        mutter("new revision_id is {%s}" % rev_id)

        ## XXX: Everything up to here can simply be orphaned if we abort
        ## the commit; it will leave junk files behind but that doesn't
        ## matter.

        ## TODO: Read back the just-generated changeset, and make sure it
        ## applies and recreates the right state.

        ## TODO: Also calculate and store the inventory SHA1
        mutter("committing patch r%d" % (self.revno() + 1))

        self.append_revision(rev_id)

        note("committed r%d" % self.revno())

    def append_revision(self, *revision_ids):
        for revision_id in revision_ids:
            mutter("add {%s} to revision-history" % revision_id)

        rev_history = self.revision_history()
        rev_history.extend(revision_ids)

        self.put_controlfile('revision-history', '\n'.join(rev_history))

    def get_revision_xml_file(self, revision_id):
        """Return XML file object for revision object."""
        if not revision_id or not isinstance(revision_id, basestring):
            raise InvalidRevisionId(revision_id)

        self.lock_read()
        try:
            try:
                return self.revision_store[revision_id]
            except (IndexError, KeyError):
                raise bzrlib.errors.NoSuchRevision(self, revision_id)
        finally:
            self.unlock()

    get_revision_xml = get_revision_xml_file

    def get_revision(self, revision_id):
        """Return the Revision object for a named revision"""
        xml_file = self.get_revision_xml_file(revision_id)

        try:
            r = bzrlib.xml.serializer_v4.read_revision(xml_file)
        except SyntaxError, e:
            raise bzrlib.errors.BzrError('failed to unpack revision_xml',
                                         [revision_id,
                                          str(e)])

        assert r.revision_id == revision_id
        return r

    def get_revision_delta(self, revno):
        """Return the delta for one revision.

        The delta is relative to its mainline predecessor, or the
        empty tree for revision 1.
        """
        assert isinstance(revno, int)
        rh = self.revision_history()
        if not (1 <= revno <= len(rh)):
            raise InvalidRevisionNumber(revno)

        # revno is 1-based; list is 0-based

        new_tree = self.revision_tree(rh[revno-1])
        if revno == 1:
            old_tree = EmptyTree()
        else:
            old_tree = self.revision_tree(rh[revno-2])

        return compare_trees(old_tree, new_tree)

    def get_revisions(self, revision_ids, pb=None):
        """Return the Revision object for a set of named revisions"""
        from bzrlib.revision import Revision
        from bzrlib.xml import unpack_xml

        # TODO: We need to decide what to do here
        # we cannot use a generator with a try/finally, because
        # you cannot guarantee that the caller will iterate through
        # all entries.
        # in the past, get_inventory wasn't even wrapped in a
        # try/finally locking block.
        # We could either lock without the try/finally, or just
        # not lock at all. We are reading entries that should
        # never be updated.
        # I prefer locking with no finally, so that if someone
        # asks for a list of revisions, but doesn't consume them,
        # that is their problem, and they will suffer the consequences
        self.lock_read()
        for xml_file in self.revision_store.get(revision_ids, pb=pb):
            try:
                r = bzrlib.xml.serializer_v4.read_revision(xml_file)
            except SyntaxError, e:
                raise bzrlib.errors.BzrError('failed to unpack revision_xml',
                                             [str(e)])
            yield r
        self.unlock()

    def get_revision_sha1(self, revision_id):
        """Hash the stored value of a revision, and return it."""
        # In the future, revision entries will be signed. At that
        # point, it is probably best *not* to include the signature
        # in the revision hash. Because that lets you re-sign
        # the revision, (add signatures/remove signatures) and still
        # have all hash pointers stay consistent.
        # But for now, just hash the contents.
        return bzrlib.osutils.sha_file(self.get_revision_xml(revision_id))
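
    # Relationship between the revision accessors (illustrative, assuming
    # 'REV-ID' is a revision id present in this branch's revision store):
    #
    #     xml_f = self.get_revision_xml_file('REV-ID')   # raw XML file object
    #     rev   = self.get_revision('REV-ID')            # parsed Revision
    #     sha1  = self.get_revision_sha1('REV-ID')       # hash of the raw XML
    #
    # The sha1 is computed over the stored XML, not over the parsed object,
    # so it is only meaningful for the serialization actually on disk.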

    def get_inventory(self, inventory_id):
        """Get Inventory object by hash.

        TODO: Perhaps for this and similar methods, take a revision
               parameter which can be either an integer revno or a
               string hash."""
        f = self.get_inventory_xml_file(inventory_id)
        return bzrlib.xml.serializer_v4.read_inventory(f)

    def get_inventory_xml(self, inventory_id):
        """Get inventory XML as a file object."""
        # Shouldn't this have a read-lock around it?
        # As well as some sort of trap for missing ids?
        return self.inventory_store[inventory_id]

    get_inventory_xml_file = get_inventory_xml

    def get_inventories(self, inventory_ids, pb=None, permit_failure=False):
        """Get Inventory objects by id
        """
        # See the discussion in get_revisions for why
        # we don't use a try/finally block here
        self.lock_read()
        for f in self.inventory_store.get(inventory_ids,
                                          permit_failure=permit_failure, pb=pb):
            if f is not None:
                # TODO: Possibly put a try/except around this to handle
                # read serialization errors
                r = bzrlib.xml.serializer_v4.read_inventory(f)
                yield r
            elif not permit_failure:
                raise bzrlib.errors.NoSuchRevision(self, revision_id)
            else:
                yield None
        self.unlock()

    def get_inventory_sha1(self, inventory_id):
        """Return the sha1 hash of the inventory entry
        """
        return sha_file(self.get_inventory_xml(inventory_id))

    def get_revision_inventory(self, revision_id):
        """Return inventory of a past revision."""
        # bzr 0.0.6 imposes the constraint that the inventory_id
        # must be the same as its revision, so this is trivial.
        if revision_id is None:
            from bzrlib.inventory import Inventory
            return Inventory(self.get_root_id())
        else:
            return self.get_inventory(revision_id)
791
def revision_history(self):
644
853
That is equivalent to the number of revisions committed to
647
>>> b = ScratchBranch()
650
>>> b.commit('no foo')
654
856
return len(self.revision_history())
657
859
def last_patch(self):
658
860
"""Return last patch hash, or None if no history.
660
>>> ScratchBranch().last_patch() == None
663
862
ph = self.revision_history()
670
def lookup_revision(self, revno):
671
"""Return revision hash for revision number."""

    def missing_revisions(self, other, stop_revision=None, diverged_ok=False):
        """If self and other have not diverged, return a list of the revisions
        present in other, but missing from self.

        >>> from bzrlib.commit import commit
        >>> bzrlib.trace.silent = True
        >>> br1 = ScratchBranch()
        >>> br2 = ScratchBranch()
        >>> br1.missing_revisions(br2)
        []
        >>> commit(br2, "lala!", rev_id="REVISION-ID-1")
        >>> br1.missing_revisions(br2)
        [u'REVISION-ID-1']
        >>> br2.missing_revisions(br1)
        []
        >>> commit(br1, "lala!", rev_id="REVISION-ID-1")
        >>> br1.missing_revisions(br2)
        []
        >>> commit(br2, "lala!", rev_id="REVISION-ID-2A")
        >>> br1.missing_revisions(br2)
        [u'REVISION-ID-2A']
        >>> commit(br1, "lala!", rev_id="REVISION-ID-2B")
        >>> br1.missing_revisions(br2)
        Traceback (most recent call last):
        DivergedBranches: These branches have diverged.
        """
        self_history = self.revision_history()
        self_len = len(self_history)
        other_history = other.revision_history()
        other_len = len(other_history)
        common_index = min(self_len, other_len) - 1
        if common_index >= 0 and \
           self_history[common_index] != other_history[common_index]:
            raise DivergedBranches(self, other)

        if stop_revision is None:
            stop_revision = other_len
        elif stop_revision > other_len:
            raise bzrlib.errors.NoSuchRevision(self, stop_revision)

        return other_history[self_len:stop_revision]

    def update_revisions(self, other, stop_revision=None):
        """Pull in all new revisions from other branch.
        """
        from bzrlib.fetch import greedy_fetch
        from bzrlib.revision import get_intervening_revisions

        pb = bzrlib.ui.ui_factory.progress_bar()
        pb.update('comparing histories')
        if stop_revision is None:
            other_revision = other.last_patch()
        else:
            other_revision = other.get_rev_id(stop_revision)
        count = greedy_fetch(self, other, other_revision, pb)[0]
        try:
            revision_ids = self.missing_revisions(other, stop_revision)
        except DivergedBranches, e:
            try:
                revision_ids = get_intervening_revisions(self.last_patch(),
                                                         other_revision, self)
                assert self.last_patch() not in revision_ids
            except bzrlib.errors.NotAncestor:
                raise e

        self.append_revision(*revision_ids)
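
    # Sketch of a simple pull between two branches (hedged: the paths are
    # placeholders and the two branches are assumed to share ancestry):
    #
    #     src = Branch.open_containing('/path/to/upstream')
    #     dst = Branch.open_containing('.')
    #     dst.update_revisions(src)     # fetch, then extend revision-history
    #
    # If the histories have diverged and no intervening path can be found,
    # the DivergedBranches error propagates to the caller instead of guessing.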

    def install_revisions(self, other, revision_ids, pb):
        # We are going to iterate this many times, so make sure
        # that it is a list, and not a generator
        revision_ids = list(revision_ids)
        if hasattr(other.revision_store, "prefetch"):
            other.revision_store.prefetch(revision_ids)
        if hasattr(other.inventory_store, "prefetch"):
            other.inventory_store.prefetch(revision_ids)

        if pb is None:
            pb = bzrlib.ui.ui_factory.progress_bar()

        # This entire next section is generally done
        # with either generators, or bulk updates
        inventories = other.get_inventories(revision_ids, permit_failure=True)

        needed_texts = set()
        failures = set()
        good_revisions = set()
        for i, (inv, rev_id) in enumerate(zip(inventories, revision_ids)):
            pb.update('fetching revision', i+1, len(revision_ids))

            # We don't really need to get the revision here, because
            # the only thing we needed was the inventory_id, which now
            # is (by design) identical to the revision_id
            # try:
            #     rev = other.get_revision(rev_id)
            # except bzrlib.errors.NoSuchRevision:
            #     failures.add(rev_id)
            #     continue

            if inv is None:
                failures.add(rev_id)
                continue

            good_revisions.add(rev_id)

            text_ids = []
            for key, entry in inv.iter_entries():
                if entry.text_id is None:
                    continue
                text_ids.append(entry.text_id)

            has_ids = self.text_store.has(text_ids)
            for has, text_id in zip(has_ids, text_ids):
                if not has:
                    needed_texts.add(text_id)

        pb.clear()

        count, cp_fail = self.text_store.copy_multi(other.text_store,
                                                    needed_texts)
        #print "Added %d texts." % count
        count, cp_fail = self.inventory_store.copy_multi(other.inventory_store,
                                                         good_revisions)
        #print "Added %d inventories." % count
        count, cp_fail = self.revision_store.copy_multi(other.revision_store,
                                                        good_revisions,
                                                        permit_failure=True)
        assert len(cp_fail) == 0
        return count, failures

    def commit(self, *args, **kw):
        from bzrlib.commit import commit
        commit(self, *args, **kw)

    def revision_id_to_revno(self, revision_id):
        """Given a revision id, return its revno"""
        history = self.revision_history()
        try:
            return history.index(revision_id) + 1
        except ValueError:
            raise bzrlib.errors.NoSuchRevision(self, revision_id)

    def get_rev_id(self, revno, history=None):
        """Find the revision id of the specified revno."""
        if revno == 0:
            return None
        if history is None:
            history = self.revision_history()
        elif revno <= 0 or revno > len(history):
            raise bzrlib.errors.NoSuchRevision(self, revno)
        return history[revno - 1]

    def revision_tree(self, revision_id):
        """Return Tree for a revision on this branch."""
        if revision_id is None:
            return EmptyTree()
        inv = self.get_revision_inventory(revision_id)
        return RevisionTree(self.text_store, inv)
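
    # revno <-> revision id round trip (illustration; assumes the branch has
    # at least one commit):
    #
    #     rid = self.get_rev_id(1)                   # first mainline revision
    #     assert self.revision_id_to_revno(rid) == 1
    #     self.get_rev_id(0) is None                 # 0 is the null revision
    #
    # revision_tree(None) correspondingly returns an EmptyTree.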

    def write_log(self, show_timezone='original', verbose=False):
        """Write out human-readable log of commits to this branch.

        utc -- If true, show dates in universal time, not local time."""
        ## TODO: Option to choose either original, utc or local timezone
        revno = 1
        precursor = None
        for p in self.revision_history():
            print '-' * 40
            print 'revno:', revno
            ## TODO: Show hash if --id is given.
            ##print 'revision-hash:', p
            rev = self.get_revision(p)
            print 'committer:', rev.committer
            print 'timestamp: %s' % (format_date(rev.timestamp, rev.timezone or 0,
                                                 show_timezone))

            ## opportunistic consistency check, same as check_patch_chaining
            if rev.precursor != precursor:
                raise BzrError("mismatched precursor!")

            print 'message:'
            if not rev.message:
                print '  (no message)'
            else:
                for l in rev.message.split('\n'):
                    print '  ' + l

            if verbose == True and precursor != None:
                print 'changed files:'
                tree = self.revision_tree(p)
                prevtree = self.revision_tree(precursor)

                for file_state, fid, old_name, new_name, kind in \
                                        diff_trees(prevtree, tree, ):
                    if file_state == 'A' or file_state == 'M':
                        show_status(file_state, kind, new_name)
                    elif file_state == 'D':
                        show_status(file_state, kind, old_name)
                    elif file_state == 'R':
                        show_status(file_state, kind,
                                    old_name + ' => ' + new_name)

            revno += 1
            precursor = p

    def rename_one(self, from_rel, to_rel):
        """Rename one file.

        This can change the directory or the filename or both.
        """
        tree = self.working_tree()
        inv = tree.inventory
        if not tree.has_filename(from_rel):
            raise BzrError("can't rename: old working file %r does not exist" % from_rel)
        if tree.has_filename(to_rel):
            raise BzrError("can't rename: new working file %r already exists" % to_rel)

        file_id = inv.path2id(from_rel)
        if file_id is None:
            raise BzrError("can't rename: old name %r is not versioned" % from_rel)

        if inv.path2id(to_rel):
            raise BzrError("can't rename: new name %r is already versioned" % to_rel)

        to_dir, to_tail = os.path.split(to_rel)
        to_dir_id = inv.path2id(to_dir)
        if to_dir_id is None and to_dir != '':
            raise BzrError("can't determine destination directory id for %r" % to_dir)

        mutter("rename_one:")
        mutter("  file_id    {%s}" % file_id)
        mutter("  from_rel   %r" % from_rel)
        mutter("  to_rel     %r" % to_rel)
        mutter("  to_dir     %r" % to_dir)
        mutter("  to_dir_id  {%s}" % to_dir_id)

        inv.rename(file_id, to_dir_id, to_tail)

        from_abs = self.abspath(from_rel)
        to_abs = self.abspath(to_rel)
        try:
            os.rename(from_abs, to_abs)
        except OSError, e:
            raise BzrError("failed to rename %r to %r: %s"
                           % (from_abs, to_abs, e[1]),
                           ["rename rolled back"])

        self._write_inventory(inv)

    def move(self, from_paths, to_name):
        """Note that to_name is only the last component of the new name;
        this doesn't change the directory.

        This returns a list of (from_path, to_path) pairs for each
        entry that is moved.
        """
        result = []
        ## TODO: Option to move IDs only
        assert not isinstance(from_paths, basestring)
        tree = self.working_tree()
        inv = tree.inventory
        to_abs = self.abspath(to_name)
        if not isdir(to_abs):
            raise BzrError("destination %r is not a directory" % to_abs)
        if not tree.has_filename(to_name):
            raise BzrError("destination %r not in working directory" % to_abs)
        to_dir_id = inv.path2id(to_name)
        if to_dir_id is None and to_name != '':
            raise BzrError("destination %r is not a versioned directory" % to_name)
        to_dir_ie = inv[to_dir_id]
        if to_dir_ie.kind not in ('directory', 'root_directory'):
            raise BzrError("destination %r is not a directory" % to_abs)

        to_idpath = inv.get_idpath(to_dir_id)

        for f in from_paths:
            if not tree.has_filename(f):
                raise BzrError("%r does not exist in working tree" % f)
            f_id = inv.path2id(f)
            if f_id is None:
                raise BzrError("%r is not versioned" % f)
            name_tail = splitpath(f)[-1]
            dest_path = appendpath(to_name, name_tail)
            if tree.has_filename(dest_path):
                raise BzrError("destination %r already exists" % dest_path)
            if f_id in to_idpath:
                raise BzrError("can't move %r to a subdirectory of itself" % f)

        # OK, so there's a race here, it's possible that someone will
        # create a file in this interval and then the rename might be
        # left half-done.  But we should have caught most problems.

        for f in from_paths:
            name_tail = splitpath(f)[-1]
            dest_path = appendpath(to_name, name_tail)
            result.append((f, dest_path))
            inv.rename(inv.path2id(f), to_dir_id, name_tail)
            try:
                os.rename(self.abspath(f), self.abspath(dest_path))
            except OSError, e:
                raise BzrError("failed to rename %r to %r: %s" % (f, dest_path, e[1]),
                               ["rename rolled back"])

        self._write_inventory(inv)
        return result

    def show_status(self, show_all=False):
        """Display single-line status for non-ignored working files.

        The list is shown sorted in order by file name.

        TODO: Get state for single files.
        """
        # We have to build everything into a list first so that it can be
        # sorted by name, incorporating all the different sources.

        # FIXME: Rather than getting things in random order and then sorting,
        # just step through in order.

        # Interesting case: the old ID for a file has been removed,
        # but a new file has been created under that name.

        old = self.basis_tree()
        new = self.working_tree()

        for fs, fid, oldname, newname, kind in diff_trees(old, new):
            if fs == 'R':
                show_status(fs, kind,
                            oldname + ' => ' + newname)
            elif fs == 'A' or fs == 'M':
                show_status(fs, kind, newname)
            elif fs == 'D':
                show_status(fs, kind, oldname)
            elif fs == '.':
                if show_all:
                    show_status(fs, kind, newname)
            elif fs == 'I':
                if show_all:
                    show_status(fs, kind, newname)
            elif fs == '?':
                show_status(fs, kind, newname)
            else:
                raise BzrError("weird file state %r" % ((fs, fid),))

    def revert(self, filenames, old_tree=None, backups=True):
        """Restore selected files to the versions from a previous tree.

        backups
            If true (default) backups are made of files before
            they're renamed.
        """
        from bzrlib.errors import NotVersionedError, BzrError
        from bzrlib.atomicfile import AtomicFile
        from bzrlib.osutils import backup_file

        inv = self.read_working_inventory()
        if old_tree is None:
            old_tree = self.basis_tree()
        old_inv = old_tree.inventory

        nids = []
        for fn in filenames:
            file_id = inv.path2id(fn)
            if not file_id:
                raise NotVersionedError("not a versioned file", fn)
            if not old_inv.has_id(file_id):
                raise BzrError("file not present in old tree", fn, file_id)
            nids.append((fn, file_id))

        # TODO: Rename back if it was previously at a different location

        # TODO: If given a directory, restore the entire contents from
        # the previous version.

        # TODO: Make a backup to a temporary file.

        # TODO: If the file previously didn't exist, delete it?
        for fn, file_id in nids:
            if backups:
                backup_file(fn)

            f = AtomicFile(fn, 'wb')
            try:
                f.write(old_tree.get_file(file_id).read())
                f.commit()
            finally:
                f.close()

    def pending_merges(self):
        """Return a list of pending merges.

        These are revisions that have been merged into the working
        directory but not yet committed.
        """
        cfn = self._rel_controlfilename('pending-merges')
        if not self._transport.has(cfn):
            return []
        p = []
        for l in self.controlfile('pending-merges', 'r').readlines():
            p.append(l.rstrip('\n'))
        return p

    def add_pending_merge(self, *revision_ids):
        from bzrlib.revision import validate_revision_id

        for rev_id in revision_ids:
            validate_revision_id(rev_id)

        p = self.pending_merges()
        updated = False
        for rev_id in revision_ids:
            if rev_id in p:
                continue
            p.append(rev_id)
            updated = True
        if updated:
            self.set_pending_merges(p)

    def set_pending_merges(self, rev_list):
        self.put_controlfile('pending-merges', '\n'.join(rev_list))
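
    # Pending-merge bookkeeping sketch ('MERGED-REV-ID' is a placeholder id):
    #
    #     b.add_pending_merge('MERGED-REV-ID')
    #     'MERGED-REV-ID' in b.pending_merges()     # True, until committed
    #     b.set_pending_merges([])                  # clear after committing
    #
    # Ids already present are not re-added, so add_pending_merge is
    # effectively idempotent.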

    def get_parent(self):
        """Return the parent location of the branch.

        This is the default location for push/pull/missing.  The usual
        pattern is that the user can override it by specifying a
        location.
        """
        import errno
        _locs = ['parent', 'pull', 'x-pull']
        for l in _locs:
            try:
                return self.controlfile(l, 'r').read().strip('\n')
            except IOError, e:
                if e.errno != errno.ENOENT:
                    raise
        return None

    def set_parent(self, url):
        # TODO: Maybe delete old location files?
        from bzrlib.atomicfile import AtomicFile
        f = AtomicFile(self.controlfilename('parent'))
        try:
            f.write(url + '\n')
            f.commit()
        finally:
            f.close()

    def check_revno(self, revno):
        """Check whether a revno corresponds to any revision.

        Zero (the NULL revision) is considered valid.
        """
        if revno != 0:
            self.check_real_revno(revno)

    def check_real_revno(self, revno):
        """Check whether a revno corresponds to a real revision.

        Zero (the NULL revision) is considered invalid.
        """
        if revno < 1 or revno > self.revno():
            raise InvalidRevisionNumber(revno)


class ScratchBranch(_Branch):
    """Special test class: a branch that cleans up after itself.

    >>> b = ScratchBranch()
    >>> isdir(b.base)
    True
    """
    def __init__(self, files=[], dirs=[], base=None):
        """Make a test branch.

        This creates a temporary directory and runs init-tree in it.

        If any files are listed, they are created in the working copy.
        """
        from tempfile import mkdtemp
        init = False
        if base is None:
            base = mkdtemp()
            init = True
        _Branch.__init__(self, base, init=init)
        for d in dirs:
            self._transport.mkdir(d)

        for f in files:
            self._transport.put(f, 'content of %s' % f)

    def clone(self):
        """
        >>> orig = ScratchBranch(files=["file1", "file2"])
        >>> clone = orig.clone()
        >>> os.path.samefile(orig.base, clone.base)
        False
        >>> os.path.isfile(os.path.join(clone.base, "file1"))
        True
        """
        from shutil import copytree
        from tempfile import mkdtemp
        base = mkdtemp()
        os.rmdir(base)
        copytree(self.base, base, symlinks=True)
        return ScratchBranch(base=base)

    def __del__(self):
        self.destroy()

    def destroy(self):
        """Destroy the test branch, removing the scratch directory."""
        from shutil import rmtree
        try:
            if self.base:
                mutter("delete ScratchBranch %s" % self.base)
                rmtree(self.base)
        except OSError, e:
            # Work around for shutil.rmtree failing on Windows when
            # readonly files are encountered
            mutter("hit exception in destroying ScratchBranch: %s" % e)
            for root, dirs, files in os.walk(self.base, topdown=False):
                for name in files:
                    os.chmod(os.path.join(root, name), 0700)
            rmtree(self.base)
        self._transport = None
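

# A minimal sketch of how ScratchBranch is typically used in tests (the file
# and directory names are arbitrary example values; the scratch directory is
# removed again when the branch object is destroyed).  This helper is for
# illustration only and is never called by the module itself:
def _scratch_branch_example():
    b = ScratchBranch(files=['hello.txt'], dirs=['subdir'])
    b.add(['hello.txt'])
    b.commit('initial commit')
    return b.revno()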