# Copyright (C) 2005-2010 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Implementation of Transport over SFTP, using paramiko."""
19
# TODO: Remove the transport-based lock_read and lock_write methods. They'll
20
# then raise TransportNotPossible, which will break remote access to any
21
# formats which rely on OS-level locks. That should be fine as those formats
22
# are pretty old, but these combinations may have to be removed from the test
23
# suite. Those formats all date back to 0.7; so we should be able to remove
24
# these methods when we officially drop support for those formats.
44
from bzrlib.errors import (FileExists,
                           NoSuchFile, PathNotChild,
                           TransportError,
                           LockError,
                           PathError,
                           ParamikoNotPresent,
                           )
from bzrlib.osutils import pathjoin, fancy_rename, getcwd
from bzrlib.symbol_versioning import (
from bzrlib.trace import mutter, warning
from bzrlib.transport import (
    FileFileStream,
    _file_streams,
    ssh,
    ConnectedTransport,
    )
# Disable one particular warning that comes from paramiko in Python2.5; if
# this is emitted at the wrong time it tends to cause spurious test failures
# or at least noise in the test case::
#
# [1770/7639 in 86s, 1 known failures, 50 skipped, 2 missing features]
# test_permissions.TestSftpPermissions.test_new_files
# /var/lib/python-support/python2.5/paramiko/message.py:226: DeprecationWarning: integer argument expected, got float
#  self.packet.write(struct.pack('>I', n))
warnings.filterwarnings('ignore',
        'integer argument expected, got float',
        category=DeprecationWarning,
        module='paramiko.message')
try:
    import paramiko
except ImportError, e:
    raise ParamikoNotPresent(e)

from paramiko.sftp import (SFTP_FLAG_WRITE, SFTP_FLAG_CREATE,
                           SFTP_FLAG_EXCL, SFTP_FLAG_TRUNC,
                           SFTP_OK, CMD_HANDLE, CMD_OPEN)
from paramiko.sftp_attr import SFTPAttributes
from paramiko.sftp_file import SFTPFile
_paramiko_version = getattr(paramiko, '__version_info__', (0, 0, 0))
# don't use prefetch unless paramiko version >= 1.5.5 (there were bugs earlier)
_default_do_prefetch = (_paramiko_version >= (1, 5, 5))
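# For example, a paramiko 1.7.7 install reports __version_info__ == (1, 7, 7),
# so prefetch is enabled; very old installs without __version_info__ fall back
# to (0, 0, 0) above and never turn prefetch on.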
class SFTPLock(object):
    """This fakes a lock in a remote location.

    A present lock is indicated just by the existence of a file.  This
    doesn't work well on all transports and they are only used in
    deprecated storage formats.
    """

    __slots__ = ['path', 'lock_path', 'lock_file', 'transport']

    def __init__(self, path, transport):
        self.lock_file = None
        self.path = path
        self.lock_path = path + '.write-lock'
        self.transport = transport
        try:
            # RBC 20060103 FIXME should we be using private methods here ?
            abspath = transport._remote_path(self.lock_path)
            self.lock_file = transport._sftp_open_exclusive(abspath)
        except FileExists:
            raise LockError('File %r already locked' % (self.path,))

    def unlock(self):
        if not self.lock_file:
            return
        self.lock_file.close()
        self.lock_file = None
        try:
            self.transport.delete(self.lock_path)
        except (NoSuchFile,):
            # What specific errors should we catch here?
            pass
class _SFTPReadvHelper(object):
    """A class to help with managing the state of a readv request."""

    # See _get_requests for an explanation.
    _max_request_size = 32768

    def __init__(self, original_offsets, relpath, _report_activity):
        """Create a new readv helper.

        :param original_offsets: The original requests given by the caller of
            readv()
        :param relpath: The name of the file (if known)
        :param _report_activity: A Transport._report_activity bound method,
            to be called as data arrives.
        """
        self.original_offsets = list(original_offsets)
        self.relpath = relpath
        self._report_activity = _report_activity
    def _get_requests(self):
        """Break up the offsets into individual requests over sftp.

        The SFTP spec only requires implementers to support 32kB requests. We
        could try something larger (openssh supports 64kB), but then we have to
        handle requests that fail.
        So instead, we just break up our maximum chunks into 32kB chunks, and
        asynchronously request them.
        Newer versions of paramiko would do the chunking for us, but we want to
        start processing results right away, so we do it ourselves.
        """
        # TODO: Because we issue async requests, we don't 'fudge' any extra
        #       data. I'm not 100% sure that is the best choice.

        # The first thing we do is to collapse the individual requests as much
        # as possible, so we don't issue requests <32kB
        sorted_offsets = sorted(self.original_offsets)
        coalesced = list(ConnectedTransport._coalesce_offsets(sorted_offsets,
                                                        limit=0, fudge_factor=0))
        requests = []
        for c_offset in coalesced:
            start = c_offset.start
            size = c_offset.length

            # Break this up into 32kB requests
            while size > 0:
                next_size = min(size, self._max_request_size)
                requests.append((start, next_size))
                size -= next_size
                start += next_size
        if 'sftp' in debug.debug_flags:
            mutter('SFTP.readv(%s) %s offsets => %s coalesced => %s requests',
                   self.relpath, len(sorted_offsets), len(coalesced),
                   len(requests))
        return requests
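    # Example of the chunking performed by _get_requests(): a single coalesced
    # range of 80000 bytes starting at offset 0 becomes three requests,
    #   [(0, 32768), (32768, 32768), (65536, 14464)]
    # i.e. two full 32kB chunks plus the 14464-byte remainder.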
    def request_and_yield_offsets(self, fp):
        """Request the data from the remote machine, yielding the results.

        :param fp: A Paramiko SFTPFile object that supports readv.
        :return: Yield the data requested by the original readv caller, one by
            one.
        """
        requests = self._get_requests()
        offset_iter = iter(self.original_offsets)
        cur_offset, cur_size = offset_iter.next()
        # paramiko .readv() yields strings that are in the order of the requests
        # So we track the current request to know where the next data is
        # being returned from.
        input_start = None
        last_end = None
        buffered_data = []
        buffered_len = 0

        # This is used to buffer chunks which we couldn't process yet
        # It is (start, end, data) tuples.
        data_chunks = []
        # Create an 'unlimited' data stream, so we stop based on requests,
        # rather than just because the data stream ended. This lets us detect
        # short reads.
        data_stream = itertools.chain(fp.readv(requests),
                                      itertools.repeat(None))
        for (start, length), data in itertools.izip(requests, data_stream):
            if data is None:
                if cur_coalesced is not None:
                    raise errors.ShortReadvError(self.relpath,
                                                 start, length, len(data))
            if len(data) != length:
                raise errors.ShortReadvError(self.relpath,
                                             start, length, len(data))
            self._report_activity(length, 'read')
            if last_end is None:
                # This is the first request, just buffer it
                buffered_data = [data]
                buffered_len = length
                input_start = start
            elif start == last_end:
                # The data we are reading fits neatly on the previous
                # buffer, so this is all part of a larger coalesced range.
                buffered_data.append(data)
                buffered_len += length
            else:
                # We have an 'interrupt' in the data stream. So we know we are
                # at a request boundary.
                if buffered_len > 0:
                    # We haven't consumed the buffer so far, so put it into
                    # data_chunks, and continue.
                    buffered = ''.join(buffered_data)
                    data_chunks.append((input_start, buffered))
                input_start = start
                buffered_data = [data]
                buffered_len = length
            last_end = start + length
            if input_start == cur_offset and cur_size <= buffered_len:
                # Simplify the next steps a bit by transforming buffered_data
                # into a single string. We also have the nice property that
                # when there is only one string ''.join([x]) == x, so there is
                # no data copying.
                buffered = ''.join(buffered_data)
                # Clean out buffered data so that we keep memory
                # consumption low
                del buffered_data[:]
                buffered_offset = 0
                # TODO: We *could* also consider the case where cur_offset is in
                #       the buffered range, even though it doesn't *start*
                #       the buffered range. But for packs we pretty much always
                #       read in order, so you won't get any extra data in the
                #       middle.
                while (input_start == cur_offset
                       and (buffered_offset + cur_size) <= buffered_len):
                    # We've buffered enough data to process this request,
                    # spit it out
                    cur_data = buffered[buffered_offset:buffered_offset + cur_size]
                    # move the direct pointer into our buffered data
                    buffered_offset += cur_size
                    # Move the start-of-buffer pointer
                    input_start += cur_size
                    # Yield the requested data
                    yield cur_offset, cur_data
                    cur_offset, cur_size = offset_iter.next()
                # at this point, we've consumed as much of buffered as we can,
                # so break off the portion that we consumed
                if buffered_offset == len(buffered_data):
                    # No tail to leave behind
                    buffered_data = []
                    buffered_len = 0
                else:
                    buffered = buffered[buffered_offset:]
                    buffered_data = [buffered]
                    buffered_len = len(buffered)
        # now that the data stream is done, close the handle
        fp.close()
        if buffered_len:
            buffered = ''.join(buffered_data)
            data_chunks.append((input_start, buffered))
        if 'sftp' in debug.debug_flags:
            mutter('SFTP readv left with %d out-of-order bytes',
                   sum(map(lambda x: len(x[1]), data_chunks)))
        # We've processed all the readv data, at this point, anything we
        # couldn't process is in data_chunks. This doesn't happen often, so
        # this code path isn't optimized.
        # We use an interesting process for data_chunks
        # Specifically if we have "bisect_left([(start, len, entries)],
        #                                       (qstart,)])"
        # If start == qstart, then we get the specific node. Otherwise we
        # get the previous node
        while True:
            idx = bisect.bisect_left(data_chunks, (cur_offset,))
            if idx < len(data_chunks) and data_chunks[idx][0] == cur_offset:
                # The data starts here
                data = data_chunks[idx][1][:cur_size]
            elif idx > 0:
                # The data is in a portion of a previous page
                idx -= 1
                sub_offset = cur_offset - data_chunks[idx][0]
                data = data_chunks[idx][1]
                data = data[sub_offset:sub_offset + cur_size]
            else:
                # We are missing the page where the data should be found.
                break
            if len(data) != cur_size:
                raise AssertionError('We must have miscalculated.'
                    ' We expected %d bytes, but only found %d'
                    % (cur_size, len(data)))
            yield cur_offset, data
            cur_offset, cur_size = offset_iter.next()
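    # Illustration of the bisect_left() lookup used above: with
    # data_chunks == [(0, 'A'*100), (4096, 'B'*50)],
    # bisect.bisect_left(data_chunks, (4096,)) == 1 and data_chunks[1][0]
    # equals the requested offset, so the chunk is used directly; for
    # bisect.bisect_left(data_chunks, (50,)) == 1 there is no exact match, so
    # stepping back one entry finds the chunk starting at 0 that contains
    # offset 50.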
class SFTPTransport(ConnectedTransport):
    """Transport implementation for SFTP access."""

    _do_prefetch = _default_do_prefetch
    # TODO: jam 20060717 Conceivably these could be configurable, either
    #       by auto-tuning at run-time, or by a configuration (per host??)
    #       but the performance curve is pretty flat, so just going with
    #       reasonable defaults.
    _max_readv_combine = 200
    # Having to round trip to the server means waiting for a response,
    # so it is better to download extra bytes.
    # 8KiB had good performance for both local and remote network operations
    _bytes_to_read_before_seek = 8192

    # The sftp spec says that implementations SHOULD allow reads
    # to be at least 32K. paramiko.readv() does an async request
    # for the chunks. So we need to keep it within a single request
    # size for paramiko <= 1.6.1. paramiko 1.6.2 will probably chop
    # up the request itself, rather than us having to worry about it
    _max_request_size = 32768
    def __init__(self, base, _from_transport=None):
        super(SFTPTransport, self).__init__(base,
                                            _from_transport=_from_transport)

    def _remote_path(self, relpath):
        """Return the path to be passed along the sftp protocol for relpath.

        :param relpath: is a urlencoded string.
        """
        relative = urlutils.unescape(relpath).encode('utf-8')
        remote_path = self._combine_paths(self._path, relative)
        # the initial slash should be removed from the path, and treated as a
        # homedir relative path (the path begins with a double slash if it is
        # absolute).  see draft-ietf-secsh-scp-sftp-ssh-uri-03.txt
        # RBC 20060118 we are not using this as it's too user hostile. instead
        # we are following lftp and using /~/foo to mean '~/foo'
        # vila--20070602 and leave absolute paths beginning with a single slash.
        if remote_path.startswith('/~/'):
            remote_path = remote_path[3:]
        elif remote_path == '/~':
            remote_path = '.'
        return remote_path
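    # Example of the mapping above: a URL path of '/~/project/.bzr' is sent to
    # the server as 'project/.bzr' (relative to the user's home directory, the
    # lftp convention described in the comments), a bare '/~' maps to '.', and
    # any other path keeps its single leading slash and stays absolute.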
    def _create_connection(self, credentials=None):
        """Create a new connection with the provided credentials.

        :param credentials: The credentials needed to establish the connection.

        :return: The created connection and its associated credentials.

        The credentials are only the password as it may have been entered
        interactively by the user and may be different from the one provided
        in base url at transport creation time.
        """
        if credentials is None:
            password = self._password
        else:
            password = credentials

        vendor = ssh._get_ssh_vendor()
        auth = config.AuthenticationConfig()
        user = auth.get_user('ssh', self._host, self._port)
        connection = vendor.connect_sftp(self._user, password,
                                         self._host, self._port)
        return connection, (user, password)

    def disconnect(self):
        connection = self._get_connection()
        if connection is not None:
            connection.close()

    def _get_sftp(self):
        """Ensures that a connection is established"""
        connection = self._get_connection()
        if connection is None:
            # First connection ever
            connection, credentials = self._create_connection()
            self._set_connection(connection, credentials)
        return connection
    def has(self, relpath):
        """
        Does the target location exist?
        """
        try:
            self._get_sftp().stat(self._remote_path(relpath))
            # stat result is about 20 bytes, let's say
            self._report_activity(20, 'read')
            return True
        except IOError:
            return False

    def get(self, relpath):
        """Get the file at the given relative path.

        :param relpath: The relative path to the file
        """
        try:
            path = self._remote_path(relpath)
            f = self._get_sftp().file(path, mode='rb')
            if self._do_prefetch and (getattr(f, 'prefetch', None) is not None):
                f.prefetch()
            return f
        except (IOError, paramiko.SSHException), e:
            self._translate_io_exception(e, path, ': error retrieving',
                failure_exc=errors.ReadError)

    def get_bytes(self, relpath):
        # reimplement this here so that we can report how many bytes came back
        f = self.get(relpath)
        try:
            bytes = f.read()
            self._report_activity(len(bytes), 'read')
            return bytes
        finally:
            f.close()
    def _readv(self, relpath, offsets):
        """See Transport.readv()"""
        # We overload the default readv() because we want to use a file
        # that does not have prefetch enabled.
        # Also, if we have a new paramiko, it implements an async readv()
        try:
            path = self._remote_path(relpath)
            fp = self._get_sftp().file(path, mode='rb')
            readv = getattr(fp, 'readv', None)
            if readv:
                return self._sftp_readv(fp, offsets, relpath)
            if 'sftp' in debug.debug_flags:
                mutter('seek and read %s offsets', len(offsets))
            return self._seek_and_read(fp, offsets, relpath)
        except (IOError, paramiko.SSHException), e:
            self._translate_io_exception(e, path, ': error retrieving')

    def recommended_page_size(self):
        """See Transport.recommended_page_size().

        For SFTP we suggest a large page size to reduce the overhead
        introduced by latency.
        """

    def _sftp_readv(self, fp, offsets, relpath):
        """Use the readv() member of fp to do async readv.

        Then read them using paramiko.readv(). paramiko.readv()
        does not support ranges > 64K, so it caps the request size, and
        just reads until it gets all the stuff it wants.
        """
        helper = _SFTPReadvHelper(offsets, relpath, self._report_activity)
        return helper.request_and_yield_offsets(fp)
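    # Usage sketch (illustrative only): Transport.readv yields (offset, data)
    # pairs in the order the caller asked for them, so client code typically
    # looks like:
    #
    #     t = get_transport('sftp://host/path')          # hypothetical setup
    #     for offset, data in t.readv('some-file', [(0, 100), (4096, 200)]):
    #         process(offset, data)                      # process() is hypothetical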
    def put_file(self, relpath, f, mode=None):
        """
        Copy the file-like object into the location.

        :param relpath: Location to put the contents, relative to base.
        :param f: File-like object.
        :param mode: The final mode for the file
        """
        final_path = self._remote_path(relpath)
        return self._put(final_path, f, mode=mode)

    def _put(self, abspath, f, mode=None):
        """Helper function so both put() and copy_abspaths can reuse the code"""
        tmp_abspath = '%s.tmp.%.9f.%d.%d' % (abspath, time.time(),
                        os.getpid(), random.randint(0,0x7FFFFFFF))
        fout = self._sftp_open_exclusive(tmp_abspath, mode=mode)
        try:
            try:
                fout.set_pipelined(True)
                length = self._pump(f, fout)
            except (IOError, paramiko.SSHException), e:
                self._translate_io_exception(e, tmp_abspath)
            # XXX: This doesn't truly help like we would like it to.
            #      The problem is that openssh strips sticky bits. So while we
            #      can properly set group write permission, we lose the group
            #      sticky bit. So it is probably best to stop chmodding, and
            #      just tell users that they need to set the umask correctly.
            #      The attr.st_mode = mode, in _sftp_open_exclusive
            #      will handle when the user wants the final mode to be more
            #      restrictive. And then we avoid a round trip. Unless
            #      paramiko decides to expose an async chmod()

            # This is designed to chmod() right before we close.
            # Because we set_pipelined() earlier, theoretically we might
            # avoid the round trip for fout.close()
            if mode is not None:
                self._get_sftp().chmod(tmp_abspath, mode)
            fout.close()
            self._rename_and_overwrite(tmp_abspath, abspath)
            return length
        except Exception, e:
            # If we fail, try to clean up the temporary file
            # before we throw the exception
            # but don't let another exception mess things up
            # Write out the traceback, because otherwise
            # the catch and throw destroys it
            import traceback
            mutter(traceback.format_exc())
            try:
                self._get_sftp().remove(tmp_abspath)
            except:
                # raise the saved exception
                raise e
            # raise the original with its traceback if we can.
            raise
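    # For illustration: for a target of '/home/user/foo', the temporary name
    # built above looks like '/home/user/foo.tmp.1297891233.123456789.5432.1234567'
    # (target path, time.time() to nine decimal places, pid, random integer),
    # so concurrent writers land on different names and the final
    # _rename_and_overwrite() moves the completed file into place.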
    def _put_non_atomic_helper(self, relpath, writer, mode=None,
                               create_parent_dir=False,
                               dir_mode=None):
        abspath = self._remote_path(relpath)

        # TODO: jam 20060816 paramiko doesn't publicly expose a way to
        #       set the file mode at create time. If it does, use it.
        #       But for now, we just chmod later anyway.

        def _open_and_write_file():
            """Try to open the target file, raise error on failure"""
            try:
                fout = self._get_sftp().file(abspath, mode='wb')
                fout.set_pipelined(True)
                writer(fout)
            except (paramiko.SSHException, IOError), e:
                self._translate_io_exception(e, abspath,
                                             ': unable to open')

            # This is designed to chmod() right before we close.
            # Because we set_pipelined() earlier, theoretically we might
            # avoid the round trip for fout.close()
            if mode is not None:
                self._get_sftp().chmod(abspath, mode)
            fout.close()

        if not create_parent_dir:
            _open_and_write_file()
            return

        # Try error handling to create the parent directory if we need to
        try:
            _open_and_write_file()
        except NoSuchFile:
            # Try to create the parent directory, and then go back to
            # writing the file.
            parent_dir = os.path.dirname(abspath)
            self._mkdir(parent_dir, dir_mode)
            _open_and_write_file()
    def put_file_non_atomic(self, relpath, f, mode=None,
                            create_parent_dir=False,
                            dir_mode=None):
        """Copy the file-like object into the target location.

        This function is not strictly safe to use. It is only meant to
        be used when you already know that the target does not exist.
        It is not safe, because it will open and truncate the remote
        file. So there may be a time when the file has invalid contents.

        :param relpath: The remote location to put the contents.
        :param f: File-like object.
        :param mode: Possible access permissions for new file.
                     None means do not set remote permissions.
        :param create_parent_dir: If we cannot create the target file because
                     the parent directory does not exist, go ahead and
                     create it, and then try again.
        """
        def writer(fout):
            self._pump(f, fout)
        self._put_non_atomic_helper(relpath, writer, mode=mode,
                                    create_parent_dir=create_parent_dir,
                                    dir_mode=dir_mode)

    def put_bytes_non_atomic(self, relpath, bytes, mode=None,
                             create_parent_dir=False,
                             dir_mode=None):
        def writer(fout):
            fout.write(bytes)
        self._put_non_atomic_helper(relpath, writer, mode=mode,
                                    create_parent_dir=create_parent_dir,
                                    dir_mode=dir_mode)
    def iter_files_recursive(self):
        """Walk the relative paths of all files in this transport."""
        # progress is handled by list_dir
        queue = list(self.list_dir('.'))
        while queue:
            relpath = queue.pop(0)
            st = self.stat(relpath)
            if stat.S_ISDIR(st.st_mode):
                for i, basename in enumerate(self.list_dir(relpath)):
                    queue.insert(i, relpath+'/'+basename)
            else:
                yield relpath

    def _mkdir(self, abspath, mode=None):
        if mode is None:
            local_mode = 0777
        else:
            local_mode = mode
        try:
            self._report_activity(len(abspath), 'write')
            self._get_sftp().mkdir(abspath, local_mode)
            self._report_activity(1, 'read')
            if mode is not None:
                # chmod a dir through sftp will erase any sgid bit set
                # on the server side.  So, if the mode bits are already
                # set, avoid the chmod.  If the mode is not fine but
                # the sgid bit is set, report a warning to the user
                # with the umask fix.
                stat = self._get_sftp().lstat(abspath)
                mode = mode & 0777 # can't set special bits anyway
                if mode != stat.st_mode & 0777:
                    if stat.st_mode & 06000:
                        warning('About to chmod %s over sftp, which will result'
                                ' in its suid or sgid bits being cleared.  If'
                                ' you want to preserve those bits, change your '
                                ' environment on the server to use umask 0%03o.'
                                % (abspath, 0777 - mode))
                    self._get_sftp().chmod(abspath, mode=mode)
        except (paramiko.SSHException, IOError), e:
            self._translate_io_exception(e, abspath, ': unable to mkdir',
                failure_exc=FileExists)

    def mkdir(self, relpath, mode=None):
        """Create a directory at the given path."""
        self._mkdir(self._remote_path(relpath), mode=mode)
    def open_write_stream(self, relpath, mode=None):
        """See Transport.open_write_stream."""
        # initialise the file to zero-length
        # this is three round trips, but we don't use this
        # api more than once per write_group at the moment so
        # it is a tolerable overhead. Better would be to truncate
        # the file after opening. RBC 20070805
        self.put_bytes_non_atomic(relpath, "", mode)
        abspath = self._remote_path(relpath)
        # TODO: jam 20060816 paramiko doesn't publicly expose a way to
        #       set the file mode at create time. If it does, use it.
        #       But for now, we just chmod later anyway.
        try:
            handle = self._get_sftp().file(abspath, mode='wb')
            handle.set_pipelined(True)
        except (paramiko.SSHException, IOError), e:
            self._translate_io_exception(e, abspath,
                                         ': unable to open')
        _file_streams[self.abspath(relpath)] = handle
        return FileFileStream(self, relpath, handle)
    def _translate_io_exception(self, e, path, more_info='',
                                failure_exc=PathError):
        """Translate a paramiko or IOError into a friendlier exception.

        :param e: The original exception
        :param path: The path in question when the error is raised
        :param more_info: Extra information that can be included,
                          such as what was going on
        :param failure_exc: Paramiko has the super fun ability to raise completely
                            opaque errors that just set "e.args = ('Failure',)"
                            with no more information.
                            If this parameter is set, it defines the exception
                            to raise in these cases.
        """
        # paramiko seems to generate detailless errors.
        self._translate_error(e, path, raise_generic=False)
        if getattr(e, 'args', None) is not None:
            if (e.args == ('No such file or directory',) or
                e.args == ('No such file',)):
                raise NoSuchFile(path, str(e) + more_info)
            if (e.args == ('mkdir failed',) or
                e.args[0].startswith('syserr: File exists')):
                raise FileExists(path, str(e) + more_info)
            # strange but true, for the paramiko server.
            if (e.args == ('Failure',)):
                raise failure_exc(path, str(e) + more_info)
            # Can be something like args = ('Directory not empty:
            # '/srv/bazaar.launchpad.net/blah...: '
            # [Errno 39] Directory not empty',)
            if (e.args[0].startswith('Directory not empty: ')
                or getattr(e, 'errno', None) == errno.ENOTEMPTY):
                raise errors.DirectoryNotEmpty(path, str(e))
            if e.args == ('Operation unsupported',):
                raise errors.TransportNotPossible()
            mutter('Raising exception with args %s', e.args)
        if getattr(e, 'errno', None) is not None:
            mutter('Raising exception with errno %s', e.errno)
        raise e
    def append_file(self, relpath, f, mode=None):
        """
        Append the text in the file-like object into the final
        location.
        """
        try:
            path = self._remote_path(relpath)
            fout = self._get_sftp().file(path, 'ab')
            if mode is not None:
                self._get_sftp().chmod(path, mode)
            result = fout.tell()
            self._pump(f, fout)
            return result
        except (IOError, paramiko.SSHException), e:
            self._translate_io_exception(e, relpath, ': unable to append')
    def rename(self, rel_from, rel_to):
        """Rename without special overwriting"""
        try:
            self._get_sftp().rename(self._remote_path(rel_from),
                              self._remote_path(rel_to))
        except (IOError, paramiko.SSHException), e:
            self._translate_io_exception(e, rel_from,
                ': unable to rename to %r' % (rel_to))

    def _rename_and_overwrite(self, abs_from, abs_to):
        """Do a fancy rename on the remote server.

        Using the implementation provided by osutils.
        """
        try:
            sftp = self._get_sftp()
            fancy_rename(abs_from, abs_to,
                         rename_func=sftp.rename,
                         unlink_func=sftp.remove)
        except (IOError, paramiko.SSHException), e:
            self._translate_io_exception(e, abs_from,
                ': unable to rename to %r' % (abs_to))
    def move(self, rel_from, rel_to):
        """Move the item at rel_from to the location at rel_to"""
        path_from = self._remote_path(rel_from)
        path_to = self._remote_path(rel_to)
        self._rename_and_overwrite(path_from, path_to)

    def delete(self, relpath):
        """Delete the item at relpath"""
        path = self._remote_path(relpath)
        try:
            self._get_sftp().remove(path)
        except (IOError, paramiko.SSHException), e:
            self._translate_io_exception(e, path, ': unable to delete')
    def external_url(self):
        """See bzrlib.transport.Transport.external_url."""
        # the external path for SFTP is the base
        return self.base

    def listable(self):
        """Return True if this store supports listing."""
        return True
    def list_dir(self, relpath):
        """
        Return a list of all files at the given location.
        """
        # does anything actually use this?
        # This is at least used by copy_tree for remote upgrades.
        # -- David Allouche 2006-08-11
        path = self._remote_path(relpath)
        try:
            entries = self._get_sftp().listdir(path)
            self._report_activity(sum(map(len, entries)), 'read')
        except (IOError, paramiko.SSHException), e:
            self._translate_io_exception(e, path, ': failed to list_dir')
        return [urlutils.escape(entry) for entry in entries]
    def rmdir(self, relpath):
        """See Transport.rmdir."""
        path = self._remote_path(relpath)
        try:
            return self._get_sftp().rmdir(path)
        except (IOError, paramiko.SSHException), e:
            self._translate_io_exception(e, path, ': failed to rmdir')

    def stat(self, relpath):
        """Return the stat information for a file."""
        path = self._remote_path(relpath)
        try:
            return self._get_sftp().lstat(path)
        except (IOError, paramiko.SSHException), e:
            self._translate_io_exception(e, path, ': unable to stat')

    def readlink(self, relpath):
        """See Transport.readlink."""
        path = self._remote_path(relpath)
        try:
            return self._get_sftp().readlink(path)
        except (IOError, paramiko.SSHException), e:
            self._translate_io_exception(e, path, ': unable to readlink')
    def symlink(self, source, link_name):
        """See Transport.symlink."""
        try:
            conn = self._get_sftp()
            sftp_retval = conn.symlink(source, link_name)
            if SFTP_OK != sftp_retval:
                raise TransportError(
                    '%r: unable to create symlink to %r' % (link_name, source),
                    )
        except (IOError, paramiko.SSHException), e:
            self._translate_io_exception(e, link_name,
                ': unable to create symlink to %r' % (source))
    def lock_read(self, relpath):
        """
        Lock the given file for shared (read) access.

        :return: A lock object, which has an unlock() member function
        """
        # FIXME: there should be something clever i can do here...
        class BogusLock(object):
            def __init__(self, path):
                self.path = path
            def unlock(self):
                pass
        return BogusLock(relpath)

    def lock_write(self, relpath):
        """
        Lock the given file for exclusive (write) access.
        WARNING: many transports do not support this, so try to avoid using it.

        :return: A lock object, which has an unlock() member function
        """
        # This is a little bit bogus, but basically, we create a file
        # which should not already exist, and if it does, we assume
        # that there is a lock, and if it doesn't, then we assume
        # that we have taken the lock.
        return SFTPLock(relpath, self)
    def _sftp_open_exclusive(self, abspath, mode=None):
        """Open a remote path exclusively.

        SFTP supports O_EXCL (SFTP_FLAG_EXCL), which fails if
        the file already exists. However it does not expose this
        at the higher level of SFTPClient.open(), so we have to
        go through the lower-level request interface ourselves.

        WARNING: This breaks the SFTPClient abstraction, so it
        could easily break against an updated version of paramiko.

        :param abspath: The remote absolute path where the file should be opened
        :param mode: The mode permissions bits for the new file
        """
        # TODO: jam 20060816 Paramiko >= 1.6.2 (probably earlier) supports
        #       using the 'x' flag to indicate SFTP_FLAG_EXCL.
        #       However, there is no way to set the permission mode at open
        #       time using the sftp_client.file() functionality.
        path = self._get_sftp()._adjust_cwd(abspath)
        # mutter('sftp abspath %s => %s', abspath, path)
        attr = SFTPAttributes()
        if mode is not None:
            attr.st_mode = mode
        omode = (SFTP_FLAG_WRITE | SFTP_FLAG_CREATE
                | SFTP_FLAG_TRUNC | SFTP_FLAG_EXCL)
        try:
            t, msg = self._get_sftp()._request(CMD_OPEN, path, omode, attr)
            if t != CMD_HANDLE:
                raise TransportError('Expected an SFTP handle')
            handle = msg.get_string()
            return SFTPFile(self._get_sftp(), handle, 'wb', -1)
        except (paramiko.SSHException, IOError), e:
            self._translate_io_exception(e, abspath, ': unable to open',
                failure_exc=FileExists)
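    # For illustration: the _request(CMD_OPEN, ...) call above issues the raw
    # SFTP OPEN request. A successful reply is CMD_HANDLE carrying the
    # server's opaque file handle, which is then wrapped in a paramiko
    # SFTPFile; any other reply type is treated as a failed open.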
    def _can_roundtrip_unix_modebits(self):
        if sys.platform == 'win32':
            return False
        else:
            return True
def get_test_permutations():
    """Return the permutations to be used in testing."""
    from bzrlib.tests import stub_sftp
    return [(SFTPTransport, stub_sftp.SFTPAbsoluteServer),
            (SFTPTransport, stub_sftp.SFTPHomeDirServer),
            (SFTPTransport, stub_sftp.SFTPSiblingAbsoluteServer),
            ]